diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index a965f6f8..a2db6b20 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -6,21 +6,22 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.11 + - uses: actions/checkout@v4 - - name: Install uv - run: pip install uv + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' - - name: Create venv - run: uv venv + - name: Install uv + run: pip install uv - - name: Install package with dev dependencies - run: uv pip install -e ".[dev]" + - name: Sync workspace + # --all-packages is required to include extras from workspace members + # (backend's dev extra holds ruff itself). + run: uv sync --all-packages --all-extras - - name: Run Ruff - run: uv run ruff check src \ No newline at end of file + - name: Ruff check (backend + cli + tests) + # --no-sync prevents uv run's implicit re-sync from dropping member + # extras (it defaults to no extras and would remove ruff). 
+ run: uv run --no-sync ruff check backend/src cli/src backend/tests diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 354d2812..3714fa4e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -5,26 +5,26 @@ on: [push, pull_request] jobs: tests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - - name: Install uv - run: pip install uv - - - name: Create venv - run: uv venv - - - name: Install package with dev dependencies - run: uv pip install -e ".[dev]" - - - name: Run tests - run: uv run pytest - env: - ENVIRONMENT: local - SECRET_KEY: test-secret-key-for-testing-only \ No newline at end of file + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install uv + run: pip install uv + + - name: Sync workspace + run: uv sync --all-packages --all-extras + + # Tests live in backend/ today; cli/ has no tests yet. + - name: Pytest (backend) + # --no-sync prevents uv run's implicit re-sync from dropping member + # extras (it defaults to no extras and would remove pytest). 
+ run: cd backend && uv run --no-sync pytest + env: + ENVIRONMENT: local + SECRET_KEY: test-secret-key-for-testing-only diff --git a/.github/workflows/type-checking.yml b/.github/workflows/type-checking.yml index b6efa72f..8b0bf279 100644 --- a/.github/workflows/type-checking.yml +++ b/.github/workflows/type-checking.yml @@ -5,26 +5,29 @@ on: [push, pull_request] jobs: type-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.11 - - - name: Install uv - run: pip install uv - - - name: Create venv - run: uv venv - - - name: Install package with dev dependencies - run: uv pip install -e ".[dev]" - - - name: Run mypy - run: uv run mypy src --config-file pyproject.toml - env: - ENVIRONMENT: local - SECRET_KEY: test-secret-key-for-testing-only \ No newline at end of file + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install uv + run: pip install uv + + - name: Sync workspace + run: uv sync --all-packages --all-extras + + - name: Mypy (backend) + run: cd backend && uv run --no-sync mypy src --config-file pyproject.toml + env: + ENVIRONMENT: local + SECRET_KEY: test-secret-key-for-testing-only + + - name: Mypy (cli) + run: cd cli && uv run --no-sync mypy -p cli + env: + ENVIRONMENT: local + SECRET_KEY: test-secret-key-for-testing-only diff --git a/.gitignore b/.gitignore index 9dec4471..c7d3d063 100644 --- a/.gitignore +++ b/.gitignore @@ -14,8 +14,8 @@ dist/ downloads/ eggs/ .eggs/ -lib/ -lib64/ +/lib/ +/lib64/ parts/ sdist/ var/ @@ -110,7 +110,7 @@ venv.bak/ # Rope project settings .ropeproject -# mkdocs documentation +# zensical documentation /site # mypy diff --git a/README.md b/README.md index b9b2ef1e..92fd0e94 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

Benav Labs FastAPI boilerplate

- Batteries-included FastAPI starter with production-ready defaults, optional modules, and clear docs. + Batteries-included FastAPI starter — vertical-slice modules, swappable infrastructure, plugin-ready CLI.

@@ -32,15 +32,14 @@ * ⚡️ Fully async FastAPI + SQLAlchemy 2.0 * 🧱 Pydantic v2 models & validation -* 🔐 JWT auth (access + refresh), cookies for refresh -* 👮 Rate limiter + tiers (free/pro/etc.) +* 🔐 Server-side sessions + CSRF; OAuth (Google wired, GitHub scaffolded); API keys +* 👮 Rate limiter with per-tier, per-path rules * 🧰 FastCRUD for efficient CRUD & pagination -* 🧑‍💼 **CRUDAdmin**: minimal admin panel (optional) -* 🚦 ARQ background jobs (Redis) -* 🧊 Redis caching (server + client-side headers) -* 🌐 Configurable CORS middleware for frontend integration -* 🐳 One-command Docker Compose -* 🚀 NGINX & Gunicorn recipes for prod +* 🧑‍💼 **SQLAdmin**-based admin panel (optional, env-toggled) +* 🚦 [Taskiq](https://taskiq-python.github.io/) workers (Redis or RabbitMQ broker) +* 🧊 Redis or Memcached caching (`@cache` decorator + provider API) +* 🛠 **Plugin-ready `bp` CLI** — generate compose files, audit env, mount third-party command/feature plugins +* 🐳 Docker Compose for local / prod / nginx-fronted (generated by the CLI) ## Why and When to use it @@ -48,152 +47,103 @@ * A pragmatic starter with auth, CRUD, jobs, caching and rate-limits * **Sensible defaults** with the freedom to opt-out of modules -* **Docs over boilerplate** in README - depth lives in the site +* **A foundation that grows** — vertical-slice modules + a plugin-aware CLI for code generators +* **Docs over boilerplate** in this README — depth lives on the [docs site](https://benavlabs.github.io/FastAPI-boilerplate/) -> **Not a fit** if you need a monorepo microservices scaffold - [see the docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/project-structure/) for pointers. +> **Not a fit** if you need a monorepo microservices scaffold — [see the docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/project-structure/) for pointers. 
**What you get:** -* **App**: FastAPI app factory, [env-aware docs](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/development/) exposure -* **Auth**: [JWT access/refresh](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/), logout via token blacklist -* **DB**: Postgres + SQLAlchemy 2.0, [Alembic migrations](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/) -* **CRUD**: [FastCRUD generics](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/crud/) (get, get_multi, create, update, delete, joins) -* **Caching**: [decorator-based endpoints cache](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/caching/); client cache headers -* **Queues**: [ARQ worker](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/background-tasks/) (async jobs), Redis connection helpers +* **App**: FastAPI [app factory](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/project-structure/), env-aware docs exposure +* **Auth**: [server-side sessions](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/sessions/), CSRF, [OAuth](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/), [API keys](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/authentication/permissions/) +* **DB**: Postgres + SQLAlchemy 2.0, [Alembic migrations](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/migrations/) with prod-confirm gate +* **CRUD**: [FastCRUD generics](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/database/crud/) +* **Caching**: [decorator + provider API](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/caching/) (Redis or Memcached) +* **Queues**: [Taskiq workers](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/background-tasks/) (Redis or RabbitMQ) * **Rate limits**: [per-tier + per-path rules](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/rate-limiting/) -* **Admin**: [CRUDAdmin 
views](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/admin-panel/) for common models (optional) +* **Admin**: [SQLAdmin views](https://benavlabs.github.io/FastAPI-boilerplate/user-guide/admin-panel/) (optional, env-toggled) +* **CLI**: [`bp` tool](https://benavlabs.github.io/FastAPI-boilerplate/cli/) for compose scaffolding, env audits, and plugin extensions -This is what we've been using in production apps. Several applications running in production started from this boilerplate as their foundation - from SaaS platforms to internal tools. It's proven, stable technology that works together reliably. Use this as the foundation for whatever you want to build on top. +> **Building an AI SaaS?** Skip even more setup with [**FastroAI**](https://fastro.ai) — our production-ready template with AI integration, payments, and frontend included. -> **Building an AI SaaS?** Skip even more setup with [**FastroAI**](https://fastro.ai) - our production-ready template with AI integration, payments, and frontend included. +## Repo Layout -## TL;DR - Quickstart +This is a [uv workspace](https://docs.astral.sh/uv/concepts/projects/workspaces/) with two members. One venv at the root covers both. 
-Use the template on GitHub, create your repo, then: +```text +fastapi-boilerplate/ +├── pyproject.toml # workspace root (uv workspace metadata) +├── backend/ # the deployable application +│ ├── src/ # interfaces/, infrastructure/, modules/ +│ ├── pyproject.toml +│ └── Dockerfile # multi-stage: dev / migrate / prod +└── cli/ # `bp` — developer/operator tool (never ships in prod) + └── src/cli/ +``` + +## Quickstart ```bash git clone https://github.com//FastAPI-boilerplate cd FastAPI-boilerplate +uv sync --all-packages --all-extras # one venv at the root, both members installed ``` -**Quick setup:** Run the interactive setup script to choose your deployment configuration: +Generate a compose file for the deployment shape you want: ```bash -./setup.py +uv run bp deploy generate local # hot-reload dev stack +# or: uv run bp deploy generate prod # production single-host +# or: uv run bp deploy generate nginx # production behind nginx ``` -Or directly specify the deployment type: `./setup.py local`, `./setup.py staging`, or `./setup.py production`. - -The script copies the right files for your deployment scenario. Here's what each option sets up: - -### Option 1: Local development with Uvicorn - -Best for: **Development and testing** - -**Copies:** - -- `scripts/local_with_uvicorn/Dockerfile` → `Dockerfile` -- `scripts/local_with_uvicorn/docker-compose.yml` → `docker-compose.yml` -- `scripts/local_with_uvicorn/.env.example` → `src/.env` - -Sets up Uvicorn with auto-reload enabled. The example environment values work fine for development. - -**Manual setup:** `./setup.py local` or copy the files above manually. 
- -### Option 2: Staging with Gunicorn managing Uvicorn workers - -Best for: **Staging environments and load testing** - -**Copies:** - -- `scripts/gunicorn_managing_uvicorn_workers/Dockerfile` → `Dockerfile` -- `scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml` → `docker-compose.yml` -- `scripts/gunicorn_managing_uvicorn_workers/.env.example` → `src/.env` - -Sets up Gunicorn managing multiple Uvicorn workers for production-like performance testing. - -> [!WARNING] -> Change `SECRET_KEY` and passwords in the `.env` file for staging environments. - -**Manual setup:** `./setup.py staging` or copy the files above manually. - -### Option 3: Production with NGINX - -Best for: **Production deployments** - -**Copies:** - -- `scripts/production_with_nginx/Dockerfile` → `Dockerfile` -- `scripts/production_with_nginx/docker-compose.yml` → `docker-compose.yml` -- `scripts/production_with_nginx/.env.example` → `src/.env` - -Sets up NGINX as reverse proxy with Gunicorn + Uvicorn workers for production. - -> [!CAUTION] -> You MUST change `SECRET_KEY`, all passwords, and sensitive values in the `.env` file before deploying! - -**Manual setup:** `./setup.py production` or copy the files above manually. 
- ---- - -**Start your application:** +Configure your env (the CLI helps with secrets and validation): ```bash -docker compose up +cp backend/.env.example backend/.env +uv run bp env gen-secret # print a fresh SECRET_KEY +uv run bp env validate # audit .env against the production validator ``` -**Access your app:** -- **Local**: http://127.0.0.1:8000 (auto-reload enabled) → [API docs](http://127.0.0.1:8000/docs) -- **Staging**: http://127.0.0.1:8000 (production-like performance) -- **Production**: http://localhost (NGINX reverse proxy) - -### Next steps - -**Create your first admin user:** -```bash -docker compose run --rm create_superuser -``` +Bring it up: -**Run database migrations** (if you add models): ```bash -cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head +docker compose up --build +# → http://127.0.0.1:8000 (Swagger at /docs) ``` -**Test background jobs:** -```bash -curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello' -``` +**Without Docker** (Postgres + Redis required locally): -**Or run locally without Docker:** ```bash -uv sync && uv run uvicorn src.app.main:app --reload +cd backend +uv run alembic upgrade head +uv run python -m scripts.setup_initial_data # creates the first admin user + default tier +uv run fastapi dev src/interfaces/main.py # API +uv run taskiq worker infrastructure.taskiq.worker:default_broker # in a second terminal ``` -> Full setup (from-scratch, .env examples, PostgreSQL & Redis, gunicorn, nginx) lives in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/installation/). - -## Configuration (minimal) - -Create `src/.env` and set **app**, **database**, **JWT**, and **environment** settings. See the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/) for a copy-pasteable example and production guidance. 
- -[https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/configuration/) - -* `ENVIRONMENT=local|staging|production` controls API docs exposure -* Set `ADMIN_*` to enable the first admin user +> Full setup, env-var reference, and per-environment deployment guides live in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/installation/). ## Common tasks ```bash -# run locally with reload (without Docker) -uv sync && uv run uvicorn src.app.main:app --reload +# generate a fresh production-ready compose file +uv run bp deploy generate prod --workers 8 + +# audit your .env against the production security validator +uv run bp env validate # run Alembic migrations -cd src && uv run alembic revision --autogenerate && uv run alembic upgrade head +cd backend && uv run alembic revision --autogenerate -m "" && uv run alembic upgrade head + +# run tests +cd backend && uv run pytest -# enqueue a background job (example endpoint) -curl -X POST 'http://127.0.0.1:8000/api/v1/tasks/task?message=hello' +# install bp as a global tool (optional) +uv tool install --editable ./cli ``` -More examples (superuser creation, tiers, rate limits, admin usage) in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/getting-started/first-run/). +More examples (superuser creation, tiers, rate limits, admin usage, plugin authoring) in the [docs](https://benavlabs.github.io/FastAPI-boilerplate/). 
## Contributing diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 00000000..57f0c9ed --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,211 @@ +# =================================== +# Environment Configuration +# =================================== +# Options: production, staging, development, local +ENVIRONMENT=development + +# =================================== +# Database Configuration +# =================================== +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_DB=postgres +# For Docker Compose: use 'db' (the service name) +# For local development without Docker: use 'localhost' +POSTGRES_SERVER=db +POSTGRES_PORT=5432 +POSTGRES_SYNC_PREFIX=postgresql:// +POSTGRES_ASYNC_PREFIX=postgresql+asyncpg:// +CREATE_TABLES_ON_STARTUP=true + +# =================================== +# Cache Configuration +# =================================== +CACHE_ENABLED=true +# Options: memcached, redis +CACHE_BACKEND=redis +DEFAULT_CACHE_EXPIRATION=3600 + +# Client-side cache settings +CLIENT_CACHE_ENABLED=true +CLIENT_CACHE_MAX_AGE=60 + +# Cache Memcached settings (when using CACHE_BACKEND=memcached) +CACHE_MEMCACHED_HOST=localhost +CACHE_MEMCACHED_PORT=11211 +CACHE_MEMCACHED_POOL_SIZE=10 +CACHE_MEMCACHED_CONNECT_TIMEOUT=5 + +# Cache Redis settings (when using CACHE_BACKEND=redis) +# For Docker Compose: use 'redis' (the service name) +# For local development without Docker: use 'localhost' +CACHE_REDIS_HOST=redis +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +CACHE_REDIS_PASSWORD= +CACHE_REDIS_CONNECT_TIMEOUT=5 +CACHE_REDIS_POOL_SIZE=10 + +# =================================== +# Rate Limiting Configuration +# =================================== +RATE_LIMITER_ENABLED=true +# Options: memcached, redis +RATE_LIMITER_BACKEND=redis +RATE_LIMITER_FAIL_OPEN=true +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 + +# Rate Limiter Memcached settings (when using RATE_LIMITER_BACKEND=memcached) 
+RATE_LIMITER_MEMCACHED_HOST=localhost +RATE_LIMITER_MEMCACHED_PORT=11211 +RATE_LIMITER_MEMCACHED_POOL_SIZE=10 + +# Rate Limiter Redis settings (when using RATE_LIMITER_BACKEND=redis) +# Uses DB 1 by default to separate from cache (which uses DB 0) +# For Docker Compose: use 'redis' (the service name) +# For local development without Docker: use 'localhost' +RATE_LIMITER_REDIS_HOST=redis +RATE_LIMITER_REDIS_PORT=6379 +RATE_LIMITER_REDIS_DB=1 +RATE_LIMITER_REDIS_PASSWORD= +RATE_LIMITER_REDIS_CONNECT_TIMEOUT=5 +RATE_LIMITER_REDIS_POOL_SIZE=10 + +# =================================== +# Taskiq Configuration +# =================================== +# Enable/disable Taskiq async task processing +TASKIQ_ENABLED=true +# Broker type: 'redis' or 'rabbitmq' +TASKIQ_BROKER_TYPE=redis + +# Redis broker settings (used when TASKIQ_BROKER_TYPE=redis) +# For Docker Compose: use 'redis' (the service name) +# For local development without Docker: use 'localhost' +TASKIQ_REDIS_HOST=redis +TASKIQ_REDIS_PORT=6379 +TASKIQ_REDIS_DB=3 +TASKIQ_REDIS_PASSWORD= + +# RabbitMQ broker settings (used when TASKIQ_BROKER_TYPE=rabbitmq) +TASKIQ_RABBITMQ_HOST=localhost +TASKIQ_RABBITMQ_PORT=5672 +TASKIQ_RABBITMQ_USER=guest +TASKIQ_RABBITMQ_PASSWORD=guest +TASKIQ_RABBITMQ_VHOST=/ + +# Taskiq worker settings +TASKIQ_WORKER_CONCURRENCY=2 +TASKIQ_MAX_TASKS_PER_WORKER=1000 + + +# =================================== +# Web Server Configuration +# =================================== +# CORS settings +CORS_ENABLED=true +# Comma-separated list of allowed origins +CORS_ORIGINS=* +CORS_ALLOW_CREDENTIALS=true +CORS_ALLOW_METHODS=* +CORS_ALLOW_HEADERS=* + +# Compression settings +GZIP_ENABLED=true +GZIP_MINIMUM_SIZE=1000 + +# API documentation settings +ENABLE_DOCS_IN_PRODUCTION=false +OPENAPI_PREFIX= + +# =================================== +# Authentication & Security +# =================================== +SECRET_KEY=insecure-secret-key-change-this-in-production + +# Production Security Validation +# 
Automatically validates security settings in production environments +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=false + +# Session Management +SESSION_TIMEOUT_MINUTES=30 +SESSION_CLEANUP_INTERVAL_MINUTES=15 +MAX_SESSIONS_PER_USER=5 +SESSION_SECURE_COOKIES=true +SESSION_BACKEND=redis +SESSION_COOKIE_MAX_AGE=86400 + +# CSRF Protection +# Set to false for development/testing to disable CSRF validation +CSRF_ENABLED=true + +# Login Rate Limiting +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 + +# =================================== +# Admin Interface (SQLAdmin) +# =================================== +# Enable/disable the admin interface +ADMIN_ENABLED=true + +# =================================== +# OAuth Configuration +# =================================== +OAUTH_REDIRECT_BASE_URL=http://localhost:8000 + +# Google OAuth +OAUTH_GOOGLE_CLIENT_ID= +OAUTH_GOOGLE_CLIENT_SECRET= + +# GitHub OAuth +OAUTH_GITHUB_CLIENT_ID= +OAUTH_GITHUB_CLIENT_SECRET= + +# =================================== +# Application Settings +# =================================== +DEBUG=false +APP_NAME=FastAPI Boilerplate +APP_DESCRIPTION=Modular FastAPI starter +VERSION=0.18.0 +CONTACT_NAME=Support +CONTACT_EMAIL=support@example.com +LICENSE_NAME=MIT + +# API settings (optional overrides) +# API_PREFIX=/api +# DOCS_URL=/docs +# REDOC_URL=/redoc + +# =================================== +# Initial Setup Configuration +# =================================== +# Admin user settings (for initial setup) +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password + +# =================================== +# Production Security Checklist +# =================================== +# Before deploying to production, ensure: +# 1. Generate strong SECRET_KEY with high entropy +# Example: python -c "import secrets; print(secrets.token_urlsafe(64))" +# 2. Set unique database passwords (avoid defaults like 'postgres') +# 3. 
Set Redis passwords for all instances: +# - CACHE_REDIS_PASSWORD (for cache operations) +# - RATE_LIMITER_REDIS_PASSWORD (for rate limiting) +# 4. Use separate Redis instances/databases for different services: +# - Cache: CACHE_REDIS_DB=0 (default) +# - Rate Limiter: RATE_LIMITER_REDIS_DB=1 (default) +# - Taskiq: TASKIQ_REDIS_DB=3 (default) +# 5. Restrict CORS_ORIGINS to specific domains (avoid *) +# 6. Set strong admin credentials (ADMIN_USERNAME, ADMIN_PASSWORD) +# 7. Review session timeout settings for your security requirements +# 8. Set ENVIRONMENT=production to enable security validation +# 9. Configure RabbitMQ with strong credentials (avoid guest/guest) +# 10. Set TASKIQ_ENABLED=true in production for background task processing diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 00000000..d1eb706c --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,106 @@ +# ===================== Requirements Export Stage ===================== +# Export requirements from uv.lock for reproducible, secure builds +FROM python:3.11-slim AS requirements-stage + +WORKDIR /tmp + +# Use official uv image with pinned version for reproducibility +COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /usr/local/bin/uv + +# Copy both pyproject.toml and uv.lock for deterministic export +COPY pyproject.toml uv.lock ./ + +# Export from lock file (not re-resolving dependencies!) 
+# Filter out the local project line and keep only external dependencies +RUN uv export --no-dev --no-editable -o /tmp/req-full.txt && \ + grep -v "^\\.$" /tmp/req-full.txt > requirements-prod.txt + +# Export dev requirements from lock file +RUN uv export --no-editable -o /tmp/req-dev-full.txt && \ + grep -v "^\\.$" /tmp/req-dev-full.txt > requirements-dev.txt + +# ===================== Production Base Stage ===================== +FROM python:3.11-slim AS base + +WORKDIR /app + +# Install uv for fast, secure package installation +COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /usr/local/bin/uv + +# Install system dependencies needed for Python packages +RUN apt-get update && apt-get install -y \ + gcc \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + +# Copy and install production requirements with uv (fast + hash verification) +COPY --from=requirements-stage /tmp/requirements-prod.txt . +RUN --mount=type=cache,target=/root/.cache/uv \ + uv pip install --system -r requirements-prod.txt && \ + rm requirements-prod.txt + +# Copy source code +COPY src ./src + +# Set Python path +ENV PYTHONPATH=/app/src + +# ===================== Development Stage ===================== +FROM base AS dev + +WORKDIR /app/src + +# Copy and install dev requirements with uv (fast + hash verification) +COPY --from=requirements-stage /tmp/requirements-dev.txt . 
+RUN --mount=type=cache,target=/root/.cache/uv \ + uv pip install --system -r requirements-dev.txt && \ + rm requirements-dev.txt + +# Copy test files for development +COPY tests ./tests + +# Create non-root user for security (same as production) +RUN groupadd -r appuser && useradd -r -m -g appuser appuser +RUN chown -R appuser:appuser /app +USER appuser + +# Add quality-of-life configs for development +ENV PYTHONUNBUFFERED=1 + +# Development command with FastAPI CLI auto-reload +CMD ["fastapi", "dev", "interfaces/main.py", "--host", "0.0.0.0", "--port", "8000"] + +# ===================== Migration Stage ===================== +FROM base AS migrate + +# Optional build arg for CI/CD pipelines +ARG DATABASE_URL="" + +# Copy migration files +COPY migrations ./migrations +COPY alembic.ini . + +# Create non-root user for security +RUN groupadd -r appuser && useradd -r -m -g appuser appuser +RUN chown -R appuser:appuser /app +USER appuser + +# Set build-time DATABASE_URL as environment variable if provided +ENV DATABASE_URL=${DATABASE_URL} + +# Default command runs migrations +CMD ["alembic", "upgrade", "head"] + +# ===================== Production Stage ===================== +FROM base AS prod + +WORKDIR /app/src + +# Create non-root user for security +RUN groupadd -r appuser && useradd -r -m -g appuser appuser +RUN chown -R appuser:appuser /app +USER appuser + +# Production command with FastAPI CLI and configurable workers +ENV WORKERS=1 +CMD ["sh", "-c", "fastapi run interfaces/main.py --host 0.0.0.0 --port 8000 --workers $WORKERS"] \ No newline at end of file diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 00000000..035f57b3 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,147 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = %(here)s/migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. +prepend_sys_path = . + + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "path_separator" +# below. 
+# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH +# hooks = ruff +# ruff.type = exec +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/src/migrations/env.py b/backend/migrations/env.py similarity index 54% rename from src/migrations/env.py rename to backend/migrations/env.py index ea64b733..51141ba6 100644 --- a/src/migrations/env.py +++ b/backend/migrations/env.py @@ -1,5 +1,6 @@ import asyncio import importlib +import os import pkgutil from logging.config import fileConfig @@ -8,29 +9,65 @@ from sqlalchemy.engine import Connection from sqlalchemy.ext.asyncio import async_engine_from_config -from app.core.config import settings -from app.core.db.database import Base +from src.infrastructure.config.settings import settings +from src.infrastructure.database.session import Base # this is the Alembic Config object, 
which provides # access to the values within the .ini file in use. config = context.config -config.set_main_option( - "sqlalchemy.url", - f"{settings.POSTGRES_ASYNC_PREFIX}{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_SERVER}:{settings.POSTGRES_PORT}/{settings.POSTGRES_DB}", -) + +# Production safety checks +def validate_production_migration(): + """Validate production migration safety.""" + environment = os.getenv("ENVIRONMENT", "development") + + if environment == "production": + print("🚨 PRODUCTION MIGRATION DETECTED") + + # Require explicit confirmation + confirm = os.getenv("CONFIRM_PRODUCTION_MIGRATION") + if confirm != "yes": + raise Exception( + "Production migration requires CONFIRM_PRODUCTION_MIGRATION=yes environment variable. " + "This ensures you understand you're migrating production data." + ) + + # Check for required production environment variables + required_vars = ["DATABASE_URL", "SECRET_KEY"] + missing_vars = [var for var in required_vars if not os.getenv(var)] + if missing_vars: + raise Exception(f"Missing required production environment variables: {missing_vars}") + + # Warn about production migration + print("✅ Production migration confirmed") + print("🔄 Running migration against production database...") + print("⚠️ This operation will modify production data!") + + +# Build the database URL from settings - use the built-in DATABASE_URL property +config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) + +# Run production safety checks +validate_production_migration() if config.config_file_name is not None: fileConfig(config.config_file_name) def import_models(package_name): + """Automatically import all models from a package and its subpackages.""" package = importlib.import_module(package_name) for _, module_name, _ in pkgutil.walk_packages(package.__path__, package.__name__ + "."): - importlib.import_module(module_name) + try: + importlib.import_module(module_name) + except ImportError: + # Skip modules 
that can't be imported (e.g., due to missing dependencies) + pass -import_models("app.models") +# Import all models to ensure they're registered with SQLAlchemy +import_models("src.modules") target_metadata = Base.metadata diff --git a/src/migrations/script.py.mako b/backend/migrations/script.py.mako similarity index 82% rename from src/migrations/script.py.mako rename to backend/migrations/script.py.mako index fbc4b07d..11016301 100644 --- a/src/migrations/script.py.mako +++ b/backend/migrations/script.py.mako @@ -13,14 +13,16 @@ ${imports if imports else ""} # revision identifiers, used by Alembic. revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: + """Upgrade schema.""" ${upgrades if upgrades else "pass"} def downgrade() -> None: + """Downgrade schema.""" ${downgrades if downgrades else "pass"} diff --git a/src/__init__.py b/backend/migrations/versions/.gitkeep similarity index 100% rename from src/__init__.py rename to backend/migrations/versions/.gitkeep diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 00000000..830677e6 --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,134 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "fastapi-boilerplate" +version = "0.18.0" +description = "Modular FastAPI starter — vertical slices, swappable infrastructure, plugin-ready." 
+authors = [{ name = "Benav Labs", email = "contact@benav.io" }] +license = { text = "MIT" } +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "aiomcache>=0.8.2", + "aiosqlite>=0.21.0", + "alembic>=1.16.4", + "asyncpg>=0.30.0", + "faker>=37.1.0", + "fastapi[standard]>=0.115.8", + "fastcrud>=0.21.0", + "fastsecure>=0.3.0", + "greenlet>=3.1.1", + "httpx>=0.28.1", + "itsdangerous>=2.2.0", + "jinja2>=3.1.6", + "pydantic>=2.10.6", + "pydantic-settings>=2.7.1", + "redis>=6.1.0", + "sqladmin>=0.22.0", + "sqlalchemy>=2.0.37", + "taskiq>=0.11.20", + "taskiq-redis>=1.1.2", + "taskiq-aio-pika>=0.4.3", + "user-agents>=2.2.0", +] + +[project.optional-dependencies] +dev = [ + "mypy>=1.14.1", + "ruff>=0.9.4", + "pytest>=8.3.5", + "pytest-asyncio>=0.25.3", + "pytest-mock>=3.14.0", + "types-python-jose>=3.4.0.20250224", + "testcontainers[postgres]>=4.10.0", + "pytest-xdist[psutil]>=3.8.0", +] + +[tool.setuptools.packages.find] +where = ["src"] +include = ["*"] + +[tool.pytest.ini_options] +pythonpath = ["src"] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_functions = ["test_*"] +python_classes = ["Test*"] +asyncio_mode = "auto" +addopts = ["-v", "--strict-markers", "--tb=short", "--disable-warnings"] +env = ["ENVIRONMENT=pytest", "PYTEST_CURRENT_TEST=true"] +markers = [ + "unit: Unit tests that don't require external dependencies", + "integration: Integration tests that may require external services", + "asyncio: Tests that use asyncio", + "slow: marks tests as slow running", +] + +[tool.mypy] +python_version = "3.11" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = false +disallow_incomplete_defs = false +check_untyped_defs = true +disallow_untyped_decorators = false +no_implicit_optional = true +strict_optional = true +warn_redundant_casts = true +warn_unused_ignores = true +warn_no_return = true +warn_unreachable = true +namespace_packages = true +explicit_package_bases = true + +[[tool.mypy.overrides]] +module = 
"aiomcache" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "user_agents" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqladmin" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqladmin.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "wtforms.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.ext.asyncio" +ignore_errors = true + +[[tool.mypy.overrides]] +module = "taskiq" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "taskiq.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "taskiq_redis" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "taskiq_aio_pika" +ignore_missing_imports = true + +[tool.ruff] +line-length = 128 + +[tool.ruff.lint] +select = ["E", "F", "I", "UP"] +extend-select = ["UP006", "UP007", "UP035", "UP039", "PLC0415"] + +[tool.ruff.lint.isort] +known-first-party = ["src"] diff --git a/backend/scripts/create_first_superuser.py b/backend/scripts/create_first_superuser.py new file mode 100644 index 00000000..a79379f8 --- /dev/null +++ b/backend/scripts/create_first_superuser.py @@ -0,0 +1,85 @@ +import asyncio +import sys +from pathlib import Path + +backend_dir = Path(__file__).parent.parent +sys.path.append(str(backend_dir)) + +from sqlalchemy import update # noqa: E402 + +from src.infrastructure.config.settings import settings # noqa: E402 +from src.infrastructure.database.session import local_session # noqa: E402 +from src.infrastructure.logging import get_logger # noqa: E402 +from src.modules.common.exceptions import UserNotFoundError # noqa: E402 +from src.modules.user.models import User # noqa: E402 +from src.modules.user.schemas import UserCreate # noqa: E402 +from src.modules.user.service import UserService # noqa: E402 + +logger = get_logger() + + +async def create_first_superuser() -> None: + """ + Create the first superuser in the database if it doesn't exist. 
+ + This script uses environment variables for configuration: + - ADMIN_NAME: The admin's full name + - ADMIN_EMAIL: The admin's email address + - ADMIN_USERNAME: The admin's username + - ADMIN_PASSWORD: The admin's password + """ + try: + name = settings.ADMIN_NAME + email = settings.ADMIN_EMAIL + username = settings.ADMIN_USERNAME + password = settings.ADMIN_PASSWORD + + if not all([name, email, username, password]): + logger.error("Admin configuration is incomplete. Please check environment variables.") + logger.info("Using default admin credentials for testing") + name = "Admin User" + email = "admin@example.com" + username = "admin" + password = "adminpassword" + + async with local_session() as session: + user_service = UserService() + + user = None + try: + user_model = await user_service.get_by_email(email, session) + if user_model: + logger.info(f"Superuser with email {email} already exists.") + if not user_model["is_superuser"]: + user_model["is_superuser"] = True + await session.commit() + logger.info(f"Updated user {username} to be a superuser") + return + except UserNotFoundError: + logger.info(f"No user found with email {email}, creating a new superuser") + + user_data = UserCreate(name=name, email=email, username=username, password=password) + + user = await user_service.create(user_data, session) + + if hasattr(user, "id"): + user_id = user.id + else: + user_id = user["id"] + + stmt = update(User).where(User.id == user_id).values(is_superuser=True) + await session.execute(stmt) + await session.commit() + + logger.info(f"Superuser {username} created successfully with ID {user_id}") + + except Exception as e: + logger.error(f"Error creating superuser: {e}") + + +async def main() -> None: + await create_first_superuser() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/backend/scripts/create_first_tier.py b/backend/scripts/create_first_tier.py new file mode 100644 index 00000000..ee36aa2a --- /dev/null +++ 
b/backend/scripts/create_first_tier.py @@ -0,0 +1,54 @@ +import asyncio +import sys +from pathlib import Path + +# Add the backend directory to Python path +backend_dir = Path(__file__).parent.parent +sys.path.append(str(backend_dir)) + +from sqlalchemy import select # noqa: E402 + +from src.infrastructure.config.settings import settings # noqa: E402 +from src.infrastructure.database.session import local_session # noqa: E402 +from src.infrastructure.logging import get_logger # noqa: E402 +from src.modules.tier.models import Tier # noqa: E402 + +logger = get_logger() + + +async def create_first_tier() -> None: + """ + Create the first tier in the database if it doesn't exist. + + This script uses environment variables for configuration: + - DEFAULT_TIER_NAME: The name of the default tier (defaults to "free") + """ + try: + tier_name = getattr(settings, "DEFAULT_TIER_NAME", "free") + + async with local_session() as session: + query = select(Tier).where(Tier.name == tier_name) + result = await session.execute(query) + tier = result.scalar_one_or_none() + + if tier: + logger.info(f"Tier '{tier_name}' already exists with ID {tier.id}") + return + + tier = Tier(name=tier_name) + session.add(tier) + await session.commit() + await session.refresh(tier) + + logger.info(f"Tier '{tier_name}' created successfully with ID {tier.id}") + + except Exception as e: + logger.error(f"Error creating tier: {e}") + + +async def main() -> None: + await create_first_tier() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/backend/scripts/create_tables.py b/backend/scripts/create_tables.py new file mode 100644 index 00000000..2ea32586 --- /dev/null +++ b/backend/scripts/create_tables.py @@ -0,0 +1,28 @@ +"""Script to create database tables from SQLAlchemy models.""" + +import asyncio +import sys +from pathlib import Path + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from src.infrastructure.database.session import create_tables # noqa: E402 +from 
src.infrastructure.logging import get_logger # noqa: E402 + +logger = get_logger() + + +async def main() -> None: + """Create database tables.""" + logger.info("Creating database tables...") + + try: + await create_tables() + logger.info("✅ Database tables created successfully!") + except Exception as e: + logger.error(f"❌ Error creating database tables: {str(e)}", exc_info=True) + sys.exit(1) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/backend/scripts/setup_initial_data.py b/backend/scripts/setup_initial_data.py new file mode 100644 index 00000000..de11f951 --- /dev/null +++ b/backend/scripts/setup_initial_data.py @@ -0,0 +1,43 @@ +import asyncio +import sys +from pathlib import Path + +backend_dir = Path(__file__).parent.parent +sys.path.append(str(backend_dir)) + +from scripts.create_first_superuser import create_first_superuser # noqa: E402 +from scripts.create_first_tier import create_first_tier # noqa: E402 +from src.infrastructure.database.session import create_tables # noqa: E402 +from src.infrastructure.logging import get_logger # noqa: E402 + +logger = get_logger() + + +async def setup_initial_data() -> None: + """ + Setup initial data for the application, including: + - Create database tables + - Create default tier + - Create admin superuser + """ + logger.info("Setting up initial data...") + + logger.info("Creating database tables...") + try: + await create_tables() + logger.info("Database tables created successfully") + except Exception as e: + logger.error(f"Error creating database tables: {str(e)}", exc_info=True) + sys.exit(1) + + logger.info("Creating first tier...") + await create_first_tier() + + logger.info("Creating superuser...") + await create_first_superuser() + + logger.info("Initial data setup complete") + + +if __name__ == "__main__": + asyncio.run(setup_initial_data()) diff --git a/src/app/__init__.py b/backend/src/__init__.py similarity index 100% rename from src/app/__init__.py rename to backend/src/__init__.py 
diff --git a/backend/src/infrastructure/__init__.py b/backend/src/infrastructure/__init__.py new file mode 100644 index 00000000..72784aeb --- /dev/null +++ b/backend/src/infrastructure/__init__.py @@ -0,0 +1,10 @@ +"""Infrastructure module for the application.""" + +from .config import get_settings +from .database.session import async_session, create_tables + +__all__ = [ + "async_session", + "create_tables", + "get_settings", +] diff --git a/backend/src/infrastructure/app_factory.py b/backend/src/infrastructure/app_factory.py new file mode 100644 index 00000000..5be005f0 --- /dev/null +++ b/backend/src/infrastructure/app_factory.py @@ -0,0 +1,335 @@ +import json +import logging +from asyncio import Event +from collections.abc import AsyncGenerator, Callable +from contextlib import AbstractAsyncContextManager, asynccontextmanager +from typing import Any + +import anyio +import fastapi +from fastapi import APIRouter, Depends, FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.gzip import GZipMiddleware +from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html +from fastapi.openapi.utils import get_openapi + +from ..modules.common.utils.error_handler import register_exception_handlers +from .auth.session.dependencies import get_current_superuser +from .cache.initialize import close_cache, initialize_cache +from .config.settings import ( + CacheSettings, + DatabaseSettings, + EnvironmentOption, + EnvironmentSettings, + RateLimiterSettings, + Settings, + get_settings, +) +from .database.session import create_tables +from .middleware import ClientCacheMiddleware, SecurityHeadersMiddleware +from .rate_limit.initialize import close_rate_limiter, initialize_rate_limiter +from .rate_limit.middleware import RateLimiterMiddleware + +logger = logging.getLogger(__name__) + + +async def set_threadpool_tokens(number_of_tokens: int = 100) -> None: + """Configure the number of threadpool tokens for anyio.""" + limiter = 
anyio.to_thread.current_default_thread_limiter() + limiter.total_tokens = number_of_tokens + + +def lifespan_factory( + settings: Settings, + create_tables_on_startup: bool = True, +) -> Callable[[FastAPI], AbstractAsyncContextManager[None]]: + """Factory to create a lifespan async context manager for a FastAPI app.""" + + @asynccontextmanager + async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: + initialization_complete = Event() + app.state.initialization_complete = initialization_complete + + await set_threadpool_tokens() + + try: + if isinstance(settings, DatabaseSettings) and create_tables_on_startup: + await create_tables() + + if isinstance(settings, CacheSettings) and settings.CACHE_ENABLED: + await initialize_cache() + + if isinstance(settings, RateLimiterSettings) and settings.RATE_LIMITER_ENABLED: + await initialize_rate_limiter() + + initialization_complete.set() + + yield + + finally: + if isinstance(settings, CacheSettings) and settings.CACHE_ENABLED: + await close_cache() + + if isinstance(settings, RateLimiterSettings) and settings.RATE_LIMITER_ENABLED: + await close_rate_limiter() + + return lifespan + + +def create_application( + router: APIRouter, + settings: Settings | None = None, + lifespan: Callable[[FastAPI], AbstractAsyncContextManager[None]] | None = None, + create_tables_on_startup: bool | None = None, + enable_cors: bool | None = None, + cors_origins: list[str] | None = None, + enable_docs_in_production: bool | None = None, + docs_production_dependency: Callable[..., Any] | None = None, + enable_gzip: bool | None = None, + openapi_prefix: str | None = None, + title: str | None = None, + summary: str | None = None, + description: str | None = None, + version: str | None = None, + terms_of_service: str | None = None, + contact: dict[str, str] | None = None, + license_info: dict[str, str] | None = None, + openapi_tags: list[dict[str, Any]] | None = None, + docs_url: str | None = None, + redoc_url: str | None = None, + 
openapi_url: str | None = None, + **kwargs: Any, +) -> FastAPI: + """Creates and configures a FastAPI application based on the provided settings.""" + if settings is None: + settings = get_settings() + + _create_tables_on_startup = True + if create_tables_on_startup is not None: + _create_tables_on_startup = create_tables_on_startup + elif hasattr(settings, "CREATE_TABLES_ON_STARTUP"): + _create_tables_on_startup = settings.CREATE_TABLES_ON_STARTUP + + _enable_cors = True + if enable_cors is not None: + _enable_cors = enable_cors + elif hasattr(settings, "CORS_ENABLED"): + _enable_cors = settings.CORS_ENABLED + + _cors_origins: list[str] = ["*"] + if cors_origins is not None: + _cors_origins = cors_origins + elif hasattr(settings, "CORS_ORIGINS_LIST"): + _cors_origins = settings.CORS_ORIGINS_LIST + + _enable_docs_in_production = False + if enable_docs_in_production is not None: + _enable_docs_in_production = enable_docs_in_production + elif hasattr(settings, "ENABLE_DOCS_IN_PRODUCTION"): + _enable_docs_in_production = settings.ENABLE_DOCS_IN_PRODUCTION + + _enable_gzip = True + if enable_gzip is not None: + _enable_gzip = enable_gzip + elif hasattr(settings, "GZIP_ENABLED"): + _enable_gzip = settings.GZIP_ENABLED + + _openapi_prefix = "" + if openapi_prefix is not None: + _openapi_prefix = openapi_prefix + elif hasattr(settings, "OPENAPI_PREFIX"): + _openapi_prefix = settings.OPENAPI_PREFIX + + metadata: dict[str, Any] = {"openapi_prefix": _openapi_prefix} + + if title is not None: + metadata["title"] = title + elif hasattr(settings, "API_TITLE") and settings.API_TITLE: + metadata["title"] = settings.API_TITLE + elif hasattr(settings, "APP_NAME"): + metadata["title"] = settings.APP_NAME + + if summary is not None: + metadata["summary"] = summary + elif hasattr(settings, "API_SUMMARY") and settings.API_SUMMARY: + metadata["summary"] = settings.API_SUMMARY + + if description is not None: + metadata["description"] = description + elif hasattr(settings, 
"API_DESCRIPTION") and settings.API_DESCRIPTION: + metadata["description"] = settings.API_DESCRIPTION + elif hasattr(settings, "APP_DESCRIPTION"): + metadata["description"] = settings.APP_DESCRIPTION + + if version is not None: + metadata["version"] = version + elif hasattr(settings, "API_VERSION") and settings.API_VERSION: + metadata["version"] = settings.API_VERSION + elif hasattr(settings, "VERSION"): + metadata["version"] = settings.VERSION + + if terms_of_service is not None: + metadata["terms_of_service"] = terms_of_service + elif hasattr(settings, "API_TERMS_OF_SERVICE") and settings.API_TERMS_OF_SERVICE: + metadata["terms_of_service"] = settings.API_TERMS_OF_SERVICE + + if contact is not None: + metadata["contact"] = contact + else: + contact_dict = {} + if hasattr(settings, "API_CONTACT_NAME") and settings.API_CONTACT_NAME: + contact_dict["name"] = settings.API_CONTACT_NAME + elif hasattr(settings, "CONTACT_NAME") and settings.CONTACT_NAME: + contact_dict["name"] = settings.CONTACT_NAME + if hasattr(settings, "API_CONTACT_EMAIL") and settings.API_CONTACT_EMAIL: + contact_dict["email"] = settings.API_CONTACT_EMAIL + elif hasattr(settings, "CONTACT_EMAIL") and settings.CONTACT_EMAIL: + contact_dict["email"] = settings.CONTACT_EMAIL + if hasattr(settings, "API_CONTACT_URL") and settings.API_CONTACT_URL: + contact_dict["url"] = settings.API_CONTACT_URL + if contact_dict: + metadata["contact"] = contact_dict + + if license_info is not None: + metadata["license_info"] = license_info + else: + license_dict = {} + if hasattr(settings, "API_LICENSE_NAME") and settings.API_LICENSE_NAME: + license_dict["name"] = settings.API_LICENSE_NAME + elif hasattr(settings, "LICENSE_NAME") and settings.LICENSE_NAME: + license_dict["name"] = settings.LICENSE_NAME + if hasattr(settings, "API_LICENSE_URL") and settings.API_LICENSE_URL: + license_dict["url"] = settings.API_LICENSE_URL + if hasattr(settings, "API_LICENSE_IDENTIFIER") and settings.API_LICENSE_IDENTIFIER: + 
license_dict["identifier"] = settings.API_LICENSE_IDENTIFIER + if license_dict: + metadata["license_info"] = license_dict + + if openapi_tags is not None: + metadata["openapi_tags"] = openapi_tags + elif hasattr(settings, "API_TAGS_METADATA") and settings.API_TAGS_METADATA: + try: + metadata["openapi_tags"] = json.loads(settings.API_TAGS_METADATA) + except json.JSONDecodeError: + pass + + _docs_url = "/docs" + if docs_url is not None: + _docs_url = docs_url + elif hasattr(settings, "DOCS_URL"): + _docs_url = settings.DOCS_URL + + _redoc_url = "/redoc" + if redoc_url is not None: + _redoc_url = redoc_url + elif hasattr(settings, "REDOC_URL"): + _redoc_url = settings.REDOC_URL + + _openapi_url = "/openapi.json" + if openapi_url is not None: + _openapi_url = openapi_url + elif hasattr(settings, "OPENAPI_URL"): + _openapi_url = settings.OPENAPI_URL + + metadata["docs_url"] = _docs_url + metadata["redoc_url"] = _redoc_url + metadata["openapi_url"] = _openapi_url + + kwargs.update(metadata) + + hide_docs = ( + isinstance(settings, EnvironmentSettings) + and settings.ENVIRONMENT == EnvironmentOption.PRODUCTION + and not _enable_docs_in_production + ) + if hide_docs: + kwargs.update({"docs_url": None, "redoc_url": None, "openapi_url": None}) + + if lifespan is None: + lifespan = lifespan_factory(settings, create_tables_on_startup=_create_tables_on_startup) + + application = FastAPI(lifespan=lifespan, **kwargs) + + register_exception_handlers(application) + + application.include_router(router) + + if isinstance(settings, RateLimiterSettings) and settings.RATE_LIMITER_ENABLED: + application.add_middleware(RateLimiterMiddleware) + + if isinstance(settings, CacheSettings) and settings.CACHE_ENABLED and hasattr(settings, "CLIENT_CACHE_ENABLED"): + if settings.CLIENT_CACHE_ENABLED: + client_cache_max_age = getattr(settings, "CLIENT_CACHE_MAX_AGE", 60) + application.add_middleware(ClientCacheMiddleware, max_age=client_cache_max_age) + + if _enable_cors: + cors_settings_dict: 
dict[str, Any] = { + "allow_origins": _cors_origins, + "allow_credentials": True, + "allow_methods": ["*"], + "allow_headers": ["*"], + } + if hasattr(settings, "CORS_ALLOW_CREDENTIALS"): + cors_settings_dict["allow_credentials"] = settings.CORS_ALLOW_CREDENTIALS + if hasattr(settings, "CORS_ALLOW_METHODS"): + methods = settings.CORS_ALLOW_METHODS + cors_settings_dict["allow_methods"] = methods.split(",") if isinstance(methods, str) else methods + if hasattr(settings, "CORS_ALLOW_HEADERS"): + headers = settings.CORS_ALLOW_HEADERS + cors_settings_dict["allow_headers"] = headers.split(",") if isinstance(headers, str) else headers + application.add_middleware(CORSMiddleware, **cors_settings_dict) + + if _enable_gzip: + gzip_min_size = getattr(settings, "GZIP_MINIMUM_SIZE", 1000) if hasattr(settings, "GZIP_MINIMUM_SIZE") else 1000 + application.add_middleware(GZipMiddleware, minimum_size=gzip_min_size) + + _security_headers_enabled = getattr(settings, "SECURITY_HEADERS_ENABLED", True) + if _security_headers_enabled: + _environment = settings.ENVIRONMENT.value if hasattr(settings, "ENVIRONMENT") else EnvironmentOption.DEVELOPMENT.value + application.add_middleware(SecurityHeadersMiddleware, environment=_environment) + + show_docs = isinstance(settings, EnvironmentSettings) and ( + settings.ENVIRONMENT != EnvironmentOption.PRODUCTION or _enable_docs_in_production + ) + + if show_docs: + docs_router = APIRouter() + + is_production = isinstance(settings, EnvironmentSettings) and settings.ENVIRONMENT == EnvironmentOption.PRODUCTION + is_local = isinstance(settings, EnvironmentSettings) and settings.ENVIRONMENT == EnvironmentOption.LOCAL + + apply_dependency = False + dependency_to_apply = None + + if is_production and _enable_docs_in_production: + apply_dependency = True + dependency_to_apply = ( + docs_production_dependency if docs_production_dependency is not None else get_current_superuser + ) + elif not is_local and not is_production: + apply_dependency = True + 
dependency_to_apply = get_current_superuser + + if apply_dependency and dependency_to_apply is not None: + docs_router = APIRouter(dependencies=[Depends(dependency_to_apply)]) + + @docs_router.get("/docs", include_in_schema=False) + async def get_swagger_documentation() -> fastapi.responses.HTMLResponse: + return get_swagger_ui_html(openapi_url="/openapi.json", title="docs") + + @docs_router.get("/redoc", include_in_schema=False) + async def get_redoc_documentation() -> fastapi.responses.HTMLResponse: + return get_redoc_html(openapi_url="/openapi.json", title="redoc") + + @docs_router.get("/openapi.json", include_in_schema=False) + async def openapi() -> dict[str, Any]: + return get_openapi( + title=metadata.get("title", "API"), + version=metadata.get("version", "0.1.0"), + description=metadata.get("description", ""), + routes=application.routes, + ) + + application.include_router(docs_router) + + return application diff --git a/backend/src/infrastructure/auth/__init__.py b/backend/src/infrastructure/auth/__init__.py new file mode 100644 index 00000000..34d667a5 --- /dev/null +++ b/backend/src/infrastructure/auth/__init__.py @@ -0,0 +1,8 @@ +from .session.dependencies import authenticate_user, get_current_superuser, get_current_user, get_optional_user + +__all__ = [ + "get_current_user", + "get_optional_user", + "get_current_superuser", + "authenticate_user", +] diff --git a/backend/src/infrastructure/auth/constants.py b/backend/src/infrastructure/auth/constants.py new file mode 100644 index 00000000..e9069013 --- /dev/null +++ b/backend/src/infrastructure/auth/constants.py @@ -0,0 +1,3 @@ +"""Authentication constants.""" + +HSTS_MAX_AGE_SECONDS = 63072000 # 2 years diff --git a/backend/src/infrastructure/auth/http_exceptions.py b/backend/src/infrastructure/auth/http_exceptions.py new file mode 100644 index 00000000..b23e40c6 --- /dev/null +++ b/backend/src/infrastructure/auth/http_exceptions.py @@ -0,0 +1,82 @@ +"""Authentication-specific HTTP exceptions. 
+ +This module provides HTTP exceptions specifically designed for authentication +and authorization scenarios, extending the base FastCRUD exceptions with +auth-specific functionality like CSRF protection. + +The module re-exports commonly used HTTP exceptions from FastCRUD for +convenience and consistency across the authentication system. +""" + +from fastapi import status +from fastapi.exceptions import HTTPException +from fastcrud.exceptions.http_exceptions import ( + BadRequestException, + DuplicateValueException, + ForbiddenException, + NotFoundException, + RateLimitException, + UnauthorizedException, + UnprocessableEntityException, +) + +__all__ = [ + "BadRequestException", + "NotFoundException", + "ForbiddenException", + "UnauthorizedException", + "UnprocessableEntityException", + "DuplicateValueException", + "RateLimitException", + "HTTPException", + "CSRFException", +] + + +class CSRFException(HTTPException): + """Exception for Cross-Site Request Forgery (CSRF) validation failures. + + Raised when CSRF token validation fails, indicating a potential + security attack or invalid request from an untrusted source. + + This exception automatically sets the appropriate HTTP status code + (403 Forbidden) and includes security-relevant headers to help + clients and security tools identify CSRF-related failures. + + Args: + detail: Custom error message describing the CSRF failure. + Defaults to "CSRF token validation failed". 
+ + Note: + This exception includes the X-CSRF-Error header which: + - Helps security monitoring tools identify CSRF attacks + - Allows client-side handling of CSRF errors + - Provides clear indication of the error type + - Assists in debugging authentication issues + + Example: + ```python + # In a CSRF validation function + def validate_csrf_token(token: str, session_token: str): + if not token or token != session_token: + raise CSRFException("Invalid CSRF token") + + # In an endpoint with CSRF protection + @app.post("/api/protected-action") + async def protected_action(csrf_token: str = Form(...)): + try: + validate_csrf_token(csrf_token, request.session.get("csrf_token")) + # Process the protected action + except CSRFException: + # Log security event + logger.warning("CSRF attack attempt detected") + raise + ``` + """ + + def __init__(self, detail: str = "CSRF token validation failed"): + super().__init__( + status_code=status.HTTP_403_FORBIDDEN, + detail=detail, + headers={"X-CSRF-Error": "true"}, + ) diff --git a/backend/src/infrastructure/auth/oauth/__init__.py b/backend/src/infrastructure/auth/oauth/__init__.py new file mode 100644 index 00000000..4d1d4f26 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/__init__.py @@ -0,0 +1,17 @@ +"""OAuth authentication integration.""" + +from .factory import OAuthProviderFactory +from .providers.github import GitHubOAuthProvider +from .providers.google import GoogleOAuthProvider +from .schemas import OAuthState, OAuthToken, OAuthUserInfo +from .services import oauth_account_service + +__all__ = [ + "GoogleOAuthProvider", + "GitHubOAuthProvider", + "OAuthProviderFactory", + "OAuthState", + "OAuthUserInfo", + "OAuthToken", + "oauth_account_service", +] diff --git a/backend/src/infrastructure/auth/oauth/dependencies.py b/backend/src/infrastructure/auth/oauth/dependencies.py new file mode 100644 index 00000000..08275b7c --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/dependencies.py @@ -0,0 +1,95 @@ 
+from fastapi import Depends, HTTPException, status + +from ....infrastructure.config.settings import get_settings +from ....modules.user.enums import OAuthProvider +from ...logging import get_logger +from ..session.storage import AbstractSessionStorage, get_session_storage +from .factory import OAuthProviderFactory +from .provider import AbstractOAuthProvider +from .providers.github import GitHubOAuthProvider +from .providers.google import GoogleOAuthProvider +from .schemas import OAuthState + +logger = get_logger() +settings = get_settings() + +OAuthProviderFactory.register_provider(OAuthProvider.GOOGLE.value, GoogleOAuthProvider) +OAuthProviderFactory.register_provider(OAuthProvider.GITHUB.value, GitHubOAuthProvider) + + +def get_oauth_state_storage() -> AbstractSessionStorage[OAuthState]: + """Get a storage backend for OAuth state objects.""" + return get_session_storage( + backend=settings.SESSION_BACKEND, + model_type=OAuthState, + prefix="oauth_state:", + expiration=1800, + host=settings.CACHE_REDIS_HOST, + port=settings.CACHE_REDIS_PORT, + db=settings.CACHE_REDIS_DB, + password=settings.CACHE_REDIS_PASSWORD, + ) + + +def get_google_provider() -> AbstractOAuthProvider: + """ + Get the configured Google OAuth provider instance. 
+ + Returns: + Configured Google OAuth provider + + Raises: + HTTPException: If provider is not configured properly + """ + if not settings.OAUTH_GOOGLE_CLIENT_ID or not settings.OAUTH_GOOGLE_CLIENT_SECRET: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Google OAuth credentials not configured") + + try: + return OAuthProviderFactory.create_provider( + provider_name=OAuthProvider.GOOGLE.value, + client_id=settings.OAUTH_GOOGLE_CLIENT_ID, + client_secret=settings.OAUTH_GOOGLE_CLIENT_SECRET, + redirect_uri=f"{settings.OAUTH_REDIRECT_BASE_URL}/api/v1/auth/oauth/callback/{OAuthProvider.GOOGLE.value}", + ) + except ValueError: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Google OAuth provider not configured") + + +def get_github_provider() -> AbstractOAuthProvider: + """ + Get the configured GitHub OAuth provider instance. + + Returns: + Configured GitHub OAuth provider + + Raises: + HTTPException: If provider is not configured properly + """ + if not settings.OAUTH_GITHUB_CLIENT_ID or not settings.OAUTH_GITHUB_CLIENT_SECRET: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="GitHub OAuth credentials not configured") + + try: + return OAuthProviderFactory.create_provider( + provider_name=OAuthProvider.GITHUB.value, + client_id=settings.OAUTH_GITHUB_CLIENT_ID, + client_secret=settings.OAUTH_GITHUB_CLIENT_SECRET, + redirect_uri=f"{settings.OAUTH_REDIRECT_BASE_URL}/api/v1/auth/oauth/callback/{OAuthProvider.GITHUB.value}", + ) + except ValueError: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="GitHub OAuth provider not configured") + + +async def get_oauth_state( + state: str, state_storage: AbstractSessionStorage[OAuthState] = Depends(get_oauth_state_storage) +) -> OAuthState | None: + """ + Get and validate the OAuth state from storage. 
+ + Args: + state: State parameter from OAuth callback + state_storage: Storage backend for OAuth state + + Returns: + OAuthState if found and valid, None otherwise + """ + return await state_storage.get(state, OAuthState) diff --git a/backend/src/infrastructure/auth/oauth/factory.py b/backend/src/infrastructure/auth/oauth/factory.py new file mode 100644 index 00000000..da63b970 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/factory.py @@ -0,0 +1,73 @@ +from typing import cast + +from .provider import AbstractOAuthProvider + + +class OAuthProviderFactory: + """Factory class for creating OAuth provider instances.""" + + _providers: dict[str, type[AbstractOAuthProvider]] = {} + + @classmethod + def register_provider(cls, provider_name: str, provider_class: type[AbstractOAuthProvider]) -> None: + """ + Register an OAuth provider class. + + Args: + provider_name: Name identifier for the provider + provider_class: The provider class to register + """ + cls._providers[provider_name] = provider_class + + @classmethod + def get_provider_class(cls, provider_name: str) -> type[AbstractOAuthProvider] | None: + """ + Get an OAuth provider class by name. + + Args: + provider_name: Name identifier for the provider + + Returns: + The provider class if registered, None otherwise + """ + return cls._providers.get(provider_name) + + @classmethod + def create_provider( + cls, provider_name: str, client_id: str, client_secret: str, redirect_uri: str + ) -> AbstractOAuthProvider: + """ + Create an instance of the requested provider with the given credentials. 
+ + Args: + provider_name: Name of the provider to create + client_id: OAuth client ID + client_secret: OAuth client secret + redirect_uri: Callback URL for OAuth flow + + Returns: + Configured provider instance + + Raises: + ValueError: If provider not registered + """ + provider_class = cls.get_provider_class(provider_name) + if not provider_class: + raise ValueError(f"OAuth provider {provider_name} not registered") + + if hasattr(provider_class, "create"): + return cast( + AbstractOAuthProvider, + provider_class.create(client_id=client_id, client_secret=client_secret, redirect_uri=redirect_uri), + ) + + return provider_class( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + scopes=[], + authorize_endpoint="", + token_endpoint="", + userinfo_endpoint="", + provider_name=provider_name, + ) diff --git a/backend/src/infrastructure/auth/oauth/provider.py b/backend/src/infrastructure/auth/oauth/provider.py new file mode 100644 index 00000000..f44e91a4 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/provider.py @@ -0,0 +1,206 @@ +import base64 +import hashlib +import secrets +from abc import ABC, abstractmethod +from typing import Any, cast +from urllib.parse import urlencode + +import httpx + +from ...logging import get_logger +from .schemas import OAuthUserInfo + +logger = get_logger() + + +class AbstractOAuthProvider(ABC): + """ + Abstract base class for OAuth 2.0 authentication providers. + + This class defines the interface that all OAuth providers must implement + and provides common functionality for the OAuth authentication flow. + + Attributes: + client_id: OAuth client ID from provider + client_secret: OAuth client secret + redirect_uri: URI to redirect after authentication + scopes: List of OAuth scopes to request + authorize_endpoint: Provider's authorization endpoint + token_endpoint: Provider's token endpoint + userinfo_endpoint: Provider's user info endpoint + name: Provider identifier (e.g. 
"google", "github") + """ + + def __init__( + self, + client_id: str, + client_secret: str, + redirect_uri: str, + scopes: list[str], + authorize_endpoint: str, + token_endpoint: str, + userinfo_endpoint: str, + provider_name: str, + ): + """Initialize the OAuth provider with required configuration.""" + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.scopes = scopes + self.authorize_endpoint = authorize_endpoint + self.token_endpoint = token_endpoint + self.userinfo_endpoint = userinfo_endpoint + self._name = provider_name + + @property + def name(self) -> str: + """Get the provider name.""" + return self._name + + def generate_state(self) -> str: + """Generate a random state parameter for CSRF protection.""" + return secrets.token_urlsafe(32) + + def generate_pkce_codes(self) -> dict[str, str]: + """Generate PKCE code challenge and verifier for auth flow.""" + code_verifier = secrets.token_urlsafe(64) + code_verifier_bytes = code_verifier.encode("ascii") + code_challenge = base64.urlsafe_b64encode(hashlib.sha256(code_verifier_bytes).digest()).decode("ascii").rstrip("=") + + return {"code_verifier": code_verifier, "code_challenge": code_challenge} + + async def get_authorization_url( + self, state: str | None = None, pkce: bool = True, extra_params: dict[str, str] | None = None + ) -> dict[str, str]: + """ + Get the authorization URL for redirecting users to the provider. + + Args: + state: Optional state parameter for CSRF protection. If not provided, + a random state will be generated. 
+ pkce: Whether to use PKCE extension for enhanced security + extra_params: Additional query parameters to include in the URL + + Returns: + Dict containing the authorization URL and state/pkce parameters + """ + if state is None: + state = self.generate_state() + + params = { + "client_id": self.client_id, + "redirect_uri": self.redirect_uri, + "response_type": "code", + "state": state, + "scope": " ".join(self.scopes), + } + + result = {"url": "", "state": state} + + if pkce: + pkce_codes = self.generate_pkce_codes() + params["code_challenge"] = pkce_codes["code_challenge"] + params["code_challenge_method"] = "S256" + result["code_verifier"] = pkce_codes["code_verifier"] + + if extra_params: + params.update(extra_params) + + result["url"] = f"{self.authorize_endpoint}?{urlencode(params)}" + return result + + async def exchange_code( + self, code: str, code_verifier: str | None = None, headers: dict[str, str] | None = None + ) -> dict[str, Any]: + """ + Exchange authorization code for access token. 
+ + Args: + code: Authorization code received from provider + code_verifier: PKCE code verifier if PKCE was used + headers: Additional headers for the token request + + Returns: + Dict containing access_token and other provider response + """ + data = { + "client_id": self.client_id, + "client_secret": self.client_secret, + "code": code, + "redirect_uri": self.redirect_uri, + "grant_type": "authorization_code", + } + + if code_verifier: + data["code_verifier"] = code_verifier + + request_headers = {"Accept": "application/json"} + if headers: + request_headers.update(headers) + + try: + async with httpx.AsyncClient() as client: + response = await client.post(self.token_endpoint, data=data, headers=request_headers) + response.raise_for_status() + return cast(dict[str, Any], response.json()) + except Exception as e: + logger.error(f"Error exchanging code for {self.name}: {str(e)}") + raise + + async def get_user_info(self, access_token: str) -> dict[str, Any]: + """ + Get user information from the provider using an access token. + + Args: + access_token: OAuth access token + + Returns: + Dict containing user profile information from provider + """ + headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/json", + } + + try: + async with httpx.AsyncClient() as client: + response = await client.get(self.userinfo_endpoint, headers=headers) + response.raise_for_status() + return cast(dict[str, Any], response.json()) + except Exception as e: + logger.error(f"Error fetching user info for {self.name}: {str(e)}") + raise + + async def validate_token(self, access_token: str) -> bool: + """ + Validate that an access token is still valid. + + Default implementation checks if we can fetch user info. + Override for providers with specific token validation endpoints. 
+ + Args: + access_token: OAuth access token to validate + + Returns: + True if token is valid, False otherwise + """ + try: + await self.get_user_info(access_token) + return True + except Exception: + return False + + @abstractmethod + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """ + Process provider-specific user info into a standardized format. + + Must be implemented by each provider to normalize user data. + + Args: + user_info: Raw user info from provider + + Returns: + Standardized user info + """ + pass diff --git a/backend/src/infrastructure/auth/oauth/providers/__init__.py b/backend/src/infrastructure/auth/oauth/providers/__init__.py new file mode 100644 index 00000000..4c9936de --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/providers/__init__.py @@ -0,0 +1 @@ +"""OAuth provider implementations.""" diff --git a/backend/src/infrastructure/auth/oauth/providers/github.py b/backend/src/infrastructure/auth/oauth/providers/github.py new file mode 100644 index 00000000..05d575a8 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/providers/github.py @@ -0,0 +1,160 @@ +from typing import Any + +import httpx + +from ..provider import AbstractOAuthProvider +from ..schemas import OAuthUserInfo + + +class GitHubOAuthProvider(AbstractOAuthProvider): + """ + OAuth authentication provider for GitHub Sign-In. + + This provider implements GitHub's OAuth 2.0 authentication flow, + allowing users to sign in with their GitHub accounts. It handles + the OAuth flow and standardizes the user information format. + """ + + def __init__( + self, + client_id: str, + client_secret: str, + redirect_uri: str, + scopes: list[str] | None = None, + ): + """ + Initialize the GitHub OAuth provider. + + Args: + client_id: GitHub OAuth client ID from GitHub Developer Settings + client_secret: GitHub OAuth client secret + redirect_uri: Callback URL for OAuth flow completion + scopes: Optional list of GitHub OAuth scopes to request. 
+ If not provided, uses default scopes for basic profile + and email access. + """ + default_scopes = ["read:user", "user:email"] + + super().__init__( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + scopes=scopes or default_scopes, + authorize_endpoint="https://github.com/login/oauth/authorize", + token_endpoint="https://github.com/login/oauth/access_token", + userinfo_endpoint="https://api.github.com/user", + provider_name="github", + ) + + async def exchange_code( + self, code: str, code_verifier: str | None = None, headers: dict[str, str] | None = None + ) -> dict[str, Any]: + """ + Override to handle GitHub-specific token response. + + GitHub requires the 'Accept: application/json' header to receive + the response in JSON format instead of the default + application/x-www-form-urlencoded. + + Args: + code: The authorization code received from GitHub + code_verifier: PKCE code verifier if PKCE was used + headers: Optional additional headers for the token request + + Returns: + Dict[str, Any]: The token response containing: + - access_token: OAuth access token + - token_type: Token type (usually "bearer") + - scope: Granted scopes as a comma-separated string + """ + if headers is None: + headers = {} + + headers["Accept"] = "application/json" + return await super().exchange_code(code, code_verifier, headers) + + async def get_user_info(self, access_token: str) -> dict[str, Any]: + """ + Get both user profile and email information from GitHub. + + Makes two API calls: + 1. Fetches the user's profile from the user endpoint + 2. Fetches the user's email addresses from the emails endpoint + + GitHub requires separate API calls to get email information, + especially for users with private email addresses. 
+ + Args: + access_token: Valid GitHub OAuth access token + + Returns: + Dict[str, Any]: Combined user profile and email data + """ + profile = await super().get_user_info(access_token) + + headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/json", + } + + async with httpx.AsyncClient() as client: + response = await client.get("https://api.github.com/user/emails", headers=headers) + + if response.status_code == 200: + emails_data = response.json() + profile["emails"] = emails_data + + return profile + + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """ + Process GitHub user info into standardized format. + + Transforms the raw user info from GitHub's API into a consistent + format. Handles the extraction of primary email and its verification + status from the emails array. + + Args: + user_info: Raw user info from GitHub containing fields like + id, login, name, emails array, etc. + + Returns: + Standardized user info + """ + email = None + email_verified = False + + if emails := user_info.get("emails", []): + for e in emails: + if e.get("primary"): + email = e.get("email") + email_verified = e.get("verified", False) + break + + return OAuthUserInfo( + provider="github", + provider_user_id=str(user_info.get("id")), + email=email, + email_verified=email_verified, + name=user_info.get("name"), + given_name=None, + family_name=None, + username=user_info.get("login"), + picture=user_info.get("avatar_url"), + raw_data=user_info, + ) + + @classmethod + def create(cls, client_id: str, client_secret: str, redirect_uri: str) -> "GitHubOAuthProvider": + """ + Factory method to create an instance with default settings. 
+ + Args: + client_id: GitHub OAuth client ID + client_secret: GitHub OAuth client secret + redirect_uri: Callback URL for OAuth flow completion + + Returns: + Configured GitHubOAuthProvider instance + """ + return cls(client_id=client_id, client_secret=client_secret, redirect_uri=redirect_uri) diff --git a/backend/src/infrastructure/auth/oauth/providers/google.py b/backend/src/infrastructure/auth/oauth/providers/google.py new file mode 100644 index 00000000..7b950364 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/providers/google.py @@ -0,0 +1,113 @@ +from typing import Any + +from ..provider import AbstractOAuthProvider +from ..schemas import OAuthUserInfo + + +class GoogleOAuthProvider(AbstractOAuthProvider): + """ + OAuth authentication provider for Google Sign-In. + + This provider implements Google's OAuth 2.0 authentication flow, + allowing users to sign in with their Google accounts. It handles + the OAuth flow and standardizes the user information format. + """ + + def __init__( + self, + client_id: str, + client_secret: str, + redirect_uri: str, + scopes: list[str] | None = None, + ): + """ + Initialize the Google OAuth provider. + + Args: + client_id: Google OAuth client ID from Google Cloud Console + client_secret: Google OAuth client secret + redirect_uri: Callback URL for OAuth flow completion + scopes: Optional list of Google OAuth scopes to request. + If not provided, uses default scopes for basic profile + and email access. 
+ """ + default_scopes = [ + "openid", + "https://www.googleapis.com/auth/userinfo.email", + "https://www.googleapis.com/auth/userinfo.profile", + ] + + super().__init__( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + scopes=scopes or default_scopes, + authorize_endpoint="https://accounts.google.com/o/oauth2/v2/auth", + token_endpoint="https://oauth2.googleapis.com/token", + userinfo_endpoint="https://www.googleapis.com/oauth2/v3/userinfo", + provider_name="google", + ) + + async def get_authorization_url( + self, state: str | None = None, pkce: bool = True, extra_params: dict[str, str] | None = None + ) -> dict[str, str]: + """ + Get Google authorization URL with additional parameters. + + Adds Google-specific parameters like access_type=offline to + request a refresh token. + + Args: + state: Optional state parameter for CSRF protection + pkce: Whether to use PKCE for enhanced security + extra_params: Additional query parameters to include + + Returns: + Dict with authorization URL and state/PKCE parameters + """ + if extra_params is None: + extra_params = {} + + extra_params["access_type"] = "offline" + extra_params["prompt"] = "consent" + + return await super().get_authorization_url(state, pkce, extra_params) + + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """ + Process Google user info into standardized format. + + Args: + user_info: Raw user info from Google containing fields like + sub, email, name, picture, etc. 
+ + Returns: + Standardized user info + """ + return OAuthUserInfo( + provider="google", + provider_user_id=str(user_info.get("sub", "")), + email=user_info.get("email"), + email_verified=user_info.get("email_verified", False), + name=user_info.get("name"), + given_name=user_info.get("given_name"), + family_name=user_info.get("family_name"), + username=None, + picture=user_info.get("picture"), + raw_data=user_info, + ) + + @classmethod + def create(cls, client_id: str, client_secret: str, redirect_uri: str) -> "GoogleOAuthProvider": + """ + Factory method to create an instance with default settings. + + Args: + client_id: Google OAuth client ID + client_secret: Google OAuth client secret + redirect_uri: Callback URL for OAuth flow completion + + Returns: + Configured GoogleOAuthProvider instance + """ + return cls(client_id=client_id, client_secret=client_secret, redirect_uri=redirect_uri) diff --git a/backend/src/infrastructure/auth/oauth/schemas.py b/backend/src/infrastructure/auth/oauth/schemas.py new file mode 100644 index 00000000..e6000697 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/schemas.py @@ -0,0 +1,67 @@ +from datetime import UTC, datetime +from typing import Any + +from pydantic import BaseModel, Field + + +class OAuthState(BaseModel): + """ + Store data needed for OAuth state validation. + + Used to maintain state between authorization request and callback. + """ + + state: str = Field(description="State parameter for CSRF protection") + provider: str = Field(description="OAuth provider name") + code_verifier: str | None = Field(None, description="PKCE code verifier") + redirect_to: str | None = Field(None, description="Where to redirect after authentication") + created_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + + +class OAuthUserInfo(BaseModel): + """ + Standardized user information from OAuth providers. + + Each provider's raw user information is normalized to this format. 
+ """ + + provider: str = Field(description="OAuth provider name") + provider_user_id: str = Field(description="User ID from the provider") + email: str | None = Field(None, description="User's email address") + email_verified: bool = Field(default=False, description="Whether email is verified") + name: str | None = Field(None, description="User's full name") + given_name: str | None = Field(None, description="User's given/first name") + family_name: str | None = Field(None, description="User's family/last name") + username: str | None = Field(None, description="Username if available") + picture: str | None = Field(None, description="URL to user's profile picture") + raw_data: dict[str, Any] = Field(default_factory=dict, description="Raw provider data") + + +class OAuthToken(BaseModel): + """ + OAuth token information. + + Stores token data received from OAuth providers. + """ + + access_token: str + token_type: str = "Bearer" + id_token: str | None = None + refresh_token: str | None = None + expires_in: int | None = None + scope: str | None = None + created_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + + @property + def is_expired(self) -> bool: + """ + Check if the token is expired. 
+ + Returns: + True if expired, False if still valid or no expiration set + """ + if not self.expires_in: + return False + + expiry = self.created_at.timestamp() + self.expires_in + return datetime.now(UTC).timestamp() > expiry diff --git a/backend/src/infrastructure/auth/oauth/services.py b/backend/src/infrastructure/auth/oauth/services.py new file mode 100644 index 00000000..51aad390 --- /dev/null +++ b/backend/src/infrastructure/auth/oauth/services.py @@ -0,0 +1,125 @@ +import secrets +from datetime import UTC, datetime +from typing import Any, cast + +from sqlalchemy.ext.asyncio import AsyncSession + +from ....modules.user.crud import crud_users +from ....modules.user.enums import OAuthProvider +from ....modules.user.schemas import UserCreateInternal, UserRead +from ...auth.utils import get_password_hash +from ...logging import get_logger +from .schemas import OAuthUserInfo + +logger = get_logger() + + +class OAuthAccountService: + """ + Service for handling OAuth account creation and linking. + + This service is responsible for: + - Linking OAuth accounts to existing users + - Creating new users from OAuth account information + - Handling the user lookup during OAuth authentication + """ + + async def get_or_create_user(self, oauth_user_info: OAuthUserInfo, db: AsyncSession) -> tuple[dict[str, Any], bool]: + """ + Get existing user or create a new one from OAuth information. 
+ + Args: + oauth_user_info: Standardized OAuth user info + db: SQLAlchemy async session + + Returns: + Tuple of (user_dict, created) where created is True if a new user was created + + Raises: + ValueError: If required user data is missing + """ + provider_field = f"{oauth_user_info.provider}_id" + provider_id_filter = {provider_field: oauth_user_info.provider_user_id} + + user = await crud_users.get(db=db, filter_by=provider_id_filter) + + if user: + logger.info(f"Found existing user by {provider_field}") + return user, False + + if oauth_user_info.email: + user = await crud_users.get(db=db, filter_by={"email": oauth_user_info.email}) + + if user: + logger.info(f"Found existing user by email {oauth_user_info.email}") + + update_data = {provider_field: oauth_user_info.provider_user_id, "oauth_updated_at": datetime.now(UTC)} + + user = await crud_users.update(db=db, object_id=user["id"], object=update_data) + return cast(dict[str, Any], user), False + + logger.info("Creating new user from OAuth information") + return await self._create_user_from_oauth(oauth_user_info, db) + + async def _create_user_from_oauth(self, oauth_user_info: OAuthUserInfo, db: AsyncSession) -> tuple[dict[str, Any], bool]: + """ + Create a new user from OAuth user information. 
+ + Args: + oauth_user_info: Standardized OAuth user info + db: SQLAlchemy async session + + Returns: + Tuple of (user_dict, True) indicating a new user was created + + Raises: + ValueError: If required user data is missing for account creation + """ + if not oauth_user_info.email: + logger.warning("Cannot create user without email") + raise ValueError("Email is required for user creation") + + username = oauth_user_info.username + if not username: + username_base = oauth_user_info.given_name or oauth_user_info.name or oauth_user_info.email.split("@")[0] + + username_base = username_base.lower().replace(" ", "_") + username = username_base + + i = 1 + while await crud_users.exists(db=db, filter_by={"username": username}): + username = f"{username_base}{i}" + i += 1 + else: + if await crud_users.exists(db=db, filter_by={"username": username}): + username_base = username + i = 1 + while await crud_users.exists(db=db, filter_by={"username": username}): + username = f"{username_base}{i}" + i += 1 + + name = oauth_user_info.name or f"{oauth_user_info.given_name or ''} {oauth_user_info.family_name or ''}".strip() + if not name and oauth_user_info.email: + name = oauth_user_info.email.split("@")[0] + + random_password = secrets.token_urlsafe(16) + + user_data = UserCreateInternal( + username=username, + email=oauth_user_info.email, + name=name, + hashed_password=get_password_hash(random_password), + email_verified=oauth_user_info.email_verified, + google_id=oauth_user_info.provider_user_id if oauth_user_info.provider == OAuthProvider.GOOGLE.value else None, + github_id=oauth_user_info.provider_user_id if oauth_user_info.provider == OAuthProvider.GITHUB.value else None, + oauth_provider=oauth_user_info.provider, + oauth_created_at=datetime.now(UTC), + oauth_updated_at=datetime.now(UTC), + ) + + user = await crud_users.create(db=db, object=user_data, schema_to_select=UserRead) + + return user, True + + +oauth_account_service = OAuthAccountService() diff --git 
a/backend/src/infrastructure/auth/routes.py b/backend/src/infrastructure/auth/routes.py new file mode 100644 index 00000000..ac309e17 --- /dev/null +++ b/backend/src/infrastructure/auth/routes.py @@ -0,0 +1,463 @@ +import inspect +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response, status +from fastapi.responses import RedirectResponse +from fastapi.security import OAuth2PasswordRequestForm +from sqlalchemy.ext.asyncio import AsyncSession + +from ...modules.user.crud import crud_users +from ...modules.user.enums import OAuthProvider +from ..config.settings import get_settings +from ..database.session import async_session +from ..logging import get_logger +from .http_exceptions import UnauthorizedException +from .oauth.dependencies import get_google_provider, get_oauth_state, get_oauth_state_storage +from .oauth.provider import AbstractOAuthProvider +from .oauth.schemas import OAuthState, OAuthToken +from .oauth.services import oauth_account_service +from .session.dependencies import authenticate_user, get_current_session_data, get_session_manager +from .session.manager import SessionManager +from .session.schemas import SessionData +from .session.storage import AbstractSessionStorage + +settings = get_settings() +logger = get_logger() + +router = APIRouter(tags=["Authentication"]) + + +@router.post( + "/login", + summary="User Login", + description=""" + Authenticates a user and creates a new session. + + This endpoint accepts username/email and password credentials and verifies them. + On successful authentication: + - A new session is created + - A session ID is set as an HTTP-only cookie + - A CSRF token is generated for protection against CSRF attacks + + The endpoint is protected by rate limiting to prevent brute force attacks. + After multiple failed attempts, further login attempts will be temporarily blocked. 
+ """, + responses={ + 200: {"description": "Login successful, session created"}, + 401: {"description": "Authentication failed or rate limit exceeded"}, + 429: {"description": "Too many login attempts, try again later"}, + }, + response_description="CSRF token for use in subsequent requests", +) +async def login( + request: Request, + response: Response, + form_data: Annotated[OAuth2PasswordRequestForm, Depends()], + db: Annotated[AsyncSession, Depends(async_session)], + session_manager: Annotated[SessionManager, Depends(get_session_manager)], +) -> dict[str, str]: + """Login endpoint to get session cookies. + + The session ID is set as an HTTP-only cookie. + The CSRF token is set as a regular cookie and returned in the response. + This endpoint is protected by rate limiting to prevent brute force attacks. + """ + ip_address = request.client.host if request.client and hasattr(request.client, "host") else "unknown" + + is_allowed, attempts_remaining = await session_manager.track_login_attempt( + ip_address=ip_address, username=form_data.username, success=False + ) + + if not is_allowed: + logger.warning(f"Login rate limit exceeded for {form_data.username} from IP {ip_address}") + raise UnauthorizedException("Too many failed login attempts. 
Please try again later.") + + user = await authenticate_user(username_or_email=form_data.username, password=form_data.password, db=db) + + if user is None: + logger.warning(f"Failed login attempt for {form_data.username} from IP {ip_address}") + raise UnauthorizedException("Incorrect username or password") + + try: + await session_manager.track_login_attempt(ip_address=ip_address, username=form_data.username, success=True) + + session_id, csrf_token = await session_manager.create_session( + request=request, + user_id=user["id"], + metadata={ + "login_type": "password", + "username": user["username"], + }, + ) + + session_manager.set_session_cookies( + response=response, + session_id=session_id, + csrf_token=csrf_token, + secure=settings.SESSION_SECURE_COOKIES, + path="/", + ) + + return {"csrf_token": csrf_token} + + except Exception as e: + logger.error(f"Error during login: {str(e)}", exc_info=True) + raise UnauthorizedException("An error occurred during login") + + +@router.post( + "/logout", + summary="User Logout", + description=""" + Terminates the current user session. + + This endpoint: + - Invalidates the active session in the storage backend + - Clears all session-related cookies from the client + + After logout, the user will need to authenticate again to access + protected resources. Any existing session tokens will no longer be valid. 
+ """, + responses={200: {"description": "Logout successful, session terminated"}, 401: {"description": "Not authenticated"}}, + response_description="Confirmation of successful logout", +) +async def logout( + request: Request, + response: Response, + session_data: Annotated[SessionData, Depends(get_current_session_data)], + session_manager: Annotated[SessionManager, Depends(get_session_manager)], +) -> dict[str, str]: + """Logout endpoint to terminate the session and clear cookies.""" + await session_manager.terminate_session(session_data.session_id) + session_manager.clear_session_cookies(response) + + return {"message": "Logged out successfully"} + + +@router.post( + "/refresh-csrf", + summary="Refresh CSRF Token", + description=""" + Generates a new CSRF token for the current session. + + This endpoint should be called to obtain a fresh CSRF token when: + - The current token is about to expire + - After a certain period of inactivity + - When increased security is needed for sensitive operations + + The new token is returned in the response and also set as a cookie. 
+ """, + responses={200: {"description": "New CSRF token generated successfully"}, 401: {"description": "Not authenticated"}}, + response_description="The new CSRF token for the session", +) +async def refresh_csrf_token( + request: Request, + response: Response, + session_data: Annotated[SessionData, Depends(get_current_session_data)], + session_manager: Annotated[SessionManager, Depends(get_session_manager)], +) -> dict[str, str]: + """Generate a new CSRF token for the current session.""" + csrf_token = await session_manager.regenerate_csrf_token( + user_id=session_data.user_id, + session_id=session_data.session_id, + ) + + response.set_cookie( + key="csrf_token", + value=csrf_token, + max_age=int(session_manager.session_timeout.total_seconds()), + path="/", + httponly=False, + secure=settings.SESSION_SECURE_COOKIES, + samesite="lax", + ) + + return {"csrf_token": csrf_token} + + +@router.get( + "/oauth/google", + summary="Initiate Google OAuth Login", + description=""" + Starts the OAuth 2.0 authentication flow with Google. + + This endpoint generates the authorization URL that the user should be + redirected to in order to authenticate with Google. The flow includes: + - Creation of a state parameter for CSRF protection + - Generation of PKCE code challenge (for enhanced security) + - Setting appropriate OAuth scopes for profile access + + After successful authentication with Google, the user will be redirected + back to this application's callback endpoint. + + An optional redirect_uri can be specified to control where the user + is sent after the entire authentication process completes. 
+ """, + responses={ + 200: {"description": "Authorization URL generated successfully"}, + 500: {"description": "Failed to initiate Google login"}, + }, + response_description="The Google authorization URL to redirect the user to", +) +async def oauth_google_login( + request: Request, + redirect_uri: str | None = Query(None), + oauth_provider: AbstractOAuthProvider = Depends(get_google_provider), + state_storage: AbstractSessionStorage[OAuthState] = Depends(get_oauth_state_storage), +) -> dict[str, str]: + """ + Initiate OAuth login flow for Google. + + Args: + request: The request object + redirect_uri: Optional URI to redirect after successful authentication + + Returns: + Dict with authorization URL to redirect the user to Google + """ + try: + auth_data = await oauth_provider.get_authorization_url() + + state_obj = OAuthState( + state=auth_data["state"], + provider=OAuthProvider.GOOGLE.value, + redirect_to=redirect_uri, + code_verifier=auth_data.get("code_verifier"), + ) + + await state_storage.create(data=state_obj, session_id=auth_data["state"]) + + return {"url": auth_data["url"]} + + except Exception as e: + logger.error(f"Error initiating Google OAuth: {str(e)}", exc_info=True) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to initiate Google login") + + +def _is_provider_valid(provider_value: Any, expected_provider: str) -> bool: + """Check if a provider value matches the expected provider name. + + This handles different types of values (strings, objects, mocks) safely. 
+ + Args: + provider_value: The provider value to check (could be a string, object, or mock) + expected_provider: The expected provider name (e.g., "google" or "github") + + Returns: + bool: True if the provider is valid, False otherwise + """ + if provider_value is None: + return False + + if isinstance(provider_value, str): + return provider_value.lower() == expected_provider.lower() + + if hasattr(provider_value, "name") and isinstance(getattr(provider_value, "name", None), str): + name_value: str = getattr(provider_value, "name") + return name_value.lower() == expected_provider.lower() + + if inspect.iscoroutine(provider_value) or inspect.isawaitable(provider_value): + return expected_provider.lower() in str(provider_value).lower() + + try: + return expected_provider.lower() in str(provider_value).lower() + except Exception: + return False + + +@router.get( + "/oauth/callback/google", + summary="Google OAuth Callback Handler", + description=""" + Processes the authentication callback from Google OAuth. + + This endpoint handles the authorization code returned by Google after + the user has successfully authenticated. The process includes: + - Validating the state parameter to prevent CSRF attacks + - Exchanging the authorization code for access/refresh tokens + - Fetching the user profile from Google + - Creating or updating the user account in the system + - Establishing a new session for the authenticated user + + Two response formats are supported: + - redirect: Redirects to the frontend with success/error parameters (default) + - json: Returns user information and tokens as a JSON response + + The json format is useful for mobile apps or single-page applications that + handle the OAuth flow programmatically. 
+ """, + responses={ + 200: {"description": "Authentication successful (JSON response)"}, + 302: {"description": "Authentication successful (redirect response)"}, + 400: {"description": "Invalid OAuth state or other parameter"}, + 401: {"description": "Authentication failed"}, + 500: {"description": "Server error during authentication"}, + }, + response_description="Authentication result with session cookies set", +) +async def oauth_google_callback( + request: Request, + response: Response, + code: str = Query(...), + state: str = Query(...), + response_format: str = Query("redirect", description="Response format, either 'redirect' or 'json'"), + oauth_provider: AbstractOAuthProvider = Depends(get_google_provider), + state_storage: AbstractSessionStorage[OAuthState] = Depends(get_oauth_state_storage), + db: AsyncSession = Depends(async_session), + session_manager: SessionManager = Depends(get_session_manager), +): + """ + Handle OAuth callback from Google. + + Args: + request: The request object + response: The response object + code: Authorization code from Google + state: State parameter for CSRF protection + response_format: Format of the response, either 'redirect' (default) or 'json' + + Returns: + Redirect to frontend with success/error indication or JSON response with user info + """ + state_data = await get_oauth_state(state, state_storage) + + if not state_data: + logger.warning(f"Invalid OAuth state in callback: {state}") + if response_format == "json": + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid OAuth state") + return RedirectResponse( + url=f"/login?error=oauth_error&provider={OAuthProvider.GOOGLE.value}&reason=invalid_state", + status_code=status.HTTP_302_FOUND, + ) + + provider_valid = False + try: + provider_valid = _is_provider_valid(state_data.provider, OAuthProvider.GOOGLE.value) + except Exception as e: + logger.warning(f"Error checking provider type: {e}") + provider_valid = False + + if not provider_valid: + 
expected = OAuthProvider.GOOGLE.value + actual = getattr(state_data, "provider", "unknown") + logger.warning(f"Provider mismatch in OAuth callback: expected {expected}, got {actual}") + if response_format == "json": + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Provider mismatch") + return RedirectResponse( + url=f"/login?error=oauth_error&provider={OAuthProvider.GOOGLE.value}&reason=provider_mismatch", + status_code=status.HTTP_302_FOUND, + ) + + try: + token_data = await oauth_provider.exchange_code(code, code_verifier=state_data.code_verifier) + + token = OAuthToken( + access_token=token_data["access_token"], + token_type=token_data.get("token_type", "Bearer"), + id_token=token_data.get("id_token"), + refresh_token=token_data.get("refresh_token"), + expires_in=token_data.get("expires_in"), + scope=token_data.get("scope"), + ) + + user_info_raw = await oauth_provider.get_user_info(token.access_token) + user_info = await oauth_provider.process_user_info(user_info_raw) + + user, is_new_user = await oauth_account_service.get_or_create_user(user_info, db) + + session_id, csrf_token = await session_manager.create_session( + request=request, + user_id=user["id"], + metadata={ + "login_type": "oauth", + "oauth_provider": OAuthProvider.GOOGLE.value, + "username": user["username"], + "is_new_user": is_new_user, + }, + ) + + session_manager.set_session_cookies( + response=response, + session_id=session_id, + csrf_token=csrf_token, + secure=settings.SESSION_SECURE_COOKIES, + path="/", + ) + + await state_storage.delete(state) + + if response_format == "json": + return { + "success": True, + "user": {"id": user["id"], "username": user["username"], "email": user["email"], "is_new_user": is_new_user}, + "csrf_token": csrf_token, + } + + redirect_to = "/" + try: + if state_data.redirect_to: + redirect_to = str(state_data.redirect_to) + except Exception as e: + logger.warning(f"Error getting redirect_to value: {e}, using default") + + return 
RedirectResponse( + url=redirect_to, + status_code=status.HTTP_302_FOUND, + ) + + except Exception as e: + logger.error(f"Error in Google OAuth callback: {str(e)}", exc_info=True) + + if response_format == "json": + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"OAuth authentication failed: {str(e)}" + ) + + return RedirectResponse( + url=f"/login?error=oauth_error&provider={OAuthProvider.GOOGLE.value}", + status_code=status.HTTP_302_FOUND, + ) + + +@router.get("/check-auth") +async def check_auth( + session_data: Annotated[SessionData | None, Depends(get_current_session_data)], + db: AsyncSession = Depends(async_session), +) -> dict[str, Any]: + """ + Check if the user is authenticated and return basic user information. + + This is useful for clients to verify authentication status and can be used + with both cookie-based and API-based authentication. + + Args: + session_data: The session data if the user is authenticated + + Returns: + Authentication status and user information if authenticated + """ + if not session_data: + return {"authenticated": False, "message": "Not authenticated"} + + try: + user = await crud_users.get(db=db, id=session_data.user_id) + + if not user: + return {"authenticated": False, "message": "User not found"} + + return { + "authenticated": True, + "user": { + "id": user["id"], + "username": user["username"], + "email": user["email"], + "oauth_provider": user.get("oauth_provider"), + }, + "session": { + "created_at": session_data.created_at.isoformat() if session_data.created_at else None, + "last_activity": session_data.last_activity.isoformat() if session_data.last_activity else None, + }, + } + except Exception as e: + logger.error(f"Error checking authentication: {str(e)}", exc_info=True) + return {"authenticated": False, "message": "Error checking authentication status"} diff --git a/backend/src/infrastructure/auth/session/__init__.py b/backend/src/infrastructure/auth/session/__init__.py new file 
mode 100644 index 00000000..57676ff7 --- /dev/null +++ b/backend/src/infrastructure/auth/session/__init__.py @@ -0,0 +1,20 @@ +from .dependencies import ( + authenticate_user, + get_current_session_data, + get_current_superuser, + get_current_user, + get_optional_user, +) +from .manager import SessionManager +from .schemas import CSRFToken, SessionData + +__all__ = [ + "get_current_user", + "get_optional_user", + "get_current_superuser", + "authenticate_user", + "get_current_session_data", + "SessionData", + "CSRFToken", + "SessionManager", +] diff --git a/backend/src/infrastructure/auth/session/backends/__init__.py b/backend/src/infrastructure/auth/session/backends/__init__.py new file mode 100644 index 00000000..592d6db3 --- /dev/null +++ b/backend/src/infrastructure/auth/session/backends/__init__.py @@ -0,0 +1,7 @@ +from .memory import MemorySessionStorage +from .redis import RedisSessionStorage + +__all__ = [ + "RedisSessionStorage", + "MemorySessionStorage", +] diff --git a/backend/src/infrastructure/auth/session/backends/memcached.py b/backend/src/infrastructure/auth/session/backends/memcached.py new file mode 100644 index 00000000..5abc78f4 --- /dev/null +++ b/backend/src/infrastructure/auth/session/backends/memcached.py @@ -0,0 +1,350 @@ +import hashlib +import json +from typing import TypeVar + +try: + import aiomcache +except ImportError: + raise ImportError( + "The aiomcache package is not installed. 
" + "Please install it with 'pip install aiomcache' or 'pip install -e \".[memcached]\"'" + ) + +from pydantic import BaseModel + +from ....config.settings import get_settings +from ....logging import get_logger +from ..base import AbstractSessionStorage + +T = TypeVar("T", bound=BaseModel) +settings = get_settings() +logger = get_logger() + + +class MemcachedSessionStorage(AbstractSessionStorage[T]): + """Memcached implementation of session storage.""" + + def __init__( + self, + prefix: str = "session:", + expiration: int = 1800, + host: str = settings.CACHE_MEMCACHED_HOST, + port: int = settings.CACHE_MEMCACHED_PORT, + pool_size: int = settings.CACHE_MEMCACHED_POOL_SIZE, + ): + """Initialize the Memcached session storage. + + Args: + prefix: Prefix for all session keys + expiration: Default session expiration in seconds + host: Memcached host + port: Memcached port + pool_size: Memcached connection pool size + """ + super().__init__(prefix=prefix, expiration=expiration) + + self.client = aiomcache.Client( + host=host, + port=port, + pool_size=pool_size, + ) + + self.user_sessions_prefix = f"{prefix}user:" + + def _encode_key(self, key: str) -> bytes: + """Encode a key for Memcached. + + Memcached has a 250 byte key limit, so we hash long keys. + + Args: + key: The key to encode + + Returns: + The encoded key as bytes + """ + if len(key) > 240: + key_hash = hashlib.sha256(key.encode()).hexdigest()[:32] + key = f"{key[:200]}:{key_hash}" + return key.encode("utf-8") + + def get_user_sessions_key(self, user_id: int) -> str: + """Get the key for a user's sessions. + + Args: + user_id: The user ID + + Returns: + The Memcached key for the user's sessions + """ + return f"{self.user_sessions_prefix}{user_id}" + + async def create(self, data: T, session_id: str | None = None, expiration: int | None = None) -> str: + """Create a new session in Memcached. + + Args: + data: Session data (must be a Pydantic model) + session_id: Optional session ID. 
If not provided, one will be generated + expiration: Optional custom expiration in seconds + + Returns: + The session ID + """ + if session_id is None: + session_id = self.generate_session_id() + + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + json_data = data.model_dump_json().encode("utf-8") + + try: + await self.client.set(self._encode_key(key), json_data, exptime=exp) + + if hasattr(data, "user_id"): + user_id = getattr(data, "user_id") + user_sessions_key = self.get_user_sessions_key(user_id) + + user_sessions_data = await self.client.get(self._encode_key(user_sessions_key)) + + if user_sessions_data: + try: + user_sessions = json.loads(user_sessions_data.decode("utf-8")) + if session_id not in user_sessions: + user_sessions.append(session_id) + except (json.JSONDecodeError, UnicodeDecodeError): + user_sessions = [session_id] + else: + user_sessions = [session_id] + + user_sessions_json = json.dumps(user_sessions).encode("utf-8") + await self.client.set( + self._encode_key(user_sessions_key), + user_sessions_json, + exptime=exp + 3600, + ) + + logger.debug(f"Created session {session_id} with expiration {exp}s") + return session_id + except Exception as e: + logger.error(f"Error creating session: {e}") + raise + + async def get(self, session_id: str, model_class: type[T]) -> T | None: + """Get session data from Memcached. 
+ + Args: + session_id: The session ID + model_class: The Pydantic model class to decode the data into + + Returns: + The session data or None if session doesn't exist + """ + key = self.get_key(session_id) + + try: + data = await self.client.get(self._encode_key(key)) + if data is None: + return None + + try: + json_data = json.loads(data.decode("utf-8")) + return model_class.model_validate(json_data) + except (json.JSONDecodeError, UnicodeDecodeError) as e: + logger.error(f"Error parsing session data: {e}") + return None + + except Exception as e: + logger.error(f"Error getting session: {e}") + raise + + async def update(self, session_id: str, data: T, reset_expiration: bool = True, expiration: int | None = None) -> bool: + """Update session data in Memcached. + + Args: + session_id: The session ID + data: New session data + reset_expiration: Whether to reset the expiration + expiration: Optional custom expiration in seconds + + Returns: + True if the session was updated, False if it didn't exist + """ + key = self.get_key(session_id) + + try: + if not await self.client.get(self._encode_key(key)): + return False + + json_data = data.model_dump_json().encode("utf-8") + exp = expiration if expiration is not None else self.expiration + + await self.client.set(self._encode_key(key), json_data, exptime=exp) + + if reset_expiration and hasattr(data, "user_id"): + user_id = getattr(data, "user_id") + user_sessions_key = self.get_user_sessions_key(user_id) + + user_sessions_data = await self.client.get(self._encode_key(user_sessions_key)) + + if user_sessions_data: + try: + user_sessions = json.loads(user_sessions_data.decode("utf-8")) + user_sessions_json = json.dumps(user_sessions).encode("utf-8") + await self.client.set( + self._encode_key(user_sessions_key), + user_sessions_json, + exptime=exp + 3600, + ) + except (json.JSONDecodeError, UnicodeDecodeError): + pass + + return True + + except Exception as e: + logger.error(f"Error updating session: {e}") + raise + + 
async def delete(self, session_id: str) -> bool: + """Delete a session from Memcached. + + Args: + session_id: The session ID + + Returns: + True if the session was deleted, False if it didn't exist + """ + key = self.get_key(session_id) + + try: + session_data = await self.client.get(self._encode_key(key)) + if session_data is None: + return False + + await self.client.delete(self._encode_key(key)) + + try: + json_data = json.loads(session_data.decode("utf-8")) + if "user_id" in json_data: + user_id = json_data["user_id"] + user_sessions_key = self.get_user_sessions_key(user_id) + + user_sessions_data = await self.client.get(self._encode_key(user_sessions_key)) + + if user_sessions_data: + try: + user_sessions = json.loads(user_sessions_data.decode("utf-8")) + if session_id in user_sessions: + user_sessions.remove(session_id) + user_sessions_json = json.dumps(user_sessions).encode("utf-8") + await self.client.set( + self._encode_key(user_sessions_key), + user_sessions_json, + exptime=3600 * 24, + ) + except (json.JSONDecodeError, UnicodeDecodeError): + pass + except (json.JSONDecodeError, UnicodeDecodeError): + pass + + return True + except Exception as e: + logger.error(f"Error deleting session: {e}") + raise + + async def extend(self, session_id: str, expiration: int | None = None) -> bool: + """Extend the expiration of a session in Memcached. + + Args: + session_id: The session ID + expiration: Optional custom expiration in seconds + + Returns: + True if the session was extended, False if it didn't exist + + Note: + Memcached doesn't allow extending expiration without updating the value. + We need to get, then set the value again with a new expiration. 
+ """ + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + try: + session_data = await self.client.get(self._encode_key(key)) + if session_data is None: + return False + + await self.client.set(self._encode_key(key), session_data, exptime=exp) + + try: + json_data = json.loads(session_data.decode("utf-8")) + if "user_id" in json_data: + user_id = json_data["user_id"] + user_sessions_key = self.get_user_sessions_key(user_id) + + user_sessions_data = await self.client.get(self._encode_key(user_sessions_key)) + + if user_sessions_data: + await self.client.set( + self._encode_key(user_sessions_key), + user_sessions_data, + exptime=exp + 3600, + ) + except (json.JSONDecodeError, UnicodeDecodeError): + pass + + return True + except Exception as e: + logger.error(f"Error extending session: {e}") + raise + + async def exists(self, session_id: str) -> bool: + """Check if a session exists in Memcached. + + Args: + session_id: The session ID + + Returns: + True if the session exists, False otherwise + """ + key = self.get_key(session_id) + + try: + data = await self.client.get(self._encode_key(key)) + return data is not None + except Exception as e: + logger.error(f"Error checking session existence: {e}") + raise + + async def get_user_sessions(self, user_id: int) -> list[str]: + """Get all session IDs for a user. 
+ + Args: + user_id: The user ID + + Returns: + List of session IDs for the user + """ + user_sessions_key = self.get_user_sessions_key(user_id) + + try: + data = await self.client.get(self._encode_key(user_sessions_key)) + if data is None: + return [] + + try: + user_sessions = json.loads(data.decode("utf-8")) + if isinstance(user_sessions, list): + return [str(session_id) for session_id in user_sessions] + else: + logger.error(f"User sessions data is not a list: {user_sessions}") + return [] + except (json.JSONDecodeError, UnicodeDecodeError) as e: + logger.error(f"Error parsing user sessions data: {e}") + return [] + except Exception as e: + logger.error(f"Error getting user sessions: {e}") + raise + + async def close(self) -> None: + """Close the Memcached connection.""" + await self.client.close() diff --git a/backend/src/infrastructure/auth/session/backends/memory.py b/backend/src/infrastructure/auth/session/backends/memory.py new file mode 100644 index 00000000..333d6adb --- /dev/null +++ b/backend/src/infrastructure/auth/session/backends/memory.py @@ -0,0 +1,229 @@ +import json +import re +from datetime import UTC, datetime, timedelta +from re import Pattern +from typing import TypeVar + +from pydantic import BaseModel + +from ....logging import get_logger +from ..base import AbstractSessionStorage + +T = TypeVar("T", bound=BaseModel) +logger = get_logger() + + +class MemorySessionStorage(AbstractSessionStorage[T]): + """In-memory implementation of session storage for testing.""" + + def __init__( + self, + prefix: str = "session:", + expiration: int = 1800, + ): + """Initialize the in-memory session storage. 
+ + Args: + prefix: Prefix for all session keys + expiration: Default session expiration in seconds + """ + super().__init__(prefix=prefix, expiration=expiration) + self.data: dict[str, bytes] = {} + self.expiry: dict[str, datetime] = {} + + async def create(self, data: T, session_id: str | None = None, expiration: int | None = None) -> str: + """Create a new session in memory. + + Args: + data: Session data (must be a Pydantic model) + session_id: Optional session ID. If not provided, one will be generated + expiration: Optional custom expiration in seconds + + Returns: + The session ID + """ + if session_id is None: + session_id = self.generate_session_id() + + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + json_data = data.model_dump_json() + + value_bytes = json_data.encode("utf-8") if isinstance(json_data, str) else json_data + + self.data[key] = value_bytes + self.expiry[key] = datetime.now(UTC) + timedelta(seconds=exp) + + logger.debug(f"Created session {session_id} with expiration {exp}s") + return session_id + + async def get(self, session_id: str, model_class: type[T]) -> T | None: + """Get session data from memory. + + Args: + session_id: The session ID + model_class: The Pydantic model class to decode the data into + + Returns: + The session data or None if session doesn't exist + """ + key = self.get_key(session_id) + + if self._check_expiry(key): + return None + + data_bytes = self.data.get(key) + if data_bytes is None: + return None + + try: + data_str = data_bytes.decode("utf-8") if isinstance(data_bytes, bytes) else data_bytes + json_data = json.loads(data_str) + return model_class.model_validate(json_data) + except (json.JSONDecodeError, ValueError) as e: + logger.error(f"Error parsing session data: {e}") + return None + + async def update(self, session_id: str, data: T, reset_expiration: bool = True, expiration: int | None = None) -> bool: + """Update session data in memory. 
+ + Args: + session_id: The session ID + data: New session data + reset_expiration: Whether to reset the expiration + expiration: Optional custom expiration in seconds + + Returns: + True if the session was updated, False if it didn't exist + """ + key = self.get_key(session_id) + + if key not in self.data or self._check_expiry(key): + return False + + json_data = data.model_dump_json() + value_bytes = json_data.encode("utf-8") if isinstance(json_data, str) else json_data + + self.data[key] = value_bytes + + if reset_expiration: + exp = expiration if expiration is not None else self.expiration + self.expiry[key] = datetime.now(UTC) + timedelta(seconds=exp) + + return True + + async def delete(self, session_id: str) -> bool: + """Delete a session from memory. + + Args: + session_id: The session ID + + Returns: + True if the session was deleted, False if it didn't exist + """ + key = self.get_key(session_id) + + if key in self.data: + del self.data[key] + if key in self.expiry: + del self.expiry[key] + return True + return False + + async def extend(self, session_id: str, expiration: int | None = None) -> bool: + """Extend the expiration of a session in memory. + + Args: + session_id: The session ID + expiration: Optional custom expiration in seconds + + Returns: + True if the session was extended, False if it didn't exist + """ + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + if key in self.data and not self._check_expiry(key): + self.expiry[key] = datetime.now(UTC) + timedelta(seconds=exp) + return True + return False + + async def exists(self, session_id: str) -> bool: + """Check if a session exists in memory. + + Args: + session_id: The session ID + + Returns: + True if the session exists, False otherwise + """ + key = self.get_key(session_id) + return key in self.data and not self._check_expiry(key) + + async def _scan_iter(self, match: str | None = None) -> list[str]: + """Scan for keys matching a pattern. 
+ + Args: + match: Pattern to match + + Returns: + List of matching keys + """ + if match: + pattern = match.replace("*", ".*").replace("?", ".") + pattern = f"^{pattern}$" + regex: Pattern = re.compile(pattern) + + matching_keys = [] + for key in list(self.data.keys()): + if self._check_expiry(key): + continue + + if regex.match(key): + matching_keys.append(key) + return matching_keys + else: + return [key for key in list(self.data.keys()) if not self._check_expiry(key)] + + def _check_expiry(self, key: str) -> bool: + """Check if a key has expired and remove it if so. + + Args: + key: The key to check + + Returns: + True if expired (and removed), False otherwise + """ + if key in self.expiry and datetime.now(UTC) > self.expiry[key]: + del self.data[key] + del self.expiry[key] + return True + return False + + async def close(self) -> None: + """Clear all data.""" + self.data.clear() + self.expiry.clear() + + async def delete_pattern(self, pattern: str) -> int: + """Delete all keys matching a pattern. + + Args: + pattern: The pattern to match keys (e.g., "login:*") + + Returns: + Number of keys deleted + """ + matching_keys = await self._scan_iter(match=pattern) + + deleted_count = 0 + for key in matching_keys: + if key in self.data: + del self.data[key] + if key in self.expiry: + del self.expiry[key] + deleted_count += 1 + + logger.debug(f"Deleted {deleted_count} keys matching pattern '{pattern}'") + return deleted_count diff --git a/backend/src/infrastructure/auth/session/backends/redis.py b/backend/src/infrastructure/auth/session/backends/redis.py new file mode 100644 index 00000000..6801e7cd --- /dev/null +++ b/backend/src/infrastructure/auth/session/backends/redis.py @@ -0,0 +1,364 @@ +import json +from collections.abc import Awaitable +from typing import Any, TypeVar, cast + +try: + from redis.asyncio import Redis as AsyncRedis + from redis.exceptions import RedisError +except ImportError: + raise ImportError( + "The redis package is not installed. 
Please install it with 'pip install redis' or 'pip install -e \".[redis]\"'" + ) + +from pydantic import BaseModel + +from ....config.settings import get_settings +from ....logging import get_logger +from ..base import AbstractSessionStorage + +T = TypeVar("T", bound=BaseModel) +settings = get_settings() +logger = get_logger() + + +class RedisSessionStorage(AbstractSessionStorage[T]): + """Redis implementation of session storage.""" + + client: AsyncRedis + + def __init__( + self, + prefix: str = "session:", + expiration: int = 1800, + host: str = settings.CACHE_REDIS_HOST, + port: int = settings.CACHE_REDIS_PORT, + db: int = settings.CACHE_REDIS_DB, + password: str | None = settings.CACHE_REDIS_PASSWORD, + pool_size: int = settings.CACHE_REDIS_POOL_SIZE, + connect_timeout: int = settings.CACHE_REDIS_CONNECT_TIMEOUT, + ): + """Initialize the Redis session storage. + + Args: + prefix: Prefix for all session keys + expiration: Default session expiration in seconds + host: Redis host + port: Redis port + db: Redis database number + password: Redis password + pool_size: Redis connection pool size + connect_timeout: Redis connection timeout + """ + super().__init__(prefix=prefix, expiration=expiration) + + self.client = AsyncRedis( + host=host, + port=port, + db=db, + password=password, + socket_timeout=connect_timeout, + socket_connect_timeout=connect_timeout, + socket_keepalive=True, + decode_responses=False, + max_connections=pool_size, + ) + + self.user_sessions_prefix = f"{prefix}user:" + + def get_user_sessions_key(self, user_id: int) -> str: + """Get the key for a user's sessions set. + + Args: + user_id: The user ID + + Returns: + The Redis key for the user's sessions set + """ + return f"{self.user_sessions_prefix}{user_id}" + + async def create(self, data: T, session_id: str | None = None, expiration: int | None = None) -> str: + """Create a new session in Redis. + + Args: + data: Session data (must be a Pydantic model) + session_id: Optional session ID. 
If not provided, one will be generated + expiration: Optional custom expiration in seconds + + Returns: + The session ID + + Raises: + RedisError: If there is an error with Redis + """ + if session_id is None: + session_id = self.generate_session_id() + + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + json_data = data.model_dump_json() + + try: + pipeline = self.client.pipeline() + pipeline.set(key, json_data, ex=exp) + + if hasattr(data, "user_id"): + user_id = getattr(data, "user_id") + user_sessions_key = self.get_user_sessions_key(user_id) + + pipeline.sadd(user_sessions_key, session_id) + + pipeline.expire(user_sessions_key, exp + 3600) + + await pipeline.execute() + logger.debug(f"Created session {session_id} with expiration {exp}s") + return session_id + except RedisError as e: + logger.error(f"Error creating session: {e}") + raise + + async def get(self, session_id: str, model_class: type[T]) -> T | None: + """Get session data from Redis. + + Args: + session_id: The session ID + model_class: The Pydantic model class to decode the data into + + Returns: + The session data or None if session doesn't exist + + Raises: + RedisError: If there is an error with Redis + ValueError: If the data cannot be parsed + """ + key = self.get_key(session_id) + + try: + data = await self.client.get(key) + if data is None: + return None + + try: + json_data = json.loads(data) + return model_class.model_validate(json_data) + except (json.JSONDecodeError, ValueError) as e: + logger.error(f"Error parsing session data: {e}") + return None + + except RedisError as e: + logger.error(f"Error getting session: {e}") + raise + + async def update(self, session_id: str, data: T, reset_expiration: bool = True, expiration: int | None = None) -> bool: + """Update session data in Redis. 
+ + Args: + session_id: The session ID + data: New session data + reset_expiration: Whether to reset the expiration + expiration: Optional custom expiration in seconds + + Returns: + True if the session was updated, False if it didn't exist + + Raises: + RedisError: If there is an error with Redis + """ + key = self.get_key(session_id) + + try: + if not await self.client.exists(key): + return False + + json_data = data.model_dump_json() + pipeline = self.client.pipeline() + + if reset_expiration: + exp = expiration if expiration is not None else self.expiration + pipeline.set(key, json_data, ex=exp) + + if hasattr(data, "user_id"): + user_id = getattr(data, "user_id") + user_sessions_key = self.get_user_sessions_key(user_id) + pipeline.expire(user_sessions_key, exp + 3600) + else: + ttl = await self.client.ttl(key) + if ttl > 0: + pipeline.set(key, json_data, ex=ttl) + else: + exp = expiration if expiration is not None else self.expiration + pipeline.set(key, json_data, ex=exp) + + if hasattr(data, "user_id"): + user_id = getattr(data, "user_id") + user_sessions_key = self.get_user_sessions_key(user_id) + pipeline.expire(user_sessions_key, exp + 3600) + + await pipeline.execute() + return True + + except RedisError as e: + logger.error(f"Error updating session: {e}") + raise + + async def delete(self, session_id: str) -> bool: + """Delete a session from Redis. 
+ + Args: + session_id: The session ID + + Returns: + True if the session was deleted, False if it didn't exist + + Raises: + RedisError: If there is an error with Redis + """ + key = self.get_key(session_id) + + try: + data = await self.client.get(key) + if data is None: + return False + + pipeline = self.client.pipeline() + + pipeline.delete(key) + + try: + json_data = json.loads(data) + if "user_id" in json_data: + user_id = json_data["user_id"] + user_sessions_key = self.get_user_sessions_key(user_id) + pipeline.srem(user_sessions_key, session_id) + except (json.JSONDecodeError, ValueError): + pass + + result = await pipeline.execute() + return bool(result[0] > 0) + except RedisError as e: + logger.error(f"Error deleting session: {e}") + raise + + async def extend(self, session_id: str, expiration: int | None = None) -> bool: + """Extend the expiration of a session in Redis. + + Args: + session_id: The session ID + expiration: Optional custom expiration in seconds + + Returns: + True if the session was extended, False if it didn't exist + + Raises: + RedisError: If there is an error with Redis + """ + key = self.get_key(session_id) + exp = expiration if expiration is not None else self.expiration + + try: + data = await self.client.get(key) + if data is None: + return False + + pipeline = self.client.pipeline() + + pipeline.expire(key, exp) + + try: + json_data = json.loads(data) + if "user_id" in json_data: + user_id = json_data["user_id"] + user_sessions_key = self.get_user_sessions_key(user_id) + pipeline.expire(user_sessions_key, exp + 3600) + except (json.JSONDecodeError, ValueError): + pass + + results = await pipeline.execute() + return bool(results[0]) + + except RedisError as e: + logger.error(f"Error extending session: {e}") + raise + + async def exists(self, session_id: str) -> bool: + """Check if a session exists in Redis. 
+ + Args: + session_id: The session ID + + Returns: + True if the session exists, False otherwise + + Raises: + RedisError: If there is an error with Redis + """ + key = self.get_key(session_id) + + try: + exists_result = await self.client.exists(key) + return bool(exists_result) + except RedisError as e: + logger.error(f"Error checking session existence: {e}") + raise + + async def get_user_sessions(self, user_id: int) -> list[str]: + """Get all session IDs for a user. + + Args: + user_id: The user ID + + Returns: + List of session IDs for the user + + Raises: + RedisError: If there is an error with Redis + """ + user_sessions_key = self.get_user_sessions_key(user_id) + + try: + members = await cast(Awaitable[set[Any]], self.client.smembers(user_sessions_key)) + return [m.decode("utf-8") if isinstance(m, bytes) else m for m in members] + except RedisError as e: + logger.error(f"Error getting user sessions: {e}") + raise + + async def close(self) -> None: + """Close the Redis connection.""" + await self.client.close() + + async def delete_pattern(self, pattern: str) -> int: + """Delete all Redis keys matching a pattern. + + This method is useful for bulk cleanup operations like clearing + expired rate limiting keys or other grouped data. 
+ + Args: + pattern: The pattern to match keys (e.g., "login:*") + + Returns: + Number of keys deleted + + Raises: + RedisError: If there is an error with Redis + """ + try: + matched_keys = [] + async for key in self.client.scan_iter(match=pattern): + matched_keys.append(key) + + if not matched_keys: + return 0 + + pipeline = self.client.pipeline() + for key in matched_keys: + pipeline.delete(key) + + results = await pipeline.execute() + deleted_count = sum(1 for result in results if result > 0) + + logger.debug(f"Deleted {deleted_count} keys matching pattern '{pattern}'") + return deleted_count + + except RedisError as e: + logger.error(f"Error deleting keys with pattern '{pattern}': {e}") + raise diff --git a/backend/src/infrastructure/auth/session/base.py b/backend/src/infrastructure/auth/session/base.py new file mode 100644 index 00000000..ff4654a0 --- /dev/null +++ b/backend/src/infrastructure/auth/session/base.py @@ -0,0 +1,137 @@ +"""Abstract base for session storage backends. + +Lives in its own module so that the concrete backend implementations +under ``backends/`` can subclass it without participating in the +``storage.py`` factory's import cycle. The factory (``storage.py``) +imports both this base and the concrete backends; concrete backends +only import this base. +""" + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar +from uuid import uuid4 + +from pydantic import BaseModel + +T = TypeVar("T", bound=BaseModel) + + +class AbstractSessionStorage(Generic[T], ABC): + """Abstract base class for session storage implementations.""" + + def __init__( + self, + prefix: str = "session:", + expiration: int = 1800, + ): + """Initialize the session storage. + + Args: + prefix: Prefix for all session keys + expiration: Default session expiration in seconds + """ + self.prefix = prefix + self.expiration = expiration + + def generate_session_id(self) -> str: + """Generate a unique session ID. 
+ + Returns: + A unique session ID string + """ + return str(uuid4()) + + def get_key(self, session_id: str) -> str: + """Generate the full key for a session ID. + + Args: + session_id: The session ID + + Returns: + The full storage key + """ + return f"{self.prefix}{session_id}" + + @abstractmethod + async def create(self, data: T, session_id: str | None = None, expiration: int | None = None) -> str: + """Create a new session. + + Args: + data: Session data (must be a Pydantic model) + session_id: Optional session ID. If not provided, one will be generated + expiration: Optional custom expiration in seconds + + Returns: + The session ID + """ + pass + + @abstractmethod + async def get(self, session_id: str, model_class: type[T]) -> T | None: + """Get session data. + + Args: + session_id: The session ID + model_class: The Pydantic model class to decode the data into + + Returns: + The session data or None if session doesn't exist + """ + pass + + @abstractmethod + async def update(self, session_id: str, data: T, reset_expiration: bool = True, expiration: int | None = None) -> bool: + """Update session data. + + Args: + session_id: The session ID + data: New session data + reset_expiration: Whether to reset the expiration + expiration: Optional custom expiration in seconds + + Returns: + True if the session was updated, False if it didn't exist + """ + pass + + @abstractmethod + async def delete(self, session_id: str) -> bool: + """Delete a session. + + Args: + session_id: The session ID + + Returns: + True if the session was deleted, False if it didn't exist + """ + pass + + @abstractmethod + async def extend(self, session_id: str, expiration: int | None = None) -> bool: + """Extend the expiration of a session. 
+ + Args: + session_id: The session ID + expiration: Optional custom expiration in seconds + + Returns: + True if the session was extended, False if it didn't exist + """ + pass + + @abstractmethod + async def exists(self, session_id: str) -> bool: + """Check if a session exists. + + Args: + session_id: The session ID + + Returns: + True if the session exists, False otherwise + """ + pass + + @abstractmethod + async def close(self) -> None: + """Close the storage connection.""" + pass diff --git a/backend/src/infrastructure/auth/session/dependencies.py b/backend/src/infrastructure/auth/session/dependencies.py new file mode 100644 index 00000000..6b06994e --- /dev/null +++ b/backend/src/infrastructure/auth/session/dependencies.py @@ -0,0 +1,268 @@ +from typing import Annotated, Any + +from fastapi import Cookie, Depends, Header, Request +from sqlalchemy.ext.asyncio import AsyncSession + +from ....infrastructure.auth.http_exceptions import ( + CSRFException, + ForbiddenException, + UnauthorizedException, +) +from ....infrastructure.database.session import async_session +from ....modules.user.crud import crud_users +from ...config.settings import get_settings +from ...logging import get_logger +from ...rate_limit.provider import get_rate_limiter_backend +from ..utils import verify_password +from .manager import SessionManager +from .schemas import SessionData +from .storage import AbstractSessionStorage, get_session_storage + +settings = get_settings() +logger = get_logger() + +_session_manager: SessionManager | None = None + + +def get_session_manager() -> SessionManager: + """Get the session manager singleton (initialized once, reused across requests).""" + global _session_manager # noqa: PLW0603 + if _session_manager is not None: + return _session_manager + + storage: AbstractSessionStorage[SessionData] = get_session_storage( + backend=settings.SESSION_BACKEND, + model_type=SessionData, + prefix="session:", + expiration=settings.SESSION_TIMEOUT_MINUTES * 60, + 
host=settings.CACHE_REDIS_HOST, + port=settings.CACHE_REDIS_PORT, + db=settings.CACHE_REDIS_DB, + password=settings.CACHE_REDIS_PASSWORD, + ) + + rate_limiter = None + if settings.RATE_LIMITER_ENABLED: + try: + rate_limiter = get_rate_limiter_backend(settings.RATE_LIMITER_BACKEND) + logger.info(f"Rate limiter initialized for login attempts using {settings.RATE_LIMITER_BACKEND} backend") + except Exception as e: + logger.warning(f"Failed to initialize rate limiter for login attempts: {e}") + logger.warning("Login rate limiting will be disabled") + + _session_manager = SessionManager( + session_storage=storage, + max_sessions_per_user=settings.MAX_SESSIONS_PER_USER, + session_timeout_minutes=settings.SESSION_TIMEOUT_MINUTES, + cleanup_interval_minutes=settings.SESSION_CLEANUP_INTERVAL_MINUTES, + rate_limiter=rate_limiter, + login_max_attempts=settings.LOGIN_MAX_ATTEMPTS, + login_window_minutes=settings.LOGIN_WINDOW_MINUTES, + ) + return _session_manager + + +async def get_session_from_cookie( + request: Request, + session_id: str | None = Cookie(None), + session_manager: SessionManager = Depends(get_session_manager), +) -> SessionData | None: + """Get session data from cookie, validating it. + + Args: + request: The request object + session_id: The session ID from cookie + session_manager: The session manager + + Returns: + The session data or None if invalid + """ + if not session_id: + return None + + await session_manager.cleanup_expired_sessions() + + return await session_manager.validate_session(session_id) + + +async def verify_csrf_token( + request: Request, + session_data: Annotated[SessionData | None, Depends(get_session_from_cookie)], + csrf_token: str | None = Cookie(None), + x_csrf_token: str | None = Header(None, alias="X-CSRF-Token"), + session_manager: SessionManager = Depends(get_session_manager), +) -> None: + """Verify CSRF token for mutation operations. + + This should be used for POST/PUT/DELETE operations. 
+ + Args: + request: The request object + session_data: The session data + csrf_token: The CSRF token from cookie + x_csrf_token: The CSRF token from header + session_manager: The session manager + + Raises: + CSRFException: If CSRF validation fails + """ + if request.method in ("GET", "HEAD", "OPTIONS"): + return None + + if not settings.CSRF_ENABLED: + logger.debug("CSRF validation disabled by configuration") + return None + + if not session_data: + return None + + token = x_csrf_token or csrf_token + if not token: + raise CSRFException("Missing CSRF token") + + is_valid = await session_manager.validate_csrf_token(session_data.session_id, token) + + if not is_valid: + raise CSRFException("Invalid CSRF token") + + +async def get_current_user( + session_data: Annotated[SessionData | None, Depends(get_session_from_cookie)], + db: Annotated[AsyncSession, Depends(async_session)], + _: Annotated[None, Depends(verify_csrf_token)], +) -> dict[str, Any]: + """Get the current authenticated user. + + Args: + session_data: The session data + db: The database session + + Returns: + The user data + + Raises: + UnauthorizedException: If not authenticated or user doesn't exist + """ + credentials_exception = UnauthorizedException("Not authenticated") + + if not session_data: + raise credentials_exception + + if not session_data.is_active: + raise credentials_exception + + user = await crud_users.get(db=db, id=session_data.user_id, is_deleted=False) + + if user is None: + raise credentials_exception + + return user + + +async def get_optional_user( + session_data: Annotated[SessionData | None, Depends(get_session_from_cookie)], + db: Annotated[AsyncSession, Depends(async_session)], +) -> dict[str, Any] | None: + """Get the current user if authenticated, None otherwise. 
+ + Args: + session_data: The session data + db: The database session + + Returns: + The user data or None + """ + if not session_data: + return None + + if not session_data.is_active: + return None + + user = await crud_users.get(db=db, id=session_data.user_id, is_deleted=False) + + return user + + +async def get_current_superuser( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + """Get the current user, requiring superuser privileges. + + Args: + current_user: The current user + + Returns: + The user data + + Raises: + ForbiddenException: If not a superuser + """ + if not current_user.get("is_superuser", False): + raise ForbiddenException("Insufficient privileges") + + return current_user + + +async def get_session_id_from_cookie(request: Request) -> str | None: + """Extract session ID from cookies. + + Args: + request: The request object + + Returns: + The session ID from cookies or None if not present + """ + return request.cookies.get("session_id") + + +async def get_current_session_data( + request: Request, + session_id: Annotated[str | None, Depends(get_session_id_from_cookie)], + session_manager: Annotated[SessionManager, Depends(get_session_manager)], +) -> SessionData: + """Get the current session data from cookie. + + Args: + request: The request object + session_id: The session ID from cookie + session_manager: The session manager + + Returns: + The session data + + Raises: + UnauthorizedException: If not authenticated or session is invalid + """ + if not session_id: + raise UnauthorizedException("Not authenticated") + + session_data = await session_manager.validate_session(session_id) + if not session_data: + raise UnauthorizedException("Invalid or expired session") + + return session_data + + +async def authenticate_user(username_or_email: str, password: str, db: AsyncSession) -> dict[str, Any] | None: + """Authenticate a user by username/email and password. 
+ + Args: + username_or_email: The username or email + password: The plaintext password + db: The database session + + Returns: + The user data dict if authenticated, None otherwise + """ + if "@" in username_or_email: + user = await crud_users.get(db=db, email=username_or_email, is_deleted=False) + else: + user = await crud_users.get(db=db, username=username_or_email, is_deleted=False) + + if not user: + return None + + if not await verify_password(password, user["hashed_password"]): + return None + + return user diff --git a/backend/src/infrastructure/auth/session/manager.py b/backend/src/infrastructure/auth/session/manager.py new file mode 100644 index 00000000..95cbaaa2 --- /dev/null +++ b/backend/src/infrastructure/auth/session/manager.py @@ -0,0 +1,590 @@ +import secrets +from datetime import UTC, datetime, timedelta +from typing import Any, Literal + +from fastapi import Request, Response + +from ...config.settings import get_settings +from ...logging import get_logger +from .schemas import CSRFToken, SessionCreate, SessionData, UserAgentInfo +from .storage import AbstractSessionStorage, get_session_storage +from .user_agents_types import parse + +settings = get_settings() +logger = get_logger() + +SamesiteType = Literal["lax", "strict", "none"] +DEV_SAMESITE: SamesiteType = "lax" +PROD_SAMESITE: SamesiteType = "strict" + + +class SessionManager: + """Session manager for handling secure authentication sessions. 
+ + This class implements a comprehensive session-based authentication system with the following features: + + - Secure session creation and validation + - CSRF protection with token generation and validation + - Session expiration and automatic cleanup + - Device fingerprinting and user agent tracking + - Multi-device support with configurable session limits per user + - IP address tracking for security monitoring + - Session metadata for storing additional authentication context + - Rate limiting for login attempts with IP and username tracking + + Authentication Flow: + 1. When a user logs in successfully, create_session() generates a new session and CSRF token + 2. Session cookies are set via set_session_cookies() - a httpOnly session_id and a non-httpOnly csrf_token + 3. On subsequent requests, validate_session() confirms the session is valid and not expired + 4. For state-changing operations, validate_csrf_token() provides protection against CSRF attacks + 5. Sessions automatically expire after inactivity, or can be manually terminated + 6. Periodic cleanup_expired_sessions() removes stale sessions + + Security Features: + - Sessions are stored server-side with only the ID transmitted to clients + - CSRF protection through synchronized tokens + - Session hijacking protection via IP and user agent tracking + - Automatic session expiration after configurable timeout + - Forced logout of oldest sessions when session limit is reached + - Different SameSite cookie settings for development and production + - Rate limiting for login attempts to prevent brute force attacks + + Usage: + Sessions should be validated on each authenticated request, with CSRF tokens validated + for any state-changing operations. The cleanup method should be called periodically + to remove expired sessions. 
+ """ + + def __init__( + self, + session_storage: AbstractSessionStorage[SessionData], + max_sessions_per_user: int = 5, + session_timeout_minutes: int = 30, + cleanup_interval_minutes: int = 15, + csrf_token_bytes: int = 32, + rate_limiter=None, + login_max_attempts: int = 5, + login_window_minutes: int = 15, + ): + """Initialize the session manager. + + Args: + session_storage: Storage backend for sessions + max_sessions_per_user: Maximum number of active sessions per user + session_timeout_minutes: Session timeout in minutes + cleanup_interval_minutes: Interval for cleaning up expired sessions + csrf_token_bytes: Number of bytes to use for CSRF tokens + rate_limiter: Optional rate limiter implementation for login attempts + login_max_attempts: Maximum failed login attempts before rate limiting + login_window_minutes: Time window for tracking failed login attempts + """ + self.storage = session_storage + self.max_sessions = max_sessions_per_user + self.session_timeout = timedelta(minutes=session_timeout_minutes) + self.cleanup_interval = timedelta(minutes=cleanup_interval_minutes) + self.last_cleanup = datetime.now(UTC) + self.csrf_token_bytes = csrf_token_bytes + self.rate_limiter = rate_limiter + self.login_max_attempts = login_max_attempts + self.login_window = timedelta(minutes=login_window_minutes) + + csrf_storage_settings = {"prefix": "csrf:", "expiration": session_timeout_minutes * 60} + self.csrf_storage: AbstractSessionStorage[CSRFToken] = get_session_storage( + backend=settings.SESSION_BACKEND, model_type=CSRFToken, **csrf_storage_settings + ) + + def parse_user_agent(self, user_agent_string: str) -> UserAgentInfo: + """Parse User-Agent string into structured information. 
+ + Args: + user_agent_string: Raw User-Agent header + + Returns: + Structured UserAgentInfo + """ + ua_parser = parse(user_agent_string) + return UserAgentInfo( + browser=ua_parser.browser.family, + browser_version=ua_parser.browser.version_string, + os=ua_parser.os.family, + device=ua_parser.device.family, + is_mobile=ua_parser.is_mobile, + is_tablet=ua_parser.is_tablet, + is_pc=ua_parser.is_pc, + ) + + async def create_session(self, request: Request, user_id: int, metadata: dict[str, Any] | None = None) -> tuple[str, str]: + """Create a new session for a user and generate a CSRF token. + + Args: + request: The request object + user_id: The user ID + metadata: Optional session metadata + + Returns: + Tuple of (session_id, csrf_token) + + Raises: + ValueError: If the request client is invalid + """ + logger.info(f"Creating new session for user_id: {user_id}") + + try: + user_agent = request.headers.get("user-agent", "") + current_time = datetime.now(UTC) + + client = request.client + if client is None: + logger.error("Request client is None. 
Cannot retrieve IP address.") + raise ValueError("Invalid request client.") + + device_info = self.parse_user_agent(user_agent).model_dump() + + ip_address = request.headers.get("x-forwarded-for", client.host).split(",")[0].strip() + + await self._enforce_session_limit(user_id) + + session_data = SessionCreate( + user_id=user_id, + ip_address=ip_address, + user_agent=user_agent, + device_info=device_info, + last_activity=current_time, + is_active=True, + metadata=metadata or {}, + ) + + session_id = await self.storage.create(session_data, session_id=session_data.session_id) + csrf_token = await self._generate_csrf_token(user_id, session_id) + + logger.info(f"Session {session_id} created successfully") + return session_id, csrf_token + + except Exception as e: + logger.error(f"Error creating session: {str(e)}", exc_info=True) + raise + + async def validate_session(self, session_id: str, update_activity: bool = True) -> SessionData | None: + """Validate if a session is active and not timed out. 
+ + Args: + session_id: The session ID + update_activity: Whether to update the last activity timestamp + + Returns: + The session data if valid, None otherwise + """ + if not session_id: + return None + + try: + session_data = await self.storage.get(session_id, SessionData) + if session_data is None: + logger.warning(f"Session not found: {session_id}") + return None + + if not session_data.is_active: + logger.warning(f"Session is not active: {session_id}") + return None + + current_time = datetime.now(UTC) + session_age = current_time - session_data.last_activity + + if session_age > self.session_timeout: + logger.warning(f"Session timed out: {session_id}") + await self.terminate_session(session_id) + return None + + if update_activity: + session_data.last_activity = current_time + await self.storage.update(session_id, session_data) + + return session_data + + except Exception as e: + logger.error(f"Error validating session: {str(e)}", exc_info=True) + return None + + async def validate_csrf_token( + self, + session_id: str, + csrf_token: str, + ) -> bool: + """Validate a CSRF token for a session. 
+ + Args: + session_id: The session ID + csrf_token: The CSRF token to validate + + Returns: + True if valid, False otherwise + """ + if not session_id or not csrf_token: + logger.warning(f"Missing session_id or csrf_token: session_id={session_id}, csrf_token={csrf_token}") + return False + + try: + token_data = await self.csrf_storage.get(csrf_token, CSRFToken) + if token_data is None: + logger.warning(f"CSRF token not found in storage: {csrf_token}") + return False + + if token_data.session_id != session_id: + logger.warning( + f"CSRF token session mismatch: {csrf_token} should be for session {session_id}, " + f"but is for session {token_data.session_id}" + ) + return False + + current_time = datetime.now(UTC) + if token_data.expiry < current_time: + logger.warning( + f"CSRF token expired: {csrf_token}, expired at {token_data.expiry}, current time is {current_time}" + ) + await self.csrf_storage.delete(csrf_token) + return False + + return True + + except Exception as e: + logger.error(f"Error validating CSRF token: {str(e)}", exc_info=True) + return False + + async def regenerate_csrf_token( + self, + user_id: int, + session_id: str, + ) -> str: + """Regenerate a CSRF token for an existing session. + + Args: + user_id: The user ID + session_id: The session ID + + Returns: + The new CSRF token + """ + return await self._generate_csrf_token(user_id, session_id) + + async def _generate_csrf_token( + self, + user_id: int, + session_id: str, + ) -> str: + """Generate a new CSRF token for a session. 
+ + Args: + user_id: The user ID + session_id: The session ID + + Returns: + The CSRF token + """ + token = secrets.token_hex(self.csrf_token_bytes) + expiry = datetime.now(UTC) + self.session_timeout + + csrf_data = CSRFToken( + token=token, + user_id=user_id, + session_id=session_id, + expiry=expiry, + ) + + await self.csrf_storage.create(csrf_data, session_id=token) + return token + + async def terminate_session(self, session_id: str) -> bool: + """Terminate a specific session. + + Args: + session_id: The session ID + + Returns: + True if the session was terminated, False otherwise + """ + try: + session_data = await self.storage.get(session_id, SessionData) + if session_data is None: + return False + + session_data.is_active = False + session_data.metadata = { + **session_data.metadata, + "terminated_at": datetime.now(UTC).isoformat(), + "termination_reason": "manual_termination", + } + + return await self.storage.update(session_id, session_data) + + except Exception as e: + logger.error(f"Error terminating session: {str(e)}", exc_info=True) + return False + + async def _enforce_session_limit(self, user_id: int) -> None: + """Enforce the maximum number of sessions per user. + + Terminates the oldest sessions if the limit is exceeded. 
+ + Args: + user_id: The user ID + """ + try: + active_sessions = [] + + if hasattr(self.storage, "get_user_sessions"): + try: + session_ids = await self.storage.get_user_sessions(user_id) + for session_id in session_ids: + try: + session_data = await self.storage.get(session_id, SessionData) + if session_data and session_data.is_active: + active_sessions.append(session_data) + except Exception as e: + logger.warning(f"Error processing session {session_id}: {e}") + continue + except Exception as e: + logger.warning(f"Error getting user sessions: {e}") + active_sessions = await self._get_active_sessions_by_scan(user_id) + else: + active_sessions = await self._get_active_sessions_by_scan(user_id) + + if len(active_sessions) >= self.max_sessions: + active_sessions.sort(key=lambda s: s.last_activity) + + excess_count = len(active_sessions) - self.max_sessions + 1 + for i in range(excess_count): + if i < len(active_sessions): + await self.terminate_session(active_sessions[i].session_id) + + except Exception as e: + logger.error(f"Error enforcing session limit: {e}", exc_info=True) + + async def _get_active_sessions_by_scan(self, user_id: int) -> list[SessionData]: + """Get active sessions for a user by scanning all keys. + + This is a fallback method when indexed groups are not available. 
+ + Args: + user_id: The user ID + + Returns: + List of active sessions for the user + """ + active_sessions = [] + + if hasattr(self.storage, "_scan_iter"): + keys = await self.storage._scan_iter(match=f"{self.storage.prefix}*") + for key in keys: + try: + session_data_bytes = await self.storage.get( + session_id=key[len(self.storage.prefix) :], model_class=SessionData + ) + if session_data_bytes and session_data_bytes.user_id == user_id and session_data_bytes.is_active: + active_sessions.append(session_data_bytes) + except Exception as e: + logger.warning(f"Error processing session during cleanup: {e}") + continue + elif hasattr(self.storage, "client") and hasattr(self.storage.client, "scan_iter"): + async for key in self.storage.client.scan_iter(match=f"{self.storage.prefix}*"): + try: + if isinstance(key, bytes): + key = key.decode("utf-8") + session_id = key[len(self.storage.prefix) :] + + session_data = await self.storage.get(session_id, SessionData) + if session_data and session_data.user_id == user_id and session_data.is_active: + active_sessions.append(session_data) + except Exception as e: + logger.warning(f"Error processing session during cleanup: {e}") + continue + + return active_sessions + + async def cleanup_expired_sessions(self) -> None: + """Cleanup expired and inactive sessions. + + This should be called periodically. 
+ """ + now = datetime.now(UTC) + + if now - self.last_cleanup < self.cleanup_interval: + return + + timeout_threshold = now - self.session_timeout + + try: + if hasattr(self.storage, "_scan_iter"): + keys = await self.storage._scan_iter(match=f"{self.storage.prefix}*") + for key in keys: + try: + session_id = key[len(self.storage.prefix) :] + session_data = await self.storage.get(session_id, SessionData) + if session_data and session_data.is_active and session_data.last_activity < timeout_threshold: + session_data.is_active = False + session_data.metadata = { + **session_data.metadata, + "terminated_at": now.isoformat(), + "termination_reason": "session_timeout", + } + await self.storage.update(session_id, session_data) + except Exception as e: + logger.warning(f"Error processing session during cleanup: {e}") + continue + elif hasattr(self.storage, "client") and hasattr(self.storage.client, "scan_iter"): + async for key in self.storage.client.scan_iter(match=f"{self.storage.prefix}*"): + try: + if isinstance(key, bytes): + key = key.decode("utf-8") + session_id = key[len(self.storage.prefix) :] + + session_data = await self.storage.get(session_id, SessionData) + if session_data and session_data.is_active and session_data.last_activity < timeout_threshold: + session_data.is_active = False + session_data.metadata = { + **session_data.metadata, + "terminated_at": now.isoformat(), + "termination_reason": "session_timeout", + } + await self.storage.update(session_data.session_id, session_data) + except Exception as e: + logger.warning(f"Error processing session during cleanup: {e}") + continue + + if self.rate_limiter: + try: + await self.cleanup_rate_limits() + except Exception as e: + logger.error(f"Error cleaning up rate limits: {e}") + + self.last_cleanup = now + + except Exception as e: + logger.error(f"Error during session cleanup: {e}", exc_info=True) + + def set_session_cookies( + self, + response: Response, + session_id: str, + csrf_token: str, + max_age: int 
| None = None, + path: str = "/", + secure: bool = True, + ) -> None: + """Set session cookies in the response. + + Args: + response: The response object + session_id: The session ID + csrf_token: The CSRF token + max_age: Cookie max age in seconds + path: Cookie path + secure: Whether to set the Secure flag + """ + samesite: SamesiteType = DEV_SAMESITE if settings.DEBUG else PROD_SAMESITE + cookie_max_age = max_age if max_age is not None else settings.SESSION_COOKIE_MAX_AGE + + response.set_cookie( + key="session_id", + value=session_id, + httponly=True, + secure=secure, + samesite=samesite, + path=path, + max_age=cookie_max_age, + ) + + response.set_cookie( + key="csrf_token", + value=csrf_token, + httponly=False, + secure=secure, + samesite=samesite, + path=path, + max_age=cookie_max_age, + ) + + def clear_session_cookies( + self, + response: Response, + path: str = "/", + ) -> None: + """Clear session cookies from the response. + + Args: + response: The response object + path: Cookie path + """ + response.delete_cookie(key="session_id", path=path) + response.delete_cookie(key="csrf_token", path=path) + + async def track_login_attempt(self, ip_address: str, username: str, success: bool = False) -> tuple[bool, int | None]: + """Track login attempts and apply rate limiting. + + Args: + ip_address: Client IP address + username: Username being used for login + success: Whether the login attempt was successful + + Returns: + Tuple of (is_allowed, attempts_remaining) + + If rate limiting is not configured, this will always return (True, None) + but log a warning about missing rate limiting. + """ + if not self.rate_limiter: + logger.warning( + "No rate limiter configured for login attempts. " + "It is strongly recommended to configure rate limiting for security." 
+ ) + return True, None + + try: + ip_key = f"login:ip:{ip_address}" + username_key = f"login:user:{username}" + + if success: + try: + await self.rate_limiter.delete(ip_key) + await self.rate_limiter.delete(username_key) + return True, None + except Exception as e: + logger.warning(f"Error clearing rate limit after successful login: {e}") + return True, None + + try: + expiry_seconds = int(self.login_window.total_seconds()) + ip_count = await self.rate_limiter.increment(ip_key, 1, expiry_seconds) + username_count = await self.rate_limiter.increment(username_key, 1, expiry_seconds) + except Exception as e: + logger.warning(f"Error tracking login attempt rate limits: {e}") + return True, None + + attempt_count = max(ip_count, username_count) + remaining = max(0, self.login_max_attempts - attempt_count) + + is_allowed = attempt_count <= self.login_max_attempts + + if not is_allowed: + logger.warning(f"Rate limit exceeded for login: {ip_address}, username: {username}, attempts: {attempt_count}") + + return is_allowed, remaining + + except Exception as e: + logger.error(f"Unexpected error in login rate limiting: {e}", exc_info=True) + return True, None + + async def cleanup_rate_limits(self) -> None: + """Clean up expired rate limit records. + + This should be called periodically along with session cleanup. 
+ """ + if not self.rate_limiter: + return + + try: + if hasattr(self.rate_limiter, "delete_pattern"): + await self.rate_limiter.delete_pattern("login:*") + else: + logger.debug("Rate limiter does not support pattern-based cleanup") + except Exception as e: + logger.error(f"Error cleaning up rate limit records: {e}", exc_info=True) diff --git a/backend/src/infrastructure/auth/session/schemas.py b/backend/src/infrastructure/auth/session/schemas.py new file mode 100644 index 00000000..618e7623 --- /dev/null +++ b/backend/src/infrastructure/auth/session/schemas.py @@ -0,0 +1,54 @@ +from datetime import UTC, datetime +from typing import Any +from uuid import uuid4 + +from pydantic import BaseModel, Field + + +class SessionData(BaseModel): + """Common base data for any user session.""" + + user_id: int + session_id: str = Field(default_factory=lambda: str(uuid4())) + ip_address: str + user_agent: str + device_info: dict[str, Any] = Field(default_factory=dict) + created_at: datetime = Field(default_factory=lambda: datetime.now(UTC)) + last_activity: datetime = Field(default_factory=lambda: datetime.now(UTC)) + is_active: bool = True + metadata: dict[str, Any] = Field(default_factory=dict) + + +class SessionCreate(SessionData): + """Schema for creating a new session.""" + + pass + + +class SessionUpdate(BaseModel): + """Schema for updating a session.""" + + last_activity: datetime | None = None + is_active: bool | None = None + metadata: dict[str, Any] | None = None + + +class UserAgentInfo(BaseModel): + """Parsed User-Agent information.""" + + browser: str + browser_version: str + os: str + device: str + is_mobile: bool + is_tablet: bool + is_pc: bool + + +class CSRFToken(BaseModel): + """CSRF token schema.""" + + token: str + user_id: int + session_id: str + expiry: datetime diff --git a/backend/src/infrastructure/auth/session/storage.py b/backend/src/infrastructure/auth/session/storage.py new file mode 100644 index 00000000..b549b360 --- /dev/null +++ 
b/backend/src/infrastructure/auth/session/storage.py @@ -0,0 +1,59 @@ +"""Session storage factory + backwards-compatible re-export of the abstract base. + +The abstract base (``AbstractSessionStorage``) lives in ``base.py`` so +that the concrete ``backends/`` implementations don't form a cycle with +this module. Existing callers that import ``AbstractSessionStorage`` +from ``.storage`` keep working via the re-export below. +""" + +from typing import Generic, TypeVar, cast + +from pydantic import BaseModel + +from ...config import SessionBackend +from ...logging import get_logger +from .backends.memcached import MemcachedSessionStorage +from .backends.memory import MemorySessionStorage +from .backends.redis import RedisSessionStorage +from .base import AbstractSessionStorage + +T = TypeVar("T", bound=BaseModel) +logger = get_logger(__name__) + +__all__ = ["AbstractSessionStorage", "SessionStorage", "get_session_storage"] + + +class SessionStorage(AbstractSessionStorage[T], Generic[T]): + def __new__(cls, backend: str = "memory", **kwargs) -> "SessionStorage[T]": + """Factory method to create the appropriate session storage backend. + + Args: + backend: The backend to use ("redis", "memcached", "memory") + **kwargs: Additional arguments to pass to the backend + + Returns: + An initialized storage backend + """ + storage: AbstractSessionStorage[T] = get_session_storage(backend, cast(type[T], BaseModel), **kwargs) + return cast("SessionStorage[T]", storage) + + +def get_session_storage(backend: str, model_type: type[BaseModel], **kwargs) -> AbstractSessionStorage[T]: + """Get the appropriate session storage backend. 
+ + Args: + backend: The backend to use ("redis", "memcached", "memory") + model_type: The pydantic model type for type checking + **kwargs: Additional arguments to pass to the backend + + Returns: + An initialized storage backend + """ + if backend == SessionBackend.REDIS.value: + return RedisSessionStorage(**kwargs) + elif backend == SessionBackend.MEMCACHED.value: + return MemcachedSessionStorage(**kwargs) + elif backend == SessionBackend.MEMORY.value: + return MemorySessionStorage(**kwargs) + else: + raise ValueError(f"Unknown backend: {backend}") diff --git a/backend/src/infrastructure/auth/session/user_agents_types.py b/backend/src/infrastructure/auth/session/user_agents_types.py new file mode 100644 index 00000000..85703c4a --- /dev/null +++ b/backend/src/infrastructure/auth/session/user_agents_types.py @@ -0,0 +1,61 @@ +""" +Type-annotated wrapper for the user_agents module. +This module provides proper type annotations for the user_agents module. +""" + +from typing import NamedTuple, cast + +# mypy: disable-error-code="import-untyped" +from user_agents import parse as _parse + + +class Browser(NamedTuple): + """Browser information.""" + + family: str + version: str | None = None + version_string: str = "" + + +class OperatingSystem(NamedTuple): + """Operating system information.""" + + family: str + version: str | None = None + version_string: str = "" + + +class Device(NamedTuple): + """Device information.""" + + family: str + brand: str | None = None + model: str | None = None + + +class UserAgent: + """User agent information with proper typing.""" + + browser: Browser + os: OperatingSystem + device: Device + is_mobile: bool + is_tablet: bool + is_pc: bool + is_bot: bool + + def __str__(self) -> str: + return f"{self.browser.family}/{self.browser.version_string} ({self.os.family})" + + +def parse(user_agent_string: str) -> UserAgent: + """ + Parse a user agent string into structured data. 
+ + Args: + user_agent_string: The user agent string to parse + + Returns: + A UserAgent object with parsed information + """ + return cast(UserAgent, _parse(user_agent_string)) diff --git a/backend/src/infrastructure/auth/utils.py b/backend/src/infrastructure/auth/utils.py new file mode 100644 index 00000000..faa0b704 --- /dev/null +++ b/backend/src/infrastructure/auth/utils.py @@ -0,0 +1,80 @@ +import bcrypt + + +async def verify_password(plain_password: str, hashed_password: str) -> bool: + """Verify a plaintext password against its bcrypt hash. + + Performs secure password verification using bcrypt's built-in comparison + function, which includes protection against timing attacks through + constant-time comparison. + + Args: + plain_password: The plaintext password to verify. + hashed_password: The bcrypt hash to compare against. + + Returns: + True if the password matches the hash, False otherwise. + + Note: + This function uses bcrypt.checkpw() which: + - Automatically handles salt extraction from the hash + - Performs constant-time comparison to prevent timing attacks + - Works with any valid bcrypt hash format + - Is designed to be computationally expensive to prevent brute force + + Example: + ```python + # During user authentication + stored_hash = user.password_hash + entered_password = "user_entered_password" + + if await verify_password(entered_password, stored_hash): + # Password is correct - authenticate user + return authenticate_user(user) + else: + # Password is incorrect - deny access + raise AuthenticationError("Invalid password") + ``` + """ + verified: bool = bcrypt.checkpw(plain_password.encode(), hashed_password.encode()) + return verified + + +def get_password_hash(password: str) -> str: + """Generate a secure bcrypt hash for a plaintext password. + + Creates a bcrypt hash using a randomly generated salt, providing + strong password protection against rainbow table attacks and + ensuring each password has a unique hash. 
+ + Args: + password: The plaintext password to hash. + + Returns: + The bcrypt hash as a string, including the salt. + + Note: + This function uses bcrypt.hashpw() with bcrypt.gensalt() which: + - Generates a random salt for each password + - Uses a default cost factor (rounds) appropriate for security + - Produces hashes that are compatible with standard bcrypt libraries + - Creates hashes that include the salt and cost parameters + + Example: + ```python + # During user registration + plain_password = "user_new_password" + hashed_password = get_password_hash(plain_password) + + # Store the hash in the database + user = User( + email="user@example.com", + password_hash=hashed_password + ) + await session.add(user) + await session.commit() + ``` + """ + hashed_password: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt()) + decoded_password: str = hashed_password.decode() + return decoded_password diff --git a/backend/src/infrastructure/cache/__init__.py b/backend/src/infrastructure/cache/__init__.py new file mode 100644 index 00000000..5bddd8b3 --- /dev/null +++ b/backend/src/infrastructure/cache/__init__.py @@ -0,0 +1,38 @@ +from importlib.util import find_spec + +from .base import CacheBackend +from .decorator import cache +from .provider import cache_provider, clear, delete, delete_pattern, exists, get, set + +MEMCACHED_INSTALLED = find_spec("aiomcache") is not None +REDIS_INSTALLED = find_spec("redis.asyncio") is not None + +if MEMCACHED_INSTALLED: + from .backends.memcached import MemcachedBackend, MemcachedSettings +else: + MemcachedBackend = None # type: ignore + MemcachedSettings = None # type: ignore + +if REDIS_INSTALLED: + from .backends.redis import RedisBackend, RedisSettings +else: + RedisBackend = None # type: ignore + RedisSettings = None # type: ignore + +__all__ = [ + "CacheBackend", + "MemcachedBackend", + "RedisBackend", + "MemcachedSettings", + "RedisSettings", + "cache", + "cache_provider", + "get", + "set", + "delete", + 
"delete_pattern", + "exists", + "clear", + "REDIS_INSTALLED", + "MEMCACHED_INSTALLED", +] diff --git a/backend/src/infrastructure/cache/backends/__init__.py b/backend/src/infrastructure/cache/backends/__init__.py new file mode 100644 index 00000000..9d5470f6 --- /dev/null +++ b/backend/src/infrastructure/cache/backends/__init__.py @@ -0,0 +1,32 @@ +""" +Caching backend implementations. + +This module contains implementations of various cache backends that follow the +CacheBackend interface. +""" + +from importlib.util import find_spec + +MEMCACHED_INSTALLED = find_spec("aiomcache") is not None +REDIS_INSTALLED = find_spec("redis.asyncio") is not None + +if MEMCACHED_INSTALLED: + from .memcached import ( + MemcachedBackend, + MemcachedSettings, + PatternMatchingNotSupportedError, + ) +else: + MemcachedBackend = None # type: ignore + MemcachedSettings = None # type: ignore + PatternMatchingNotSupportedError = None # type: ignore + +if REDIS_INSTALLED: + from .redis import RedisBackend, RedisSettings +else: + RedisBackend = None # type: ignore + RedisSettings = None # type: ignore + +__all__ = ["MemcachedBackend", "MemcachedSettings", "RedisBackend", "RedisSettings"] +if MEMCACHED_INSTALLED: + __all__.append("PatternMatchingNotSupportedError") diff --git a/backend/src/infrastructure/cache/backends/memcached.py b/backend/src/infrastructure/cache/backends/memcached.py new file mode 100644 index 00000000..c60a996b --- /dev/null +++ b/backend/src/infrastructure/cache/backends/memcached.py @@ -0,0 +1,155 @@ +import json +from typing import Any + +try: + import aiomcache +except ImportError: + raise ImportError( + "The aiomcache package is not installed. 
" + "Please install it with 'pip install aiomcache' or 'pip install -e \".[memcached]\"'" + ) + +from pydantic import BaseModel + +from ...config.settings import get_settings +from ..base import CacheBackend +from ..exceptions import CacheException + +settings = get_settings() + + +class PatternMatchingNotSupportedError(CacheException): + """Raised when attempting to use pattern matching with Memcached.""" + + def __init__(self, pattern: str): + self.message = f"Memcached doesn't support pattern-based deletion. Pattern '{pattern}' cannot be used." + super().__init__(self.message) + + +class MemcachedSettings(BaseModel): + """Settings for Memcached connection. + + This class defines the configuration for connecting to a Memcached server. + + Attributes: + host: Memcached server hostname. Default is "localhost". + port: Memcached server port. Default is 11211. + pool_size: Maximum number of connections in the pool. Default is 10. + connect_timeout: Connection timeout in seconds. Default is 5. + Note: This parameter is not currently used by aiomcache.Client but is + kept for API consistency with other cache backends. + """ + + host: str = "localhost" + port: int = 11211 + pool_size: int = 10 + connect_timeout: int = 5 + + +class MemcachedBackend(CacheBackend): + """Memcached implementation of the cache backend.""" + + def __init__(self, settings: MemcachedSettings | None = None): + """Initialize the Memcached backend. + + Args: + settings: Custom settings for Memcached connection. If None, default settings are used. + """ + self.settings = settings or MemcachedSettings() + self.client = aiomcache.Client( + host=self.settings.host, + port=self.settings.port, + pool_size=self.settings.pool_size, + ) + + async def get(self, key: str) -> Any | None: + """Get a value from the cache. + + Args: + key: The cache key to get. + + Returns: + The cached value or None if the key doesn't exist. 
+ """ + key_bytes = key.encode("utf-8") + result = await self.client.get(key_bytes) + + if result is None: + return None + + try: + return json.loads(result.decode("utf-8")) + except (json.JSONDecodeError, UnicodeDecodeError): + return result + + async def set(self, key: str, value: Any, expiration: int = 3600) -> None: + """Set a value in the cache. + + Args: + key: The cache key to set. + value: The value to cache. + expiration: Time in seconds before the key expires (default: 3600). + """ + key_bytes = key.encode("utf-8") + + if not isinstance(value, bytes | str | int | float | bool): + value_bytes = json.dumps(value).encode("utf-8") + elif isinstance(value, str): + value_bytes = value.encode("utf-8") + elif isinstance(value, bytes): + value_bytes = value + else: + value_bytes = str(value).encode("utf-8") + + await self.client.set(key_bytes, value_bytes, exptime=expiration) + + async def delete(self, key: str) -> None: + """Delete a key from the cache. + + Args: + key: The cache key to delete. + """ + key_bytes = key.encode("utf-8") + await self.client.delete(key_bytes) + + async def delete_pattern(self, pattern: str) -> None: + """Delete all keys matching a pattern. + + Args: + pattern: The pattern to match against keys. + + Raises: + PatternMatchingNotSupportedError: Always raised because Memcached doesn't + support pattern matching for keys. + """ + raise PatternMatchingNotSupportedError(pattern) + + async def exists(self, key: str) -> bool: + """Check if a key exists in the cache. + + Args: + key: The cache key to check. + + Returns: + True if the key exists, False otherwise. + """ + result = await self.get(key) + return result is not None + + async def clear(self) -> None: + """Clear the entire cache.""" + await self.client.flush_all() + + async def ping(self) -> bool: + """Check if the cache is available. + + Returns: + True if the cache is available, False otherwise. 
+ """ + try: + test_key = b"_memcached_ping_test" + await self.client.set(test_key, b"1", exptime=1) + result = await self.client.get(test_key) + return bool(result == b"1") + except Exception: + return False diff --git a/backend/src/infrastructure/cache/backends/redis.py b/backend/src/infrastructure/cache/backends/redis.py new file mode 100644 index 00000000..b0e454cb --- /dev/null +++ b/backend/src/infrastructure/cache/backends/redis.py @@ -0,0 +1,149 @@ +import json +from typing import Any + +try: + from redis.asyncio import Redis +except ImportError: + raise ImportError( + "The redis package is not installed. Please install it with 'pip install redis' or 'pip install -e \".[redis]\"'" + ) + +from pydantic import BaseModel + +from ...config.settings import get_settings +from ..base import CacheBackend + +settings = get_settings() + + +class RedisSettings(BaseModel): + """Settings for Redis connection. + + This class defines the configuration for connecting to a Redis server. + + Attributes: + host: Redis server hostname. Default is "localhost". + port: Redis server port. Default is 6379. + db: Redis database number. Default is 0. + password: Redis server password. Default is None. + connect_timeout: Connection timeout in seconds. Default is 5. + pool_size: Maximum number of connections in the pool. Default is 10. + """ + + host: str = "localhost" + port: int = 6379 + db: int = 0 + password: str | None = None + connect_timeout: int = 5 + pool_size: int = 10 + + +class RedisBackend(CacheBackend): + """Redis implementation of the cache backend.""" + + def __init__(self, settings: RedisSettings | None = None): + """Initialize the Redis backend. + + Args: + settings: Custom settings for Redis connection. If None, default settings are used. 
+ """ + self.settings = settings or RedisSettings() + self.client = Redis( + host=self.settings.host, + port=self.settings.port, + db=self.settings.db, + password=self.settings.password, + socket_timeout=self.settings.connect_timeout, + max_connections=self.settings.pool_size, + ) + + async def get(self, key: str) -> Any | None: + """Get a value from the cache. + + Args: + key: The cache key to get. + + Returns: + The cached value or None if the key doesn't exist. + """ + result = await self.client.get(key) + + if result is None: + return None + + try: + return json.loads(result.decode("utf-8")) + except (json.JSONDecodeError, UnicodeDecodeError): + return result + + async def set(self, key: str, value: Any, expiration: int = 3600) -> None: + """Set a value in the cache. + + Args: + key: The cache key to set. + value: The value to cache. + expiration: Time in seconds before the key expires (default: 3600). + """ + if not isinstance(value, bytes | str | int | float | bool): + value_bytes = json.dumps(value).encode("utf-8") + elif isinstance(value, str): + value_bytes = value.encode("utf-8") + elif isinstance(value, bytes): + value_bytes = value + else: + value_bytes = str(value).encode("utf-8") + + await self.client.set(key, value_bytes, ex=expiration) + + async def delete(self, key: str) -> None: + """Delete a key from the cache. + + Args: + key: The cache key to delete. + """ + await self.client.delete(key) + + async def delete_pattern(self, pattern: str) -> None: + """Delete all keys matching a pattern. + + Args: + pattern: The pattern to match against keys. + """ + cursor = 0 + keys_to_delete = [] + + cursor_response, keys = await self.client.scan(cursor=cursor, match=pattern + "*", count=100) + + if keys: + keys_to_delete.extend(keys) + + if keys_to_delete: + await self.client.delete(*keys_to_delete) + + async def exists(self, key: str) -> bool: + """Check if a key exists in the cache. + + Args: + key: The cache key to check. 
+ + Returns: + True if the key exists, False otherwise. + """ + result = await self.client.exists(key) + return bool(result > 0) + + async def clear(self) -> None: + """Clear the entire cache.""" + await self.client.flushdb() + + async def ping(self) -> bool: + """Check if the cache is available. + + Returns: + True if the cache is available, False otherwise. + """ + try: + result = await self.client.ping() # type: ignore[misc] + return bool(result) + except Exception: + return False diff --git a/backend/src/infrastructure/cache/base.py b/backend/src/infrastructure/cache/base.py new file mode 100644 index 00000000..1bdf9e40 --- /dev/null +++ b/backend/src/infrastructure/cache/base.py @@ -0,0 +1,243 @@ +from abc import ABC, abstractmethod +from typing import Any + + +class CacheBackend(ABC): + """Abstract base class for cache backends with comprehensive caching interface. + + Defines the standard interface that all cache backend implementations must follow, + providing consistent caching operations across different backend technologies + like Redis, Memcached, or in-memory storage. + + This abstract base class ensures: + - Consistent API across different cache implementations + - Type safety with proper return types + - Comprehensive cache operations including pattern-based deletion + - Health checking and connection management + - Proper error handling patterns + + Implementations should handle: + - Serialization/deserialization of cached values + - Connection management and retries + - Backend-specific optimizations + - Error handling and fallback behavior + + Example: + ```python + class RedisCacheBackend(CacheBackend): + async def get(self, key: str) -> Optional[Any]: + try: + return await self.redis.get(key) + except ConnectionError: + return None + ``` + """ + + @abstractmethod + async def get(self, key: str) -> Any | None: + """Retrieve a value from the cache by key. + + Attempts to fetch a cached value for the given key. 
Returns None + if the key doesn't exist, has expired, or if there's a connection error. + + Args: + key: The cache key to retrieve. Should be a string identifier. + + Returns: + The cached value if found, None otherwise. The value is automatically + deserialized from the backend's storage format. + + Note: + Implementation should handle: + - Key normalization and validation + - Automatic deserialization of stored values + - Connection errors gracefully (return None) + - Expired key cleanup where applicable + + Example: + ```python + # Get user data from cache + user_data = await cache.get("user:123") + if user_data is None: + # Cache miss - load from database + user_data = await load_user_from_db(123) + await cache.set("user:123", user_data, 3600) + ``` + """ + pass + + @abstractmethod + async def set(self, key: str, value: Any, expiration: int = 3600) -> None: + """Store a value in the cache with optional expiration. + + Stores a value in the cache with the specified key and expiration time. + The value is automatically serialized for storage in the backend. + + Args: + key: The cache key to store the value under. + value: The value to cache. Will be automatically serialized. + expiration: Time in seconds before the key expires (default: 3600). + + Note: + Implementation should handle: + - Automatic serialization of values + - Key normalization and validation + - Expiration time validation and limits + - Connection errors gracefully + - Backend-specific storage optimizations + + Example: + ```python + # Cache user data for 1 hour + await cache.set("user:123", user_data, 3600) + + # Cache with default expiration (1 hour) + await cache.set("session:abc", session_data) + ``` + """ + pass + + @abstractmethod + async def delete(self, key: str) -> None: + """Remove a specific key from the cache. + + Deletes a single cache entry by key. Operations should be idempotent + and not raise errors if the key doesn't exist. + + Args: + key: The cache key to delete. 
+ + Note: + Implementation should handle: + - Key normalization and validation + - Idempotent deletion (no error if key doesn't exist) + - Connection errors gracefully + - Cleanup of any related metadata + + Example: + ```python + # Delete user cache when user is updated + await cache.delete("user:123") + + # Delete session on logout + await cache.delete(f"session:{session_id}") + ``` + """ + pass + + @abstractmethod + async def delete_pattern(self, pattern: str) -> None: + """Delete all keys matching a specific pattern. + + Removes multiple cache entries that match the given pattern. + This is useful for invalidating related cache entries or + clearing cache namespaces. + + Args: + pattern: The pattern to match against keys. Pattern syntax + depends on the backend implementation (e.g., Redis glob patterns). + + Note: + Implementation should handle: + - Backend-specific pattern syntax + - Efficient bulk deletion operations + - Connection errors gracefully + - Large result set handling with pagination + + Example: + ```python + # Delete all user-related cache entries + await cache.delete_pattern("user:*") + + # Delete all cache entries for a specific tenant + await cache.delete_pattern("tenant:123:*") + ``` + """ + pass + + @abstractmethod + async def exists(self, key: str) -> bool: + """Check if a key exists in the cache. + + Determines whether a key exists in the cache without retrieving its value. + This is more efficient than getting the value when you only need to + check existence. + + Args: + key: The cache key to check for existence. + + Returns: + True if the key exists and hasn't expired, False otherwise. 
+ + Note: + Implementation should handle: + - Key normalization and validation + - Expired key detection + - Connection errors gracefully (return False) + - Backend-specific existence checks + + Example: + ```python + # Check if user is cached before expensive operation + if await cache.exists("user:123"): + user_data = await cache.get("user:123") + else: + user_data = await load_user_from_db(123) + await cache.set("user:123", user_data) + ``` + """ + pass + + @abstractmethod + async def clear(self) -> None: + """Clear the entire cache. + + Removes all cache entries from the backend. This is a destructive + operation that should be used with caution. + + Note: + Implementation should handle: + - Efficient bulk deletion of all entries + - Connection errors gracefully + - Backend-specific clear operations + - Cleanup of any metadata or indexes + + Example: + ```python + # Clear all cache during deployment + await cache.clear() + + # Clear cache in test cleanup + await cache.clear() + ``` + """ + pass + + @abstractmethod + async def ping(self) -> bool: + """Check if the cache backend is available and responsive. + + Performs a health check on the cache backend to determine if it's + available and responding to requests. This is useful for health + checks and monitoring. + + Returns: + True if the cache backend is available and responsive, False otherwise. 
+ + Note: + Implementation should handle: + - Quick connectivity test + - Timeout handling for unresponsive backends + - Authentication validation + - Minimal resource usage for health checks + + Example: + ```python + # Health check endpoint + if await cache.ping(): + return {"cache": "healthy"} + else: + return {"cache": "unhealthy"} + ``` + """ + pass diff --git a/backend/src/infrastructure/cache/decorator.py b/backend/src/infrastructure/cache/decorator.py new file mode 100644 index 00000000..d9a0cc93 --- /dev/null +++ b/backend/src/infrastructure/cache/decorator.py @@ -0,0 +1,124 @@ +import functools +from collections.abc import Callable +from typing import Any, TypeVar, cast + +from fastapi import Request, Response +from fastapi.encoders import jsonable_encoder + +from ..logging import get_logger +from .exceptions import CacheException, InvalidRequestError +from .provider import cache_provider +from .utils import format_extra_data, format_prefix, infer_resource_id + + +class PatternMatchingNotSupportedError(CacheException): + """Exception raised when pattern-based deletion is not supported by the backend.""" + + def __init__(self, pattern: str): + self.message = f"Pattern-based deletion is not supported. Pattern '{pattern}' cannot be used." + super().__init__(self.message) + + +try: + from .backends.memcached import ( + PatternMatchingNotSupportedError as MemcachedPatternMatchingNotSupportedError, + ) +except (ImportError, AttributeError): + pass + +logger = get_logger() + +T = TypeVar("T", bound=Callable[..., Any]) + + +def cache( + key_prefix: str, + resource_id_name: Any = None, + expiration: int = 3600, + resource_id_type: type | tuple[type, ...] = int, + to_invalidate_extra: dict[str, Any] | None = None, + pattern_to_invalidate_extra: list[str] | None = None, + backend_name: str | None = None, +) -> Callable[[T], T]: + """Cache decorator for FastAPI endpoints. + + Args: + key_prefix: A unique prefix to identify the cache key. 
+ resource_id_name: The name of the resource ID argument. If None, it will be inferred. + expiration: The expiration time for the cached data in seconds. Default is 3600 (1 hour). + resource_id_type: The expected type of the resource ID. Default is int. + to_invalidate_extra: Additional cache keys to invalidate. + pattern_to_invalidate_extra: Patterns for additional cache keys to invalidate. + backend_name: The name of the cache backend to use. If None, the default is used. + + Returns: + A decorator function for FastAPI endpoint functions. + + Example: + @app.get("/users/{user_id}") + @cache(key_prefix="user", resource_id_name="user_id", expiration=600) + async def get_user(request: Request, user_id: int): + # Your logic here + return {"id": user_id, "name": "John Doe"} + """ + + def wrapper(func: T) -> T: + @functools.wraps(func) + async def inner(request: Request, *args: Any, **kwargs: Any) -> Response: + try: + backend = cache_provider.get_backend(backend_name) + except Exception as e: + logger.warning(f"Cache backend not available: {str(e)}") + return cast(Response, await func(request, *args, **kwargs)) + + if resource_id_name: + resource_id = kwargs[resource_id_name] + else: + try: + resource_id = infer_resource_id(kwargs=kwargs, resource_id_type=resource_id_type) + except Exception: + logger.warning("Could not infer resource ID, skipping cache") + return cast(Response, await func(request, *args, **kwargs)) + + formatted_key_prefix = format_prefix(key_prefix, kwargs) + cache_key = f"{formatted_key_prefix}:{resource_id}" + + if request.method == "GET": + if to_invalidate_extra is not None or pattern_to_invalidate_extra is not None: + raise InvalidRequestError("Cache invalidation not allowed on GET requests") + + cached_data = await backend.get(cache_key) + if cached_data: + return cast(Response, cached_data) + + result = await func(request, *args, **kwargs) + + if request.method == "GET": + serializable_data = jsonable_encoder(result) + await 
backend.set(cache_key, serializable_data, expiration) + + else: + await backend.delete(cache_key) + + if to_invalidate_extra is not None: + formatted_extra = format_extra_data(to_invalidate_extra, kwargs) + for prefix, id in formatted_extra.items(): + extra_cache_key = f"{prefix}:{id}" + await backend.delete(extra_cache_key) + + if pattern_to_invalidate_extra is not None: + for pattern in pattern_to_invalidate_extra: + try: + formatted_pattern = format_prefix(pattern, kwargs) + await backend.delete_pattern(formatted_pattern) + except ( + PatternMatchingNotSupportedError, + MemcachedPatternMatchingNotSupportedError, + ) as e: + logger.error(str(e)) + + return cast(Response, result) + + return cast(T, inner) + + return wrapper diff --git a/backend/src/infrastructure/cache/exceptions.py b/backend/src/infrastructure/cache/exceptions.py new file mode 100644 index 00000000..dc1d84a3 --- /dev/null +++ b/backend/src/infrastructure/cache/exceptions.py @@ -0,0 +1,199 @@ +class CacheException(Exception): + """Base exception for all cache-related errors in the system. + + Serves as the parent class for all cache-specific exceptions, + providing a common interface for error handling throughout + the caching infrastructure. + + Args: + message: Detailed error message describing the cache failure. + + Note: + This base class should not be raised directly. Instead, use + specific subclasses that better describe the type of cache error. + + All cache-related exceptions inherit from this base class, + allowing for comprehensive error handling with a single + exception type when needed. + + Example: + ```python + try: + # Cache operations + await cache.set("key", "value") + except CacheException as e: + logger.error(f"Cache operation failed: {e}") + # Handle any cache-related error + ``` + """ + + pass + + +class CacheBackendNotAvailableError(CacheException): + """Raised when the cache backend is not available or unreachable. 
+ + This exception is thrown when the cache backend (Redis, Memcached, etc.) + cannot be reached, is not responding, or has failed health checks. + + Args: + message: Detailed error message about the backend availability issue. + + Note: + This exception typically indicates: + - Network connectivity issues + - Backend service is down or restarting + - Authentication or authorization failures + - Configuration errors preventing connection + - Resource exhaustion on the backend + + Example: + ```python + try: + await cache.ping() + except CacheBackendNotAvailableError: + logger.warning("Cache backend unavailable, falling back to database") + # Implement fallback logic + ``` + """ + + def __init__(self, message: str = "Cache backend is not available."): + self.message = message + super().__init__(self.message) + + +class BackendNotFoundError(CacheException): + """Raised when the specified cache backend is not found or registered. + + This exception occurs when trying to use a cache backend that hasn't + been registered with the cache provider or doesn't exist in the + backend registry. + + Args: + message: Detailed error message about the missing backend. + + Note: + This exception typically indicates: + - Backend name typo in configuration + - Backend not properly registered during initialization + - Missing backend dependencies or imports + - Configuration mismatch between environments + + Example: + ```python + try: + cache = get_cache_backend("nonexistent_backend") + except BackendNotFoundError as e: + logger.error(f"Cache backend not found: {e}") + # Fall back to default backend or raise configuration error + ``` + """ + + def __init__(self, message: str = "Cache backend not found."): + self.message = message + super().__init__(self.message) + + +class CacheIdentificationInferenceError(CacheException): + """Raised when a resource ID cannot be inferred from function arguments. 
+ + This exception occurs in cache decorators when the system cannot + automatically determine the cache key from the function's arguments. + This typically happens with complex argument structures or when + the expected ID parameter is missing. + + Args: + message: Detailed error message about the identification inference failure. + + Note: + This exception typically indicates: + - Function arguments don't contain expected ID fields + - Complex argument structures that can't be automatically parsed + - Missing or incorrectly named parameters + - Need for explicit cache key specification + + Example: + ```python + @cache_decorator(ttl=3600) + async def get_user_profile(user_data: dict): + # This might fail if user_data doesn't contain 'id' field + return process_user_data(user_data) + + # Solution: Use explicit cache key + @cache_decorator(ttl=3600, key="user:{user_id}") + async def get_user_profile(user_id: int, user_data: dict): + return process_user_data(user_data) + ``` + """ + + def __init__(self, message: str = "Could not infer resource ID from function arguments."): + self.message = message + super().__init__(self.message) + + +class InvalidRequestError(CacheException): + """Raised when an invalid request configuration is detected. + + This exception occurs when cache operations receive invalid + configuration parameters, malformed requests, or incompatible + operation settings. + + Args: + message: Detailed error message about the invalid request configuration. 
+ + Note: + This exception typically indicates: + - Invalid cache key format or characters + - Negative or zero expiration times + - Incompatible serialization settings + - Invalid pattern syntax for pattern-based operations + - Malformed cache decorator parameters + + Example: + ```python + try: + # Invalid expiration time + await cache.set("key", "value", expiration=-1) + except InvalidRequestError as e: + logger.error(f"Invalid cache request: {e}") + # Use default expiration or fix the configuration + ``` + """ + + def __init__(self, message: str = "Invalid request configuration for cache."): + self.message = message + super().__init__(self.message) + + +class MissingClientError(CacheException): + """Raised when the cache client is missing or not initialized. + + This exception occurs when attempting to use cache operations + before the cache client has been properly initialized or when + the client becomes unavailable during runtime. + + Args: + message: Detailed error message about the missing client. 
+ + Note: + This exception typically indicates: + - Cache client not initialized during startup + - Client connection lost during operation + - Configuration issues preventing client creation + - Dependency injection failures + - Client cleanup during shutdown + + Example: + ```python + try: + await cache.get("key") + except MissingClientError: + logger.error("Cache client not initialized") + # Initialize client or use fallback + await initialize_cache_client() + ``` + """ + + def __init__(self, message: str = "Cache client is missing or not initialized."): + self.message = message + super().__init__(self.message) diff --git a/backend/src/infrastructure/cache/initialize.py b/backend/src/infrastructure/cache/initialize.py new file mode 100644 index 00000000..825a8ba1 --- /dev/null +++ b/backend/src/infrastructure/cache/initialize.py @@ -0,0 +1,73 @@ +"""Module for initializing the cache backends.""" + +from ..config import CacheBackend +from ..config.settings import get_settings +from . import MEMCACHED_INSTALLED, REDIS_INSTALLED +from .provider import cache_provider + +if MEMCACHED_INSTALLED: + from .backends import MemcachedBackend, MemcachedSettings + +if REDIS_INSTALLED: + from .backends import RedisBackend, RedisSettings + + +async def initialize_cache() -> None: + """Initialize the cache backends. + + This function initializes the cache backends based on the application settings. + It is called during application startup. + """ + settings = get_settings() + + if not settings.CACHE_ENABLED: + return + + if settings.CACHE_BACKEND == CacheBackend.MEMCACHED.value: + if not MEMCACHED_INSTALLED: + raise ImportError("The aiomcache package is not installed. 
Please install it with 'pip install aiomcache'.") + + memcached_settings = MemcachedSettings( + host=settings.CACHE_MEMCACHED_HOST, + port=settings.CACHE_MEMCACHED_PORT, + pool_size=settings.CACHE_MEMCACHED_POOL_SIZE, + connect_timeout=settings.CACHE_MEMCACHED_CONNECT_TIMEOUT, + ) + memcached_backend = MemcachedBackend(settings=memcached_settings) + cache_provider.register_backend(CacheBackend.MEMCACHED.value, memcached_backend, default=True) + + elif settings.CACHE_BACKEND == CacheBackend.REDIS.value: + if not REDIS_INSTALLED: + raise ImportError("The redis package is not installed. Please install it with 'pip install redis'.") + + redis_settings = RedisSettings( + host=settings.CACHE_REDIS_HOST, + port=settings.CACHE_REDIS_PORT, + db=settings.CACHE_REDIS_DB, + password=settings.CACHE_REDIS_PASSWORD, + connect_timeout=settings.CACHE_REDIS_CONNECT_TIMEOUT, + pool_size=settings.CACHE_REDIS_POOL_SIZE, + ) + redis_backend = RedisBackend(settings=redis_settings) + cache_provider.register_backend(CacheBackend.REDIS.value, redis_backend, default=True) + + +async def close_cache() -> None: + """Close all cache connections. + + This function should be called during application shutdown to clean up resources. 
+ """ + settings = get_settings() + + if not settings.CACHE_ENABLED: + return + + if settings.CACHE_BACKEND == CacheBackend.MEMCACHED.value and MEMCACHED_INSTALLED: + backend = cache_provider.get_backend(CacheBackend.MEMCACHED.value) + if hasattr(backend, "client") and hasattr(backend.client, "close"): + await backend.client.close() + + elif settings.CACHE_BACKEND == CacheBackend.REDIS.value and REDIS_INSTALLED: + backend = cache_provider.get_backend(CacheBackend.REDIS.value) + if hasattr(backend, "client") and hasattr(backend.client, "close"): + await backend.client.close() diff --git a/backend/src/infrastructure/cache/provider.py b/backend/src/infrastructure/cache/provider.py new file mode 100644 index 00000000..3c00af4e --- /dev/null +++ b/backend/src/infrastructure/cache/provider.py @@ -0,0 +1,166 @@ +from typing import Any + +from .base import CacheBackend +from .exceptions import BackendNotFoundError + + +class CacheProvider: + """Provider for cache backends. + + This class manages the different cache backends and provides a single point of access + for all cache operations. It supports registering multiple backends and switching + between them at runtime. + """ + + def __init__(self) -> None: + """Initialize the cache provider.""" + self._backends: dict[str, CacheBackend] = {} + self._default_backend: str | None = None + + def register_backend(self, name: str, backend: CacheBackend, default: bool = False) -> None: + """Register a cache backend. + + Args: + name: The name of the backend. + backend: The backend instance. + default: Whether this backend should be the default. + """ + self._backends[name] = backend + if default or self._default_backend is None: + self._default_backend = name + + def get_backend(self, name: str | None = None) -> CacheBackend: + """Get a cache backend by name. + + Args: + name: The name of the backend to get. If None, the default backend is returned. + + Returns: + The requested cache backend. 
+ + Raises: + BackendNotFoundError: If the requested backend is not available. + """ + backend_name = name or self._default_backend + if backend_name is None or backend_name not in self._backends: + raise BackendNotFoundError(f"Backend '{backend_name}' is not available.") + + return self._backends[backend_name] + + def set_default_backend(self, name: str) -> None: + """Set the default backend to use. + + Args: + name: The name of the backend to set as default. + + Raises: + BackendNotFoundError: If the backend does not exist. + """ + if name not in self._backends: + raise BackendNotFoundError(f"Backend '{name}' not found. Cannot set as default.") + + self._default_backend = name + + async def ping_all(self) -> dict[str, bool]: + """Ping all registered backends. + + Returns: + A dictionary mapping backend names to their availability. + """ + results = {} + for name, backend in self._backends.items(): + results[name] = await backend.ping() + return results + + def list_backends(self) -> dict[str, type[CacheBackend]]: + """List all registered backends. + + Returns: + A dictionary mapping backend names to their types. + """ + return {name: type(backend) for name, backend in self._backends.items()} + + @property + def default_backend_name(self) -> str | None: + """Get the name of the default backend. + + Returns: + The name of the default backend, or None if no backends are registered. + """ + return self._default_backend + + +cache_provider = CacheProvider() + + +async def get(key: str, backend_name: str | None = None) -> Any: + """Get a value from the cache. + + Args: + key: The cache key to get. + backend_name: The name of the backend to use. If None, the default backend is used. + + Returns: + The cached value, or None if it doesn't exist. + """ + backend = cache_provider.get_backend(backend_name) + return await backend.get(key) + + +async def set(key: str, value: Any, expiration: int = 3600, backend_name: str | None = None) -> None: + """Set a value in the cache. 
+ + Args: + key: The cache key to set. + value: The value to cache. + expiration: Time in seconds before the key expires (default: 3600). + backend_name: The name of the backend to use. If None, the default backend is used. + """ + backend = cache_provider.get_backend(backend_name) + await backend.set(key, value, expiration) + + +async def delete(key: str, backend_name: str | None = None) -> None: + """Delete a key from the cache. + + Args: + key: The cache key to delete. + backend_name: The name of the backend to use. If None, the default backend is used. + """ + backend = cache_provider.get_backend(backend_name) + await backend.delete(key) + + +async def delete_pattern(pattern: str, backend_name: str | None = None) -> None: + """Delete all keys matching a pattern. + + Args: + pattern: The pattern to match against keys. + backend_name: The name of the backend to use. If None, the default backend is used. + """ + backend = cache_provider.get_backend(backend_name) + await backend.delete_pattern(pattern) + + +async def exists(key: str, backend_name: str | None = None) -> bool: + """Check if a key exists in the cache. + + Args: + key: The cache key to check. + backend_name: The name of the backend to use. If None, the default backend is used. + + Returns: + True if the key exists, False otherwise. + """ + backend = cache_provider.get_backend(backend_name) + return await backend.exists(key) + + +async def clear(backend_name: str | None = None) -> None: + """Clear the entire cache. + + Args: + backend_name: The name of the backend to use. If None, the default backend is used. 
+ """ + backend = cache_provider.get_backend(backend_name) + await backend.clear() diff --git a/backend/src/infrastructure/cache/utils.py b/backend/src/infrastructure/cache/utils.py new file mode 100644 index 00000000..5fbe5cd6 --- /dev/null +++ b/backend/src/infrastructure/cache/utils.py @@ -0,0 +1,111 @@ +import re +from typing import Any + +from .exceptions import CacheIdentificationInferenceError + + +def infer_resource_id(kwargs: dict[str, Any], resource_id_type: type | tuple[type, ...]) -> int | str: + """Infer the resource ID from a dictionary of keyword arguments. + + Args: + kwargs: A dictionary of keyword arguments. + resource_id_type: The expected type of the resource ID (int, str, or tuple of types). + + Returns: + The inferred resource ID. + + Raises: + CacheIdentificationInferenceError: If the resource ID cannot be inferred. + """ + if not isinstance(resource_id_type, tuple): + resource_id_type = (resource_id_type,) + + for arg_name, arg_value in kwargs.items(): + if "id" in arg_name.lower() and any(isinstance(arg_value, t) for t in resource_id_type): + if isinstance(arg_value, int | str): + return arg_value + return str(arg_value) + + for arg_name, arg_value in kwargs.items(): + if any(isinstance(arg_value, t) for t in resource_id_type): + if isinstance(arg_value, int | str): + return arg_value + return str(arg_value) + + raise CacheIdentificationInferenceError() + + +def extract_data_inside_brackets(input_string: str) -> list[str]: + """Extract data inside curly brackets from a given string. + + Args: + input_string: The input string containing data in curly brackets. + + Returns: + A list of strings found inside curly brackets. + """ + data_inside_brackets = re.findall(r"{(.*?)}", input_string) + return data_inside_brackets + + +def construct_data_dict(data_inside_brackets: list[str], kwargs: dict[str, Any]) -> dict[str, Any]: + """Construct a dictionary based on data inside brackets and keyword arguments. 
+ + Args: + data_inside_brackets: A list of keys found inside brackets. + kwargs: A dictionary of keyword arguments. + + Returns: + A dictionary with keys from data_inside_brackets and values from kwargs. + """ + data_dict = {} + for key in data_inside_brackets: + if key not in kwargs: + continue + data_dict[key] = kwargs[key] + return data_dict + + +def format_prefix(prefix: str, kwargs: dict[str, Any]) -> str: + """Format a prefix using keyword arguments. + + Args: + prefix: The prefix template to format. + kwargs: A dictionary of keyword arguments. + + Returns: + The formatted prefix. + """ + data_inside_brackets = extract_data_inside_brackets(prefix) + data_dict = construct_data_dict(data_inside_brackets, kwargs) + formatted_prefix = prefix.format(**data_dict) + return formatted_prefix + + +def format_extra_data(to_invalidate_extra: dict[str, str], kwargs: dict[str, Any]) -> dict[str, Any]: + """Format extra data for cache invalidation. + + Args: + to_invalidate_extra: A dictionary of cache key prefixes and ID templates. + kwargs: A dictionary of keyword arguments. + + Returns: + A dictionary of formatted prefixes and IDs. 
+ """ + formatted_extra = {} + for prefix, id_name in to_invalidate_extra.items(): + if id_name in kwargs: + formatted_extra[prefix] = kwargs[id_name] + continue + + if "{" in id_name: + id_vars = extract_data_inside_brackets(id_name) + if not id_vars: + continue + + id_var = id_vars[0] + if id_var in kwargs: + formatted_id = id_name.format(**{id_var: kwargs[id_var]}) + formatted_extra[prefix] = formatted_id + + return formatted_extra diff --git a/backend/src/infrastructure/config/__init__.py b/backend/src/infrastructure/config/__init__.py new file mode 100644 index 00000000..078bcd11 --- /dev/null +++ b/backend/src/infrastructure/config/__init__.py @@ -0,0 +1,12 @@ +from .enums import CacheBackend, LogFormat, LogLevel, SessionBackend, TaskiqBrokerType +from .settings import get_settings, settings + +__all__ = [ + "settings", + "get_settings", + "CacheBackend", + "SessionBackend", + "TaskiqBrokerType", + "LogLevel", + "LogFormat", +] diff --git a/backend/src/infrastructure/config/enums.py b/backend/src/infrastructure/config/enums.py new file mode 100644 index 00000000..047fd452 --- /dev/null +++ b/backend/src/infrastructure/config/enums.py @@ -0,0 +1,60 @@ +"""Infrastructure configuration enums.""" + +from enum import StrEnum + + +class CacheBackend(StrEnum): + """Cache backend types. + + Supported backends for caching and rate limiting. + """ + + REDIS = "redis" + MEMCACHED = "memcached" + MEMORY = "memory" + + +class SessionBackend(StrEnum): + """Session storage backend types. + + Supported backends for session storage. + """ + + REDIS = "redis" + MEMCACHED = "memcached" + MEMORY = "memory" + + +class TaskiqBrokerType(StrEnum): + """Taskiq message broker types. + + Supported message brokers for async task processing. + """ + + REDIS = "redis" + RABBITMQ = "rabbitmq" + + +class LogLevel(StrEnum): + """Log level types. + + Standard Python logging levels. 
+ """ + + DEBUG = "DEBUG" + INFO = "INFO" + WARNING = "WARNING" + ERROR = "ERROR" + CRITICAL = "CRITICAL" + + +class LogFormat(StrEnum): + """Log format types. + + Supported log output formats. + """ + + SIMPLE = "simple" + DETAILED = "detailed" + STRUCTURED = "structured" + JSON = "json" diff --git a/backend/src/infrastructure/config/settings.py b/backend/src/infrastructure/config/settings.py new file mode 100644 index 00000000..dae51ae9 --- /dev/null +++ b/backend/src/infrastructure/config/settings.py @@ -0,0 +1,396 @@ +import logging +import os +from enum import StrEnum + +from pydantic_settings import BaseSettings +from starlette.config import Config + +from .enums import CacheBackend, LogFormat, LogLevel, SessionBackend, TaskiqBrokerType + +logger = logging.getLogger(__name__) + +current_file_dir = os.path.dirname(os.path.realpath(__file__)) +project_root = os.path.abspath(os.path.join(current_file_dir, "..", "..", "..", "..")) + +env_paths = [ + "/app/.env", + os.path.join(project_root, ".env"), + "/.env", +] + +env_path = next((path for path in env_paths if os.path.isfile(path)), env_paths[0]) +logger.info(f"Using environment file at: {env_path}") + +config = Config(env_path) + + +class EnvironmentOption(StrEnum): + """Environment options for the application.""" + + PRODUCTION = "production" + STAGING = "staging" + DEVELOPMENT = "development" + LOCAL = "local" + + +class EnvironmentSettings(BaseSettings): + """Environment-related settings.""" + + ENVIRONMENT: EnvironmentOption = config("ENVIRONMENT", default=EnvironmentOption.DEVELOPMENT, cast=EnvironmentOption) + + +class DatabaseSettings(BaseSettings): + """Database-related settings.""" + + POSTGRES_USER: str = config("POSTGRES_USER", default="postgres") + POSTGRES_PASSWORD: str = config("POSTGRES_PASSWORD", default="postgres") + POSTGRES_SERVER: str = config("POSTGRES_SERVER", default="localhost") + POSTGRES_PORT: int = config("POSTGRES_PORT", default=5432) + POSTGRES_DB: str = config("POSTGRES_DB", 
default="postgres") + POSTGRES_SYNC_PREFIX: str = config("POSTGRES_SYNC_PREFIX", default="postgresql://") + POSTGRES_ASYNC_PREFIX: str = config("POSTGRES_ASYNC_PREFIX", default="postgresql+asyncpg://") + CREATE_TABLES_ON_STARTUP: bool = config("CREATE_TABLES_ON_STARTUP", default=True, cast=bool) + + POSTGRES_POOL_SIZE: int = config("POSTGRES_POOL_SIZE", default=20, cast=int) + POSTGRES_MAX_OVERFLOW: int = config("POSTGRES_MAX_OVERFLOW", default=0, cast=int) + + @property + def DATABASE_URL(self) -> str: + """Get the full database URL. + + Checks for DATABASE_URL environment variable first (production pattern), + then falls back to constructing from individual components (development pattern). + """ + direct_url = config("DATABASE_URL", default=None) + if direct_url: + return direct_url + + return ( + f"{self.POSTGRES_ASYNC_PREFIX}{self.POSTGRES_USER}:" + f"{self.POSTGRES_PASSWORD}@{self.POSTGRES_SERVER}:" + f"{self.POSTGRES_PORT}/{self.POSTGRES_DB}" + ) + + +class CacheSettings(BaseSettings): + """Cache-related settings. + + This class defines settings for cache connections and behavior across + the application. + + Attributes: + CACHE_ENABLED: Whether to enable caching. Default is True. + CACHE_BACKEND: The cache backend to use. Default is "memcached". + + # Memcached settings + CACHE_MEMCACHED_HOST: Memcached server hostname. Default is "localhost". + CACHE_MEMCACHED_PORT: Memcached server port. Default is 11211. + CACHE_MEMCACHED_POOL_SIZE: Maximum number of connections in the pool. Default is 10. + CACHE_MEMCACHED_CONNECT_TIMEOUT: Connection timeout in seconds. Default is 5. + Note: This is not currently used by aiomcache.Client but is + kept for API consistency with other cache backends. + + # Redis settings + CACHE_REDIS_HOST: Redis server hostname. Default is "localhost". + CACHE_REDIS_PORT: Redis server port. Default is 6379. + CACHE_REDIS_DB: Redis database number. Default is 0. + CACHE_REDIS_PASSWORD: Redis server password. Default is None. 
+ CACHE_REDIS_CONNECT_TIMEOUT: Connection timeout in seconds. Default is 5. + CACHE_REDIS_POOL_SIZE: Maximum number of connections in the pool. Default is 10. + + DEFAULT_CACHE_EXPIRATION: Default expiration time for cache entries in seconds. + Default is 3600 (1 hour). + """ + + CACHE_ENABLED: bool = config("CACHE_ENABLED", default=True, cast=bool) + CACHE_BACKEND: str = config("CACHE_BACKEND", default=CacheBackend.MEMCACHED.value) + + CACHE_MEMCACHED_HOST: str = config("CACHE_MEMCACHED_HOST", default="localhost") + CACHE_MEMCACHED_PORT: int = config("CACHE_MEMCACHED_PORT", default=11211, cast=int) + CACHE_MEMCACHED_POOL_SIZE: int = config("CACHE_MEMCACHED_POOL_SIZE", default=10, cast=int) + CACHE_MEMCACHED_CONNECT_TIMEOUT: int = config("CACHE_MEMCACHED_CONNECT_TIMEOUT", default=5, cast=int) + + CACHE_REDIS_HOST: str = config("CACHE_REDIS_HOST", default="localhost") + CACHE_REDIS_PORT: int = config("CACHE_REDIS_PORT", default=6379, cast=int) + CACHE_REDIS_DB: int = config("CACHE_REDIS_DB", default=0, cast=int) + CACHE_REDIS_PASSWORD: str | None = config("CACHE_REDIS_PASSWORD", default=None) + CACHE_REDIS_CONNECT_TIMEOUT: int = config("CACHE_REDIS_CONNECT_TIMEOUT", default=5, cast=int) + CACHE_REDIS_POOL_SIZE: int = config("CACHE_REDIS_POOL_SIZE", default=10, cast=int) + + DEFAULT_CACHE_EXPIRATION: int = config("DEFAULT_CACHE_EXPIRATION", default=3600, cast=int) + + CLIENT_CACHE_ENABLED: bool = config("CLIENT_CACHE_ENABLED", default=True, cast=bool) + CLIENT_CACHE_MAX_AGE: int = config("CLIENT_CACHE_MAX_AGE", default=60, cast=int) + + +class RateLimiterSettings(BaseSettings): + """Rate limiter settings. + + This class defines settings for rate limiting connections and behavior across + the application. + + Attributes: + RATE_LIMITER_ENABLED: Whether to enable rate limiting. Default is True. + RATE_LIMITER_BACKEND: The rate limiter backend to use. Default is "memcached". + RATE_LIMITER_FAIL_OPEN: Whether to fail open (allow requests) when errors occur. 
Default is True. + + # Default rate limit settings + DEFAULT_RATE_LIMIT_LIMIT: Default number of requests allowed. Default is 100. + DEFAULT_RATE_LIMIT_PERIOD: Default period in seconds. Default is 60. + + # Memcached settings + RATE_LIMITER_MEMCACHED_HOST: Memcached server hostname. Default is "localhost". + RATE_LIMITER_MEMCACHED_PORT: Memcached server port. Default is 11211. + RATE_LIMITER_MEMCACHED_POOL_SIZE: Maximum number of connections in the pool. Default is 10. + + # Redis settings + RATE_LIMITER_REDIS_HOST: Redis server hostname. Default is "localhost". + RATE_LIMITER_REDIS_PORT: Redis server port. Default is 6379. + RATE_LIMITER_REDIS_DB: Redis database number. Default is 1. + RATE_LIMITER_REDIS_PASSWORD: Redis server password. Default is None. + RATE_LIMITER_REDIS_CONNECT_TIMEOUT: Connection timeout in seconds. Default is 5. + RATE_LIMITER_REDIS_POOL_SIZE: Maximum number of connections in the pool. Default is 10. + """ + + RATE_LIMITER_ENABLED: bool = config("RATE_LIMITER_ENABLED", default=True, cast=bool) + RATE_LIMITER_BACKEND: str = config("RATE_LIMITER_BACKEND", default=CacheBackend.MEMCACHED.value) + RATE_LIMITER_FAIL_OPEN: bool = config("RATE_LIMITER_FAIL_OPEN", default=True, cast=bool) + + DEFAULT_RATE_LIMIT_LIMIT: int = config("DEFAULT_RATE_LIMIT_LIMIT", default=100, cast=int) + DEFAULT_RATE_LIMIT_PERIOD: int = config("DEFAULT_RATE_LIMIT_PERIOD", default=60, cast=int) + + RATE_LIMITER_MEMCACHED_HOST: str = config("RATE_LIMITER_MEMCACHED_HOST", default="localhost") + RATE_LIMITER_MEMCACHED_PORT: int = config("RATE_LIMITER_MEMCACHED_PORT", default=11211, cast=int) + RATE_LIMITER_MEMCACHED_POOL_SIZE: int = config("RATE_LIMITER_MEMCACHED_POOL_SIZE", default=10, cast=int) + + RATE_LIMITER_REDIS_HOST: str = config("RATE_LIMITER_REDIS_HOST", default="localhost") + RATE_LIMITER_REDIS_PORT: int = config("RATE_LIMITER_REDIS_PORT", default=6379, cast=int) + RATE_LIMITER_REDIS_DB: int = config("RATE_LIMITER_REDIS_DB", default=1, cast=int) + 
RATE_LIMITER_REDIS_PASSWORD: str | None = config("RATE_LIMITER_REDIS_PASSWORD", default=None) + RATE_LIMITER_REDIS_CONNECT_TIMEOUT: int = config("RATE_LIMITER_REDIS_CONNECT_TIMEOUT", default=5, cast=int) + RATE_LIMITER_REDIS_POOL_SIZE: int = config("RATE_LIMITER_REDIS_POOL_SIZE", default=10, cast=int) + + +class CORSSettings(BaseSettings): + """CORS-related settings.""" + + CORS_ENABLED: bool = config("CORS_ENABLED", default=True, cast=bool) + CORS_ORIGINS: str = config("CORS_ORIGINS", default="*") + CORS_ALLOW_CREDENTIALS: bool = config("CORS_ALLOW_CREDENTIALS", default=True, cast=bool) + + @property + def CORS_ORIGINS_LIST(self) -> list[str]: + """Get CORS origins as a list.""" + if not self.CORS_ORIGINS: + return ["*"] + return [x.strip() for x in self.CORS_ORIGINS.split(",") if x.strip()] + + CORS_ALLOW_METHODS: str = config("CORS_ALLOW_METHODS", default="*") + CORS_ALLOW_HEADERS: str = config("CORS_ALLOW_HEADERS", default="*") + + +class CompressionSettings(BaseSettings): + """Compression-related settings.""" + + GZIP_ENABLED: bool = config("GZIP_ENABLED", default=True, cast=bool) + GZIP_MINIMUM_SIZE: int = config("GZIP_MINIMUM_SIZE", default=1000, cast=int) + + +class APIDocSettings(BaseSettings): + """API documentation settings.""" + + ENABLE_DOCS_IN_PRODUCTION: bool = config("ENABLE_DOCS_IN_PRODUCTION", default=False, cast=bool) + OPENAPI_PREFIX: str = config("OPENAPI_PREFIX", default="") + DOCS_URL: str = config("DOCS_URL", default="/docs") + REDOC_URL: str = config("REDOC_URL", default="/redoc") + OPENAPI_URL: str = config("OPENAPI_URL", default="/openapi.json") + + API_TITLE: str = config("API_TITLE", default="") + API_SUMMARY: str = config("API_SUMMARY", default="") + API_DESCRIPTION: str = config("API_DESCRIPTION", default="") + API_VERSION: str = config("API_VERSION", default="") + API_TERMS_OF_SERVICE: str = config("API_TERMS_OF_SERVICE", default="") + + API_CONTACT_NAME: str = config("API_CONTACT_NAME", default="") + API_CONTACT_URL: str = 
config("API_CONTACT_URL", default="") + API_CONTACT_EMAIL: str = config("API_CONTACT_EMAIL", default="") + + API_LICENSE_NAME: str = config("API_LICENSE_NAME", default="") + API_LICENSE_URL: str = config("API_LICENSE_URL", default="") + API_LICENSE_IDENTIFIER: str = config("API_LICENSE_IDENTIFIER", default="") + + API_TAGS_METADATA: str = config("API_TAGS_METADATA", default="[]") + + +class AuthSettings(BaseSettings): + """Authentication-related settings.""" + + SECRET_KEY: str = config("SECRET_KEY", default="insecure-secret-key-change-this") + ALGORITHM: str = config("ALGORITHM", default="HS256") + ACCESS_TOKEN_EXPIRE_MINUTES: int = config("ACCESS_TOKEN_EXPIRE_MINUTES", default=30, cast=int) + REFRESH_TOKEN_EXPIRE_DAYS: int = config("REFRESH_TOKEN_EXPIRE_DAYS", default=7, cast=int) + + SESSION_TIMEOUT_MINUTES: int = config("SESSION_TIMEOUT_MINUTES", default=30, cast=int) + SESSION_CLEANUP_INTERVAL_MINUTES: int = config("SESSION_CLEANUP_INTERVAL_MINUTES", default=15, cast=int) + MAX_SESSIONS_PER_USER: int = config("MAX_SESSIONS_PER_USER", default=5, cast=int) + SESSION_SECURE_COOKIES: bool = config("SESSION_SECURE_COOKIES", default=True, cast=bool) + SESSION_BACKEND: str = config("SESSION_BACKEND", default=SessionBackend.REDIS.value) + SESSION_COOKIE_MAX_AGE: int = config("SESSION_COOKIE_MAX_AGE", default=86400, cast=int) + + CSRF_ENABLED: bool = config("CSRF_ENABLED", default=True, cast=bool) + + LOGIN_MAX_ATTEMPTS: int = config("LOGIN_MAX_ATTEMPTS", default=5, cast=int) + LOGIN_WINDOW_MINUTES: int = config("LOGIN_WINDOW_MINUTES", default=15, cast=int) + + OAUTH_GOOGLE_CLIENT_ID: str = config("OAUTH_GOOGLE_CLIENT_ID", default="") + OAUTH_GOOGLE_CLIENT_SECRET: str = config("OAUTH_GOOGLE_CLIENT_SECRET", default="") + OAUTH_GITHUB_CLIENT_ID: str = config("OAUTH_GITHUB_CLIENT_ID", default="") + OAUTH_GITHUB_CLIENT_SECRET: str = config("OAUTH_GITHUB_CLIENT_SECRET", default="") + OAUTH_REDIRECT_BASE_URL: str = config("OAUTH_REDIRECT_BASE_URL", 
default="http://localhost:8000") + + +class APISettings(BaseSettings): + """API-related settings.""" + + API_PREFIX: str = "/api" + + +class AppSettings(BaseSettings): + """Application-related settings.""" + + # Note: For API documentation, prefer using API_* fields in APIDocSettings + APP_NAME: str = "FastAPI Boilerplate" + APP_DESCRIPTION: str = "Modular FastAPI starter" + DEBUG: bool = config("DEBUG", default=False, cast=bool) + VERSION: str = "0.1.0" + CONTACT_NAME: str = config("CONTACT_NAME", default="Support") + CONTACT_EMAIL: str = config("CONTACT_EMAIL", default="support@example.com") + LICENSE_NAME: str = config("LICENSE_NAME", default="All rights reserved.") + + +class AdminSettings(BaseSettings): + """Admin user settings for initial setup.""" + + ADMIN_NAME: str = config("ADMIN_NAME", default="") + ADMIN_EMAIL: str = config("ADMIN_EMAIL", default="") + ADMIN_USERNAME: str = config("ADMIN_USERNAME", default="") + ADMIN_PASSWORD: str = config("ADMIN_PASSWORD", default="") + DEFAULT_TIER_NAME: str = config("DEFAULT_TIER_NAME", default="free") + + +class SQLAdminSettings(BaseSettings): + """SQLAdmin interface settings.""" + + ADMIN_ENABLED: bool = config("ADMIN_ENABLED", default=True, cast=bool) + + +class SecuritySettings(BaseSettings): + """Security validation settings.""" + + PRODUCTION_SECURITY_VALIDATION_ENABLED: bool = config("PRODUCTION_SECURITY_VALIDATION_ENABLED", default=True, cast=bool) + PRODUCTION_SECURITY_STRICT_MODE: bool = config("PRODUCTION_SECURITY_STRICT_MODE", default=False, cast=bool) + SECURITY_HEADERS_ENABLED: bool = config("SECURITY_HEADERS_ENABLED", default=True, cast=bool) + + +class LoggingSettings(BaseSettings): + """Centralized logging configuration settings.""" + + LOG_LEVEL: str = config("LOG_LEVEL", default=LogLevel.INFO.value) + LOG_FORMAT: str = config("LOG_FORMAT", default=LogFormat.STRUCTURED.value) + + LOG_CONSOLE_ENABLED: bool = config("LOG_CONSOLE_ENABLED", default=True, cast=bool) + LOG_FILE_ENABLED: bool = 
config("LOG_FILE_ENABLED", default=False, cast=bool) + LOG_FILE_PATH: str = config("LOG_FILE_PATH", default="logs/app.log") + LOG_FILE_MAX_SIZE: int = config("LOG_FILE_MAX_SIZE", default=10485760, cast=int) + LOG_FILE_BACKUP_COUNT: int = config("LOG_FILE_BACKUP_COUNT", default=5, cast=int) + + LOG_CORRELATION_ID: bool = config("LOG_CORRELATION_ID", default=True, cast=bool) + LOG_STRUCTURED_CONTEXT: bool = config("LOG_STRUCTURED_CONTEXT", default=True, cast=bool) + LOG_PERFORMANCE_METRICS: bool = config("LOG_PERFORMANCE_METRICS", default=False, cast=bool) + + LOG_SQL_QUERIES: bool = config("LOG_SQL_QUERIES", default=False, cast=bool) + LOG_INCLUDE_STACKTRACE: bool = config("LOG_INCLUDE_STACKTRACE", default=True, cast=bool) + + LOG_DEVELOPMENT_VERBOSE: bool = config("LOG_DEVELOPMENT_VERBOSE", default=True, cast=bool) + LOG_PRODUCTION_OPTIMIZE: bool = config("LOG_PRODUCTION_OPTIMIZE", default=True, cast=bool) + + @property + def LOG_LEVEL_INT(self) -> int: + """Convert string log level to integer.""" + level_map = { + LogLevel.DEBUG.value: logging.DEBUG, + LogLevel.INFO.value: logging.INFO, + LogLevel.WARNING.value: logging.WARNING, + LogLevel.ERROR.value: logging.ERROR, + LogLevel.CRITICAL.value: logging.CRITICAL, + } + return level_map.get(self.LOG_LEVEL.upper(), logging.INFO) + + +class TaskiqSettings(BaseSettings): + """Taskiq async task queue settings.""" + + TASKIQ_ENABLED: bool = config("TASKIQ_ENABLED", default=True, cast=bool) + TASKIQ_BROKER_TYPE: str = config("TASKIQ_BROKER_TYPE", default=TaskiqBrokerType.REDIS.value) + + TASKIQ_REDIS_HOST: str = config("TASKIQ_REDIS_HOST", default="localhost") + TASKIQ_REDIS_PORT: int = config("TASKIQ_REDIS_PORT", default=6379, cast=int) + TASKIQ_REDIS_DB: int = config("TASKIQ_REDIS_DB", default=3, cast=int) + TASKIQ_REDIS_PASSWORD: str | None = config("TASKIQ_REDIS_PASSWORD", default=None) + + TASKIQ_RABBITMQ_HOST: str = config("TASKIQ_RABBITMQ_HOST", default="localhost") + TASKIQ_RABBITMQ_PORT: int = 
config("TASKIQ_RABBITMQ_PORT", default=5672, cast=int) + TASKIQ_RABBITMQ_USER: str = config("TASKIQ_RABBITMQ_USER", default="guest") + TASKIQ_RABBITMQ_PASSWORD: str = config("TASKIQ_RABBITMQ_PASSWORD", default="guest") + TASKIQ_RABBITMQ_VHOST: str = config("TASKIQ_RABBITMQ_VHOST", default="/") + + TASKIQ_WORKER_CONCURRENCY: int = config("TASKIQ_WORKER_CONCURRENCY", default=2, cast=int) + TASKIQ_MAX_TASKS_PER_WORKER: int = config("TASKIQ_MAX_TASKS_PER_WORKER", default=1000, cast=int) + + @property + def TASKIQ_BROKER_URL(self) -> str: + """Generate broker URL based on configured backend.""" + if self.TASKIQ_BROKER_TYPE == TaskiqBrokerType.REDIS.value: + password_part = f":{self.TASKIQ_REDIS_PASSWORD}@" if self.TASKIQ_REDIS_PASSWORD else "" + return f"redis://{password_part}{self.TASKIQ_REDIS_HOST}:{self.TASKIQ_REDIS_PORT}/{self.TASKIQ_REDIS_DB}" + elif self.TASKIQ_BROKER_TYPE == TaskiqBrokerType.RABBITMQ.value: + vhost = self.TASKIQ_RABBITMQ_VHOST + if vhost.startswith("/"): + vhost = vhost[1:] + return f"amqp://{self.TASKIQ_RABBITMQ_USER}:{self.TASKIQ_RABBITMQ_PASSWORD}@{self.TASKIQ_RABBITMQ_HOST}:{self.TASKIQ_RABBITMQ_PORT}/{vhost}" + else: + raise ValueError(f"Unsupported broker type: {self.TASKIQ_BROKER_TYPE}") + + +class Settings( + EnvironmentSettings, + DatabaseSettings, + CacheSettings, + RateLimiterSettings, + CORSSettings, + CompressionSettings, + APIDocSettings, + AuthSettings, + APISettings, + AppSettings, + AdminSettings, + SQLAdminSettings, + SecuritySettings, + LoggingSettings, + TaskiqSettings, +): + """Main settings class that combines all setting categories.""" + + pass + + +settings = Settings() + + +def get_settings() -> Settings: + """Get application settings. + + Returns: + The application settings. 
+ """ + return settings diff --git a/backend/src/infrastructure/database/__init__.py b/backend/src/infrastructure/database/__init__.py new file mode 100644 index 00000000..6b9aaa3c --- /dev/null +++ b/backend/src/infrastructure/database/__init__.py @@ -0,0 +1,3 @@ +from .session import Base, async_session, engine + +__all__ = ["Base", "engine", "async_session"] diff --git a/backend/src/infrastructure/database/models.py b/backend/src/infrastructure/database/models.py new file mode 100644 index 00000000..136212bb --- /dev/null +++ b/backend/src/infrastructure/database/models.py @@ -0,0 +1,172 @@ +import uuid as uuid_pkg +from datetime import UTC, datetime + +from sqlalchemy import DateTime, text +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column +from sqlalchemy.types import TIMESTAMP + + +class UUIDMixin(MappedAsDataclass): + """Mixin to add UUID primary key to database models. + + This mixin provides a standardized UUID primary key for all database models, + ensuring consistent identification across the application with PostgreSQL's + built-in UUID generation capabilities. + + Features: + - UUID4 primary key generation + - Server-side UUID generation fallback + - Automatic initialization exclusion + - PostgreSQL-optimized UUID storage + - Consistent identification across services + + Attributes: + uuid: The UUID primary key field with automatic generation. + + Note: + The UUID is generated using Python's uuid4() function by default, + with a PostgreSQL server-side fallback using gen_random_uuid(). + This ensures UUID generation even if the client-side generation fails. + + The field is excluded from dataclass initialization (init=False) + to prevent manual UUID assignment during model creation. 
+ + Example: + ```python + from sqlalchemy.orm import DeclarativeBase + + class MyModel(UUIDMixin, DeclarativeBase): + name: Mapped[str] = mapped_column(String(100)) + + # Usage + model = MyModel(name="example") + # model.uuid is automatically generated + ``` + """ + + uuid: Mapped[uuid_pkg.UUID] = mapped_column( + UUID, + primary_key=True, + default=uuid_pkg.uuid4, + server_default=text("gen_random_uuid()"), + init=False, + ) + + +class TimestampMixin(MappedAsDataclass): + """Mixin for adding created_at and updated_at timestamp columns. + + This mixin provides automatic timestamp tracking for database models, + recording when records are created and last updated with timezone-aware + datetime values. + + Features: + - Automatic creation timestamp + - Automatic update timestamp tracking + - Timezone-aware datetime storage + - UTC timezone standardization + - Dataclass integration with init exclusion + + Attributes: + created_at: Timestamp when the record was created. + updated_at: Timestamp when the record was last updated. + + Note: + Both timestamps use UTC timezone for consistency across different + deployment environments. The created_at field is never null, while + updated_at can be null for newly created records. + + The timestamps are excluded from dataclass initialization (init=False) + to prevent manual timestamp manipulation during model creation. + + For automatic updated_at tracking, you may need to implement + update triggers or handle updates in your application logic. 
+ + Example: + ```python + from sqlalchemy.orm import DeclarativeBase + + class MyModel(TimestampMixin, DeclarativeBase): + name: Mapped[str] = mapped_column(String(100)) + + # Usage + model = MyModel(name="example") + # model.created_at and model.updated_at are automatically set + ``` + """ + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + default_factory=lambda: datetime.now(UTC), + nullable=False, + init=False, + ) + + updated_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), + default_factory=lambda: datetime.now(UTC), + nullable=True, + init=False, + ) + + +class SoftDeleteMixin(MappedAsDataclass): + """Mixin to add soft delete functionality to database models. + + This mixin provides soft deletion capabilities, allowing records to be + marked as deleted without actually removing them from the database. + This is useful for audit trails, data recovery, and maintaining + referential integrity. + + Features: + - Soft delete with timestamp tracking + - Boolean flag for quick deletion checks + - Audit trail preservation + - Data recovery capabilities + - Referential integrity maintenance + + Attributes: + deleted_at: Timestamp when the record was soft deleted. + is_deleted: Boolean flag indicating if the record is deleted. + + Note: + The deleted_at timestamp is nullable and only set when a record + is soft deleted. The is_deleted boolean provides a quick way to + check deletion status without null checks. + + Both fields are excluded from dataclass initialization (init=False) + to prevent manual deletion state manipulation during model creation. + + You'll need to implement query filters to exclude soft-deleted + records in your application logic unless you specifically want + to include them. 
+ + Example: + ```python + from sqlalchemy.orm import DeclarativeBase + + class MyModel(SoftDeleteMixin, DeclarativeBase): + name: Mapped[str] = mapped_column(String(100)) + + # Usage + model = MyModel(name="example") + + # Soft delete + model.deleted_at = datetime.now(UTC) + model.is_deleted = True + + # Query active records + active_records = session.query(MyModel).filter(MyModel.is_deleted == False) + ``` + """ + + deleted_at: Mapped[datetime | None] = mapped_column( + TIMESTAMP(timezone=True), + nullable=True, + init=False, + ) + is_deleted: Mapped[bool] = mapped_column( + default=False, + init=False, + ) diff --git a/backend/src/infrastructure/database/session.py b/backend/src/infrastructure/database/session.py new file mode 100644 index 00000000..04cf6f25 --- /dev/null +++ b/backend/src/infrastructure/database/session.py @@ -0,0 +1,128 @@ +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass + +from ..config.settings import settings + +engine = create_async_engine( + settings.DATABASE_URL, + echo=False, + future=True, + pool_size=settings.POSTGRES_POOL_SIZE, + max_overflow=settings.POSTGRES_MAX_OVERFLOW, +) + +local_session = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) + + +class Base(DeclarativeBase, MappedAsDataclass): + """Base class for all database models with comprehensive functionality. + + This base class combines SQLAlchemy's DeclarativeBase with MappedAsDataclass + to provide a powerful foundation for all database models in the application. 
+ + Features: + - Automatic dataclass generation from SQLAlchemy models + - Type-safe model definitions with Mapped annotations + - Consistent model structure across the application + - Built-in serialization capabilities + - Integration with modern SQLAlchemy patterns + + Note: + All database models should inherit from this base class to ensure + consistent behavior and access to shared functionality. + + The MappedAsDataclass mixin automatically generates dataclass + methods (__init__, __repr__, __eq__, etc.) based on the model's + mapped columns. + + Example: + ```python + from sqlalchemy.orm import Mapped, mapped_column + from sqlalchemy import String, Integer + + class User(Base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[str] = mapped_column(String(100)) + email: Mapped[str] = mapped_column(String(255), unique=True) + + # Usage + user = User(name="John Doe", email="john@example.com") + ``` + """ + + pass + + +async def async_session() -> AsyncGenerator[AsyncSession, None]: + """Dependency for database session management with proper lifecycle. + + This function provides an async database session for use in FastAPI + dependencies and other async contexts. It ensures proper session + lifecycle management with automatic cleanup. + + Yields: + AsyncSession: A configured async database session. + + Note: + This function is designed to be used as a FastAPI dependency + via Depends(async_session). It automatically handles session + creation, lifecycle management, and cleanup. 
+ + The session is configured with: + - expire_on_commit=False for better performance + - Automatic transaction management + - Proper cleanup on context exit + + Example: + ```python + from fastapi import Depends + from sqlalchemy.ext.asyncio import AsyncSession + + @app.get("/users/") + async def get_users(db: AsyncSession = Depends(async_session)): + result = await db.execute(select(User)) + return result.scalars().all() + ``` + """ + async_get_db = local_session + async with async_get_db() as db: + yield db + + +async def create_tables() -> None: + """Create all tables in the database if they don't exist. + + This function creates all database tables defined by the models + that inherit from the Base class. It's typically used during + application initialization or database setup. + + Note: + This function is idempotent - it will only create tables that + don't already exist. Existing tables are left unchanged. + + The function uses SQLAlchemy's metadata.create_all() method + within an async transaction for safe table creation. + + For production deployments, consider using migration tools + like Alembic instead of this function for better control + over database schema changes. + + Example: + ```python + # In application startup + async def startup_event(): + await create_tables() + logger.info("Database tables created successfully") + + # Or in a setup script + if __name__ == "__main__": + import asyncio + asyncio.run(create_tables()) + ``` + """ + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) diff --git a/backend/src/infrastructure/logging/__init__.py b/backend/src/infrastructure/logging/__init__.py new file mode 100644 index 00000000..6a41505b --- /dev/null +++ b/backend/src/infrastructure/logging/__init__.py @@ -0,0 +1,34 @@ +"""Centralized logging infrastructure. + +This module provides a unified logging system that integrates with the application's +settings and provides environment-aware configuration. 
It replaces scattered +logging.getLogger(__name__) calls with a centralized, configurable system. + +Key Features: +- Environment-aware logging configuration +- Integration with application settings +- Structured logging support +- Consistent formatting across all modules +- Performance optimized for production + +Usage: + ```python + from infrastructure.logging import get_logger + + logger = get_logger() # Auto-detects module name + logger.info("Application started") + + # Or with explicit name + logger = get_logger("my.module") + logger.debug("Debug information", extra={"user_id": 123}) + ``` +""" + +from .config import setup_logging_configuration +from .factory import configure_logging, get_logger + +__all__ = [ + "get_logger", + "configure_logging", + "setup_logging_configuration", +] diff --git a/backend/src/infrastructure/logging/config.py b/backend/src/infrastructure/logging/config.py new file mode 100644 index 00000000..cb373e74 --- /dev/null +++ b/backend/src/infrastructure/logging/config.py @@ -0,0 +1,350 @@ +"""Logging configuration module for environment-aware setup. + +This module provides the main configuration logic that sets up logging +based on application settings and environment. It intelligently configures +different handlers, formatters, and levels based on the deployment context. + +Configuration Logic: +- Development: Verbose console logging with colors +- Staging: Structured logging with file output +- Production: Optimized logging with JSON format +- Testing: Minimal logging to avoid test output noise +""" + +import contextvars +import inspect +import logging +import logging.config +import threading +import uuid + +from ..config import LogFormat +from ..config.settings import EnvironmentOption, get_settings +from .handlers import ( + create_console_handler, + create_file_handler, + create_null_handler, +) + + +def setup_logging_configuration() -> None: + """Set up logging configuration based on application settings. 
+ + This function configures the root logger and sets up appropriate + handlers based on the current environment and settings. It should + be called once during application startup. + + Configuration by Environment: + - Development: Console with colors, detailed format, DEBUG level + - Staging: Console + file, structured format, INFO level + - Production: Console + file, JSON format, WARNING level + - Testing: Minimal output to avoid noise + """ + settings = get_settings() + + logging.getLogger().handlers.clear() + + if settings.ENVIRONMENT == EnvironmentOption.DEVELOPMENT: + _configure_development_logging(settings) + elif settings.ENVIRONMENT == EnvironmentOption.STAGING: + _configure_staging_logging(settings) + elif settings.ENVIRONMENT == EnvironmentOption.PRODUCTION: + _configure_production_logging(settings) + else: + _configure_development_logging(settings) + + root_logger = logging.getLogger() + root_logger.setLevel(settings.LOG_LEVEL_INT) + + if settings.ENVIRONMENT == EnvironmentOption.PRODUCTION: + _configure_noisy_loggers() + + +def _configure_development_logging(settings) -> None: + """Configure logging for development environment. 
+ + Features: + - Colored console output for better readability + - Detailed formatting with timestamps + - DEBUG level for comprehensive information + - Optional file logging if enabled + """ + handlers = [] + + if settings.LOG_CONSOLE_ENABLED: + console_level = logging.DEBUG if settings.LOG_DEVELOPMENT_VERBOSE else settings.LOG_LEVEL_INT + console_handler = create_console_handler(format_type=LogFormat.DETAILED.value, level=console_level, use_colors=True) + handlers.append(console_handler) + + if settings.LOG_FILE_ENABLED: + file_handler = create_file_handler( + filepath=settings.LOG_FILE_PATH, + format_type=LogFormat.STRUCTURED.value, + level=logging.DEBUG, + max_bytes=settings.LOG_FILE_MAX_SIZE, + backup_count=settings.LOG_FILE_BACKUP_COUNT, + ) + handlers.append(file_handler) + + root_logger = logging.getLogger() + for handler in handlers: + root_logger.addHandler(handler) + + +def _configure_staging_logging(settings) -> None: + """Configure logging for staging environment. + + Features: + - Structured console output for machine parsing + - File logging enabled by default + - INFO level for balanced detail + """ + handlers = [] + + if settings.LOG_CONSOLE_ENABLED: + console_handler = create_console_handler( + format_type=LogFormat.STRUCTURED.value, + level=settings.LOG_LEVEL_INT, + use_colors=False, + ) + handlers.append(console_handler) + + file_enabled = settings.LOG_FILE_ENABLED + if file_enabled: + file_handler = create_file_handler( + filepath=settings.LOG_FILE_PATH, + format_type=LogFormat.STRUCTURED.value, + level=logging.DEBUG, + max_bytes=settings.LOG_FILE_MAX_SIZE, + backup_count=settings.LOG_FILE_BACKUP_COUNT, + ) + handlers.append(file_handler) + + root_logger = logging.getLogger() + for handler in handlers: + root_logger.addHandler(handler) + + +def _configure_production_logging(settings) -> None: + """Configure logging for production environment. 
+ + Features: + - JSON console output for log aggregation + - File logging with rotation + - Optimized log levels to reduce noise + - Performance optimizations + """ + handlers = [] + + if settings.LOG_CONSOLE_ENABLED: + console_level = logging.WARNING if settings.LOG_PRODUCTION_OPTIMIZE else settings.LOG_LEVEL_INT + console_handler = create_console_handler(format_type=LogFormat.JSON.value, level=console_level, use_colors=False) + handlers.append(console_handler) + + if settings.LOG_FILE_ENABLED: + file_handler = create_file_handler( + filepath=settings.LOG_FILE_PATH, + format_type=LogFormat.JSON.value, + level=settings.LOG_LEVEL_INT, + max_bytes=settings.LOG_FILE_MAX_SIZE, + backup_count=settings.LOG_FILE_BACKUP_COUNT, + ) + handlers.append(file_handler) + + root_logger = logging.getLogger() + for handler in handlers: + root_logger.addHandler(handler) + + +def _configure_noisy_loggers() -> None: + """Configure noisy third-party loggers to reduce noise in production. + + Sets appropriate log levels for common third-party libraries that + tend to be verbose, ensuring they don't overwhelm production logs. + """ + noisy_loggers = { + "urllib3.connectionpool": logging.WARNING, + "requests.packages.urllib3": logging.WARNING, + "asyncpg": logging.WARNING, + "sqlalchemy.engine": logging.WARNING, + "sqlalchemy.dialects": logging.WARNING, + "sqlalchemy.pool": logging.WARNING, + "aiomcache": logging.WARNING, + "redis": logging.WARNING, + } + + for logger_name, level in noisy_loggers.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + + +def configure_testing_logging() -> None: + """Configure minimal logging for testing environments. + + Sets up logging that minimizes output during tests while still + capturing important error information. Can be called from test + fixtures to override normal logging configuration. 
+ """ + root_logger = logging.getLogger() + root_logger.handlers.clear() + + root_logger.addHandler(create_null_handler()) + + root_logger.setLevel(logging.ERROR) + + test_loggers = { + "sqlalchemy.engine": logging.ERROR, + "asyncpg": logging.ERROR, + } + + for logger_name, level in test_loggers.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + + +def get_configured_logger(name: str) -> logging.Logger: + """Get a logger that inherits from the configured root logger. + + This function returns a logger that will use the handlers and + configuration set up by setup_logging_configuration(). + + Args: + name: The name for the logger, typically __name__ + + Returns: + Configured logger instance + """ + return logging.getLogger(name) + + +def add_correlation_id_filter() -> None: + """Add a filter to automatically include correlation IDs in log records. + + This can be used to add request correlation IDs or trace IDs to + all log records automatically. Adds the filter to the root logger + so all child loggers inherit the correlation ID functionality. + """ + root_logger = logging.getLogger() + correlation_filter = CorrelationIdFilter() + root_logger.addFilter(correlation_filter) + + +class CorrelationIdFilter(logging.Filter): + """Logging filter that adds correlation ID to log records. + + This filter checks for correlation ID in context variables and adds + it to log records for distributed tracing and request tracking. + """ + + def filter(self, record: logging.LogRecord) -> bool: + """Add correlation ID to log record if available. 
+ + Args: + record: Log record to modify + + Returns: + True to allow the record to be processed + """ + try: + correlation_id = self._get_correlation_id() + except Exception: + correlation_id = None + + setattr(record, "correlation_id", correlation_id or "no-correlation") + + if not hasattr(record, "extra"): + setattr(record, "extra", {}) + getattr(record, "extra")["correlation_id"] = getattr(record, "correlation_id") + + return True + + def _get_correlation_id(self) -> str | None: + """Get correlation ID from various sources. + + Checks multiple sources for correlation ID: + 1. Context variables (from middleware) + 2. Thread local storage + 3. Request headers (if available) + + Returns: + Correlation ID string or None if not found + """ + try: + correlation_id = correlation_id_var.get() + if correlation_id: + return correlation_id + except (LookupError, AttributeError): + pass + + try: + thread_local = getattr(threading.current_thread(), "correlation_id", None) + if thread_local: + return str(thread_local) + except AttributeError: + pass + + try: + frame = inspect.currentframe() + while frame: + if "request" in frame.f_locals: + request = frame.f_locals["request"] + if hasattr(request, "state") and hasattr(request.state, "correlation_id"): + return str(request.state.correlation_id) + if hasattr(request, "headers"): + correlation_id = request.headers.get("x-correlation-id") or request.headers.get("x-request-id") + if correlation_id: + return str(correlation_id) + frame = frame.f_back + except Exception: + pass + + return None + + +correlation_id_var: contextvars.ContextVar[str] = contextvars.ContextVar("correlation_id") + + +def set_correlation_id(correlation_id: str) -> None: + """Set correlation ID in context for current request. + + Args: + correlation_id: Unique identifier for request tracing + """ + correlation_id_var.set(correlation_id) + + +def get_correlation_id() -> str | None: + """Get current correlation ID from context. 
+ + Returns: + Current correlation ID or None if not set + """ + try: + return correlation_id_var.get() + except LookupError: + return None + + +def generate_correlation_id() -> str: + """Generate a new correlation ID. + + Returns: + New UUID-based correlation ID + """ + return str(uuid.uuid4()) + + +def reconfigure_logger_level(logger_name: str, level: int) -> None: + """Dynamically reconfigure a specific logger's level. + + Useful for debugging specific components without changing + the entire logging configuration. + + Args: + logger_name: Name of the logger to reconfigure + level: New logging level (logging.DEBUG, INFO, etc.) + """ + logger = logging.getLogger(logger_name) + logger.setLevel(level) + + logging.getLogger(__name__).info(f"Logger level changed: {logger_name} -> {logging.getLevelName(level)}") diff --git a/backend/src/infrastructure/logging/factory.py b/backend/src/infrastructure/logging/factory.py new file mode 100644 index 00000000..e7ddf355 --- /dev/null +++ b/backend/src/infrastructure/logging/factory.py @@ -0,0 +1,240 @@ +"""Smart logger factory with automatic configuration and settings integration. + +This module provides the main interface for obtaining loggers throughout +the application. It automatically detects calling modules, applies +configuration based on settings, and provides a simple API for getting +properly configured loggers. + +The factory integrates with the application's settings system to provide +environment-aware logging configuration while maintaining a simple API +for developers. 
+""" + +import inspect +import logging +from collections.abc import MutableMapping +from threading import Lock +from typing import Any, Union + +from ..config.settings import get_settings +from .config import get_configured_logger, setup_logging_configuration + +_logging_configured = False +_configuration_lock = Lock() + + +def get_logger(name: str | None = None, **extra_context) -> logging.Logger | logging.LoggerAdapter: + """Get a properly configured logger with automatic module detection. + + This is the main interface for obtaining loggers throughout the application. + It automatically detects the calling module name if not provided and ensures + the logging system is properly configured based on application settings. + + Args: + name: Logger name. If None, automatically detects from calling module. + **extra_context: Additional context to include in log records. + + Returns: + Configured logger instance ready for use. + + Example: + ```python + # Auto-detect module name + logger = get_logger() + logger.info("Application started") + + # Explicit name + logger = get_logger("my.custom.logger") + logger.debug("Custom logger message") + + # With extra context + logger = get_logger(service="auth", version="1.0") + logger.info("Service initialized", extra={"user_count": 150}) + ``` + """ + _ensure_logging_configured() + + if name is None: + name = _detect_calling_module() + + base_logger = get_configured_logger(name) + + if extra_context: + logger: logging.Logger | logging.LoggerAdapter = logging.LoggerAdapter(base_logger, extra_context) + else: + logger = base_logger + + return logger + + +def configure_logging() -> None: + """Manually trigger logging configuration. + + This function can be called to explicitly configure logging, + though it's typically called automatically when first logger + is requested. Useful for early application setup. 
+ """ + global _logging_configured + + with _configuration_lock: + if not _logging_configured: + setup_logging_configuration() + _logging_configured = True + + logger = logging.getLogger(__name__) + try: + settings = get_settings() + logger.info( + f"Logging configured for {settings.ENVIRONMENT.value} environment", + extra={ + "log_level": settings.LOG_LEVEL, + "log_format": settings.LOG_FORMAT, + "console_enabled": settings.LOG_CONSOLE_ENABLED, + "file_enabled": settings.LOG_FILE_ENABLED, + }, + ) + except Exception: + logger.info("Logging configured for development environment") + + +def _ensure_logging_configured() -> None: + """Ensure logging is configured, calling setup if needed.""" + global _logging_configured + + if not _logging_configured: + configure_logging() + + +def _detect_calling_module() -> str: + """Detect the module name of the calling function. + + Uses the call stack to determine the module name of the code + that called get_logger(). This provides automatic module + detection for convenience. + + Returns: + Module name of the calling code. + """ + frame = inspect.currentframe() + + try: + for _ in range(3): + if frame is None: + break + frame = frame.f_back + + if frame is not None: + module_name = frame.f_globals.get("__name__", "unknown") + return str(module_name) + else: + return "unknown" + + finally: + del frame + + +class LoggerAdapter(logging.LoggerAdapter): + """Enhanced logger adapter that merges context automatically. + + Extends the standard LoggerAdapter to provide better context + merging and handling of extra parameters. This ensures that + context provided when creating the logger is automatically + included in all log records. + """ + + def __init__(self, logger: logging.Logger, extra: dict[str, Any]): + """Initialize the adapter with a logger and extra context. + + Args: + logger: The underlying logger instance. + extra: Dictionary of extra context to include in all records. 
+ """ + super().__init__(logger, extra) + + def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, MutableMapping[str, Any]]: + """Process the log record to merge context. + + Merges the adapter's extra context with any context + provided in the specific log call. + + Args: + msg: The log message. + kwargs: Keyword arguments from the log call. + + Returns: + Tuple of (message, merged_kwargs). + """ + extra = kwargs.get("extra", {}) + + adapter_extra = self.extra if isinstance(self.extra, dict) else {} + if isinstance(extra, dict): + merged_extra = {**adapter_extra, **extra} + else: + merged_extra = adapter_extra + + kwargs["extra"] = merged_extra + + return msg, kwargs + + +def create_child_logger( + parent_logger: logging.Logger, child_name: str, **extra_context +) -> Union[logging.Logger, "LoggerAdapter"]: + """Create a child logger with additional context. + + Creates a child logger that inherits from the parent while + adding additional context. Useful for creating specialized + loggers for specific components or operations. + + Args: + parent_logger: The parent logger to inherit from. + child_name: Name suffix for the child logger. + **extra_context: Additional context for the child logger. + + Returns: + Child logger with combined context. + + Example: + ```python + service_logger = get_logger() + auth_logger = create_child_logger(service_logger, "auth", component="authentication") + auth_logger.info("User logged in") # Will include component=authentication + ``` + """ + child_logger_name = f"{parent_logger.name}.{child_name}" + base_child_logger = get_configured_logger(child_logger_name) + + if extra_context: + child_logger: logging.Logger | LoggerAdapter = LoggerAdapter(base_child_logger, extra_context) + else: + child_logger = base_child_logger + + return child_logger + + +def get_logger_with_correlation_id(correlation_id: str, name: str | None = None) -> LoggerAdapter: + """Get a logger with automatic correlation ID inclusion. 
+ + Creates a logger that automatically includes the provided + correlation ID in all log records. Useful for request + tracing and distributed system debugging. + + Args: + correlation_id: The correlation ID to include. + name: Logger name, auto-detected if None. + + Returns: + Logger with correlation ID context. + + Example: + ```python + logger = get_logger_with_correlation_id("req-123456") + logger.info("Processing request") # Will include correlation_id=req-123456 + ``` + """ + logger = get_logger(name) + if isinstance(logger, logging.LoggerAdapter): + base_logger = logger.logger + else: + base_logger = logger + return LoggerAdapter(base_logger, {"correlation_id": correlation_id}) diff --git a/backend/src/infrastructure/logging/formatters.py b/backend/src/infrastructure/logging/formatters.py new file mode 100644 index 00000000..7b42bada --- /dev/null +++ b/backend/src/infrastructure/logging/formatters.py @@ -0,0 +1,189 @@ +"""Custom logging formatters for different environments and output types. + +This module provides specialized formatters that adapt to different environments +and use cases. Each formatter is optimized for its intended output medium and +provides the appropriate level of detail and structure. + +Available Formatters: +- SimpleFormatter: Basic console output for development +- DetailedFormatter: Verbose console output with full context +- StructuredFormatter: Structured logging with key-value pairs +- JSONFormatter: Machine-readable JSON format for production +""" + +import json +import logging +import traceback +from datetime import UTC, datetime + + +class SimpleFormatter(logging.Formatter): + """Simple formatter for basic console output. + + Provides clean, readable output for development environments + where human readability is prioritized over structure. 
+ + Format: [LEVEL] module_name: message + Example: [INFO] app.users.service: User created successfully + """ + + def __init__(self): + super().__init__(fmt="[%(levelname)s] %(name)s: %(message)s", datefmt="%H:%M:%S") + + +class DetailedFormatter(logging.Formatter): + """Detailed formatter with timestamp and context information. + + Provides comprehensive information for debugging and development, + including timestamps, module information, and optional context. + + Format: YYYY-MM-DD HH:MM:SS [LEVEL] module_name: message + """ + + def __init__(self): + super().__init__(fmt="%(asctime)s [%(levelname)8s] %(name)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S") + + +class StructuredFormatter(logging.Formatter): + """Structured formatter with key-value pairs. + + Provides structured logging that's both human-readable and + machine-parseable. Includes automatic context extraction. + + Format: timestamp level=LEVEL module=name message="text" key1=value1 key2=value2 + """ + + def format(self, record: logging.LogRecord) -> str: + timestamp = datetime.now(UTC).isoformat() + parts = [ + f"timestamp={timestamp}", + f"level={record.levelname}", + f"module={record.name}", + f'message="{record.getMessage()}"', + ] + + if hasattr(record, "__dict__"): + for key, value in record.__dict__.items(): + if key not in [ + "name", + "msg", + "args", + "levelname", + "levelno", + "pathname", + "filename", + "module", + "lineno", + "funcName", + "created", + "msecs", + "relativeCreated", + "thread", + "threadName", + "processName", + "process", + "message", + "exc_info", + "exc_text", + "stack_info", + ]: + if isinstance(value, str): + parts.append(f'{key}="{value}"') + elif isinstance(value, int | float | bool): + parts.append(f"{key}={value}") + else: + parts.append(f'{key}="{str(value)}"') + + if record.exc_info: + exc_text = self.formatException(record.exc_info) + exc_text_escaped = exc_text.replace("\n", "\\n") + parts.append(f'exception="{exc_text_escaped}"') + + return " ".join(parts) + 
+ +class JSONFormatter(logging.Formatter): + """JSON formatter for machine-readable structured logging. + + Optimized for production environments where logs are processed + by log aggregation systems. Provides complete context in JSON format. + """ + + def format(self, record: logging.LogRecord) -> str: + log_data = { + "timestamp": datetime.now(UTC).isoformat(), + "level": record.levelname, + "module": record.name, + "message": record.getMessage(), + "filename": record.filename, + "function": record.funcName, + "line_number": record.lineno, + "thread_id": record.thread, + "process_id": record.process, + } + + for key, value in record.__dict__.items(): + if key not in [ + "name", + "msg", + "args", + "levelname", + "levelno", + "pathname", + "filename", + "module", + "lineno", + "funcName", + "created", + "msecs", + "relativeCreated", + "thread", + "threadName", + "processName", + "process", + "message", + "exc_info", + "exc_text", + "stack_info", + ]: + try: + json.dumps(value) + log_data[key] = value + except (TypeError, ValueError): + log_data[key] = str(value) + + if record.exc_info: + log_data["exception"] = { + "type": record.exc_info[0].__name__ if record.exc_info[0] else None, + "message": str(record.exc_info[1]) if record.exc_info[1] else None, + "traceback": traceback.format_exception(*record.exc_info), + } + + return json.dumps(log_data, ensure_ascii=False) + + +def get_formatter(format_type: str) -> logging.Formatter: + """Get the appropriate formatter based on format type. 
+ + Args: + format_type: The type of formatter to create + ("simple", "detailed", "structured", "json") + + Returns: + Configured formatter instance + + Raises: + ValueError: If format_type is not recognized + """ + formatters: dict[str, type[logging.Formatter]] = { + "simple": SimpleFormatter, + "detailed": DetailedFormatter, + "structured": StructuredFormatter, + "json": JSONFormatter, + } + + formatter_class = formatters.get(format_type.lower()) + if formatter_class is None: + raise ValueError(f"Unknown format type: {format_type}. Available: {', '.join(formatters.keys())}") + + return formatter_class() diff --git a/backend/src/infrastructure/logging/handlers.py b/backend/src/infrastructure/logging/handlers.py new file mode 100644 index 00000000..378a0155 --- /dev/null +++ b/backend/src/infrastructure/logging/handlers.py @@ -0,0 +1,77 @@ +"""Custom logging handlers for different output destinations.""" + +import logging +import logging.handlers +import sys +from pathlib import Path + +from .formatters import get_formatter + + +class ColoredConsoleHandler(logging.StreamHandler): + """Enhanced console handler with color support.""" + + COLORS = { + "DEBUG": "\033[36m", + "INFO": "\033[32m", + "WARNING": "\033[33m", + "ERROR": "\033[31m", + "CRITICAL": "\033[35m", + } + RESET = "\033[0m" + + def __init__(self, stream=None): + super().__init__(stream or sys.stdout) + self.use_colors = self._should_use_colors() + + def _should_use_colors(self) -> bool: + return hasattr(self.stream, "isatty") and self.stream.isatty() and sys.platform != "win32" + + def format(self, record: logging.LogRecord) -> str: + formatted = super().format(record) + if self.use_colors and record.levelname in self.COLORS: + color = self.COLORS[record.levelname] + formatted = formatted.replace(f"[{record.levelname}]", f"[{color}{record.levelname}{self.RESET}]") + return formatted + + +class RotatingFileHandler(logging.handlers.RotatingFileHandler): + """Enhanced rotating file handler with automatic 
directory creation.""" + + def __init__(self, filename: str, max_bytes: int = 10485760, backup_count: int = 5, encoding: str = "utf-8"): + log_path = Path(filename) + log_path.parent.mkdir(parents=True, exist_ok=True) + super().__init__(filename=filename, maxBytes=max_bytes, backupCount=backup_count, encoding=encoding) + + +def create_console_handler( + format_type: str = "detailed", level: int = logging.INFO, use_colors: bool = True +) -> logging.Handler: + """Create a configured console handler.""" + handler: logging.Handler + if use_colors: + handler = ColoredConsoleHandler() + else: + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(level) + handler.setFormatter(get_formatter(format_type)) + return handler + + +def create_file_handler( + filepath: str, + format_type: str = "structured", + level: int = logging.DEBUG, + max_bytes: int = 10485760, + backup_count: int = 5, +) -> logging.Handler: + """Create a configured rotating file handler.""" + handler = RotatingFileHandler(filename=filepath, max_bytes=max_bytes, backup_count=backup_count) + handler.setLevel(level) + handler.setFormatter(get_formatter(format_type)) + return handler + + +def create_null_handler() -> logging.Handler: + """Create a null handler that discards all log records.""" + return logging.NullHandler() diff --git a/backend/src/infrastructure/middleware.py b/backend/src/infrastructure/middleware.py new file mode 100644 index 00000000..e89b4116 --- /dev/null +++ b/backend/src/infrastructure/middleware.py @@ -0,0 +1,52 @@ +"""Middleware components for the FastAPI application.""" + +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint +from starlette.types import ASGIApp + +from .auth.constants import HSTS_MAX_AGE_SECONDS + + +class ClientCacheMiddleware(BaseHTTPMiddleware): + """Set Cache-Control headers. + + API endpoints get no-cache (authenticated, dynamic data). 
+ Static assets get public caching with the configured max_age. + """ + + def __init__(self, app: ASGIApp, max_age: int = 60) -> None: + super().__init__(app) + self.max_age: int = max_age + + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: + response: Response = await call_next(request) + if request.url.path.startswith("/api/"): + response.headers["Cache-Control"] = "private, no-cache, no-store, must-revalidate" + else: + response.headers["Cache-Control"] = f"public, max-age={self.max_age}" + return response + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + """Set standard security headers on every response. + + Adds X-Content-Type-Options, X-Frame-Options, Referrer-Policy, + Permissions-Policy, and HSTS (production/staging only). + """ + + def __init__(self, app: ASGIApp, environment: str = "development") -> None: + super().__init__(app) + self.environment = environment + + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: + response: Response = await call_next(request) + response.headers["X-Content-Type-Options"] = "nosniff" + response.headers["X-Frame-Options"] = "DENY" + response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" + response.headers["X-XSS-Protection"] = "0" + response.headers["Permissions-Policy"] = "camera=(), microphone=(), geolocation=()" + + if self.environment in ("production", "staging"): + response.headers["Strict-Transport-Security"] = f"max-age={HSTS_MAX_AGE_SECONDS}; includeSubDomains" + + return response diff --git a/backend/src/infrastructure/rate_limit/__init__.py b/backend/src/infrastructure/rate_limit/__init__.py new file mode 100644 index 00000000..8b8331ba --- /dev/null +++ b/backend/src/infrastructure/rate_limit/__init__.py @@ -0,0 +1,35 @@ +"""Rate limiter infrastructure. + +This module contains the rate limiting infrastructure components, including middleware +and backend implementations. 
+""" + +import importlib.util + +from .base import RateLimiterBackend +from .exceptions import RateLimiterBackendException, RateLimitException +from .initialize import close_rate_limiter, initialize_rate_limiter +from .middleware import RateLimiterMiddleware, _check_rate_limit, check_rate_limit +from .provider import get_count, increment_and_check, rate_limiter_provider, reset +from .utils import sanitize_path + +MEMCACHED_INSTALLED = importlib.util.find_spec("aiomcache") is not None +REDIS_INSTALLED = importlib.util.find_spec("redis") is not None + +__all__ = [ + "RateLimiterMiddleware", + "check_rate_limit", + "_check_rate_limit", + "RateLimitException", + "RateLimiterBackendException", + "rate_limiter_provider", + "increment_and_check", + "get_count", + "reset", + "initialize_rate_limiter", + "close_rate_limiter", + "sanitize_path", + "RateLimiterBackend", + "MEMCACHED_INSTALLED", + "REDIS_INSTALLED", +] diff --git a/backend/src/infrastructure/rate_limit/backends/__init__.py b/backend/src/infrastructure/rate_limit/backends/__init__.py new file mode 100644 index 00000000..c9a54402 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/backends/__init__.py @@ -0,0 +1,26 @@ +"""Rate limiter backend implementations. + +This package contains implementations of rate limiter backends for different storage engines. 
+""" + +import importlib.util + +MEMCACHED_INSTALLED = importlib.util.find_spec("aiomcache") is not None +REDIS_INSTALLED = importlib.util.find_spec("redis") is not None + +if MEMCACHED_INSTALLED: + from .memcached import MemcachedBackend, MemcachedSettings # noqa: F401 + + __all__ = ["MemcachedBackend", "MemcachedSettings"] +else: + MemcachedBackendType: type | None = None + MemcachedSettingsType: type | None = None + __all__ = [] + +if REDIS_INSTALLED: + from .redis import RedisBackend, RedisSettings # noqa: F401 + + __all__.extend(["RedisBackend", "RedisSettings"]) +else: + RedisBackendType: type | None = None + RedisSettingsType: type | None = None diff --git a/backend/src/infrastructure/rate_limit/backends/memcached.py b/backend/src/infrastructure/rate_limit/backends/memcached.py new file mode 100644 index 00000000..bac102c6 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/backends/memcached.py @@ -0,0 +1,179 @@ +import hashlib +from datetime import UTC, datetime + +try: + import aiomcache +except ImportError: + raise ImportError( + "The aiomcache package is not installed. " + "Please install it with 'pip install aiomcache' or 'pip install -e \".[memcached]\"'" + ) + +from pydantic import BaseModel + +from ....modules.common.utils.logger import get_logger +from ..base import RateLimiterBackend +from ..exceptions import RateLimiterBackendException + +logger = get_logger(__name__) + + +class MemcachedSettings(BaseModel): + """Settings for Memcached connection. + + This class defines the configuration for connecting to a Memcached server. + + Attributes: + host: Memcached server hostname. Default is "localhost". + port: Memcached server port. Default is 11211. + pool_size: Maximum number of connections in the pool. Default is 10. + connect_timeout: Connection timeout in seconds. Default is 5. + Note: This parameter is not currently used by aiomcache.Client but is + kept for API consistency with other rate limiter backends. 
+ """ + + host: str = "localhost" + port: int = 11211 + pool_size: int = 10 + connect_timeout: int = 5 + + +class MemcachedBackend(RateLimiterBackend): + """Memcached implementation of the rate limiter backend.""" + + def __init__(self, settings: MemcachedSettings | None = None, fail_open: bool = True): + """Initialize the Memcached backend. + + Args: + settings: Memcached connection settings. If None, default settings are used. + fail_open: Whether to fail open (allow requests) when rate limiting errors occur. + Default is True for safety. + """ + super().__init__(fail_open=fail_open) + self.settings = settings or MemcachedSettings() + try: + self.client = aiomcache.Client( + host=self.settings.host, + port=self.settings.port, + pool_size=self.settings.pool_size, + ) + except Exception as e: + logger.error(f"Failed to initialize Memcached client: {e}") + raise RateLimiterBackendException(f"Failed to initialize Memcached client: {e}") + + async def increment_and_check(self, key: str, limit: int, period: int) -> tuple[int, bool]: + """Increment the counter for a key and check if rate limit is exceeded. + + Args: + key: The rate limit key to increment. + limit: Maximum number of requests allowed in the period. + period: Time period in seconds. 
+ + Returns: + Tuple of (current_count, is_rate_limited) where: + - current_count: The current count of requests + - is_rate_limited: True if the rate limit is exceeded, False otherwise + """ + try: + key_hash = hashlib.md5(key.encode()).hexdigest() + current_timestamp = int(datetime.now(UTC).timestamp()) + window_start = current_timestamp - (current_timestamp % period) + rate_limit_key = f"{key_hash}:{window_start}".encode() + + value = await self.client.get(rate_limit_key) + current_count = int(value.decode()) if value else 0 + + current_count += 1 + await self.client.set(rate_limit_key, str(current_count).encode(), exptime=period) + + is_rate_limited = current_count > limit + return current_count, is_rate_limited + + except Exception as e: + logger.error(f"Error checking rate limit for key {key}: {e}") + return 0, not self.fail_open + + async def get_count(self, key: str) -> int | None: + """Get the current count for a key. + + Args: + key: The rate limit key to check. + + Returns: + The current count or None if the key doesn't exist. + """ + try: + value = await self.client.get(key.encode()) + if value: + return int(value.decode()) + return None + except Exception as e: + logger.error(f"Error getting rate limit count for key {key}: {e}") + return None + + async def reset(self, key: str) -> None: + """Reset the counter for a key. + + Args: + key: The rate limit key to reset. + """ + try: + await self.client.delete(key.encode()) + except Exception as e: + logger.error(f"Error resetting rate limit for key {key}: {e}") + + async def increment(self, key: str, amount: int = 1, expiry: int = 300) -> int: + """Increment a counter by the given amount and set expiry. 
+ + Args: + key: The key to increment + amount: Amount to increment by + expiry: Time in seconds for the key to expire + + Returns: + The new value after incrementing + """ + try: + key_bytes = key.encode() + value = await self.client.get(key_bytes) + current_count = int(value.decode()) if value else 0 + + new_count = current_count + amount + await self.client.set(key_bytes, str(new_count).encode(), exptime=expiry) + + return new_count + except Exception as e: + logger.error(f"Error incrementing count for key {key}: {e}") + return 0 + + async def delete(self, key: str) -> bool: + """Delete a key. + + Args: + key: The key to delete + + Returns: + True if deleted, False otherwise + """ + try: + await self.client.delete(key.encode()) + return True + except Exception as e: + logger.error(f"Error deleting key {key}: {e}") + return False + + async def ping(self) -> bool: + """Check if the rate limiter backend is available. + + Returns: + True if the backend is available, False otherwise. + """ + try: + test_key = b"rate_limiter_ping_test" + test_value = b"1" + await self.client.set(test_key, test_value, exptime=1) + result = await self.client.get(test_key) + return bool(result == test_value) + except Exception as e: + logger.error(f"Failed to ping Memcached server: {e}") + return False diff --git a/backend/src/infrastructure/rate_limit/backends/redis.py b/backend/src/infrastructure/rate_limit/backends/redis.py new file mode 100644 index 00000000..397ddb68 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/backends/redis.py @@ -0,0 +1,192 @@ +from datetime import UTC, datetime + +try: + from redis.asyncio import Redis + from redis.exceptions import RedisError +except ImportError: + raise ImportError( + "The redis package is not installed. 
Please install it with 'pip install redis' or 'pip install -e \".[redis]\"'" + ) + +from pydantic import BaseModel + +from ....modules.common.utils.logger import get_logger +from ..base import RateLimiterBackend +from ..exceptions import RateLimiterBackendException + +logger = get_logger(__name__) + + +class RedisSettings(BaseModel): + """Settings for Redis connection. + + This class defines the configuration for connecting to a Redis server. + + Attributes: + host: Redis server hostname. Default is "localhost". + port: Redis server port. Default is 6379. + db: Redis database number. Default is 0. + password: Redis server password. Default is None. + connect_timeout: Connection timeout in seconds. Default is 5. + pool_size: Maximum number of connections in the pool. Default is 10. + """ + + host: str = "localhost" + port: int = 6379 + db: int = 0 + password: str | None = None + connect_timeout: int = 5 + pool_size: int = 10 + + +class RedisBackend(RateLimiterBackend): + """Redis implementation of the rate limiter backend.""" + + def __init__(self, settings: RedisSettings | None = None, fail_open: bool = True): + """Initialize the Redis backend. + + Args: + settings: Redis connection settings. If None, default settings are used. + fail_open: Whether to fail open (allow requests) when rate limiting errors occur. + Default is True for safety. 
+ """ + super().__init__(fail_open=fail_open) + self.settings = settings or RedisSettings() + try: + self.client = Redis( + host=self.settings.host, + port=self.settings.port, + db=self.settings.db, + password=self.settings.password, + socket_timeout=self.settings.connect_timeout, + socket_connect_timeout=self.settings.connect_timeout, + socket_keepalive=True, + decode_responses=True, + max_connections=self.settings.pool_size, + ) + except Exception as e: + logger.error(f"Failed to initialize Redis client: {e}") + raise RateLimiterBackendException(f"Failed to initialize Redis client: {e}") + + async def increment_and_check(self, key: str, limit: int, period: int) -> tuple[int, bool]: + """Increment the counter for a key and check if rate limit is exceeded. + + Args: + key: The rate limit key to increment. + limit: Maximum number of requests allowed in the period. + period: Time period in seconds. + + Returns: + Tuple of (current_count, is_rate_limited) where: + - current_count: The current count of requests + - is_rate_limited: True if the rate limit is exceeded, False otherwise + """ + try: + current_timestamp = int(datetime.now(UTC).timestamp()) + window_start = current_timestamp - (current_timestamp % period) + rate_limit_key = f"{key}:{window_start}" + + pipe = self.client.pipeline() + pipe.incr(rate_limit_key) + pipe.expire(rate_limit_key, int(period)) + result = await pipe.execute() + + current_count = result[0] + + is_rate_limited = current_count > limit + return current_count, is_rate_limited + + except RedisError as e: + logger.error(f"Redis error checking rate limit for key {key}: {e}") + return 0, not self.fail_open + except Exception as e: + logger.error(f"Error checking rate limit for key {key}: {e}") + return 0, not self.fail_open + + async def get_count(self, key: str) -> int | None: + """Get the current count for a key. + + Args: + key: The rate limit key to check. + + Returns: + The current count or None if the key doesn't exist. 
+ """ + try: + value = await self.client.get(key) + if value: + return int(value) + return None + except Exception as e: + logger.error(f"Error getting rate limit count for key {key}: {e}") + return None + + async def reset(self, key: str) -> None: + """Reset the counter for a key. + + Args: + key: The rate limit key to reset. + """ + try: + await self.client.delete(key) + except Exception as e: + logger.error(f"Error resetting rate limit for key {key}: {e}") + + async def increment(self, key: str, amount: int = 1, expiry: int = 300) -> int: + """Increment a counter by the given amount and set expiry. + + Args: + key: The key to increment + amount: Amount to increment by + expiry: Time in seconds for the key to expire + + Returns: + The new value after incrementing + """ + try: + expiry_int = int(expiry) + + pipe = self.client.pipeline() + pipe.incrby(key, amount) + pipe.expire(key, expiry_int) + result: list[int] = await pipe.execute() + return result[0] + except RedisError as e: + logger.error(f"Redis error incrementing count for key {key}: {type(e).__name__}: {str(e)}") + if not self.fail_open: + raise RateLimiterBackendException(f"Redis pipeline failed for key {key}") + return 0 + except Exception as e: + logger.error(f"Error incrementing count for key {key}: {type(e).__name__}: {str(e)}") + if not self.fail_open: + raise RateLimiterBackendException(f"Unexpected error for key {key}") + return 0 + + async def delete(self, key: str) -> bool: + """Delete a key. + + Args: + key: The key to delete + + Returns: + True if deleted, False otherwise + """ + try: + result = await self.client.delete(key) + return bool(result) + except Exception as e: + logger.error(f"Error deleting key {key}: {e}") + return False + + async def ping(self) -> bool: + """Check if the rate limiter backend is available. + + Returns: + True if the backend is available, False otherwise. 
+ """ + try: + result = await self.client.ping() # type: ignore[misc] + return bool(result) + except Exception as e: + logger.error(f"Failed to ping Redis server: {e}") + return False diff --git a/backend/src/infrastructure/rate_limit/base.py b/backend/src/infrastructure/rate_limit/base.py new file mode 100644 index 00000000..deb56cdd --- /dev/null +++ b/backend/src/infrastructure/rate_limit/base.py @@ -0,0 +1,252 @@ +from abc import ABC, abstractmethod + + +class RateLimiterBackend(ABC): + """Abstract base class for rate limiter backends with comprehensive interface. + + Defines the standard interface that all rate limiter backend implementations + must follow, providing consistent rate limiting operations across different + backend technologies like Redis, Memcached, or in-memory storage. + + This abstract base class ensures: + - Consistent API across different rate limiter implementations + - Flexible failure handling with fail-open/fail-closed policies + - Comprehensive rate limiting operations including counters and resets + - Health checking and connection management + - Proper error handling patterns + + Implementations should handle: + - Thread-safe counter operations + - Atomic increment-and-check operations + - Connection management and retries + - Backend-specific optimizations + - Error handling and fallback behavior + + Example: + ```python + class RedisRateLimiterBackend(RateLimiterBackend): + async def increment_and_check(self, key: str, limit: int, period: int) -> tuple[int, bool]: + try: + count = await self.redis.incr(key) + if count == 1: + await self.redis.expire(key, period) + return count, count > limit + except ConnectionError: + return (0, False) if self.fail_open else (limit + 1, True) + ``` + """ + + def __init__(self, fail_open: bool = True): + """Initialize the rate limiter backend with failure handling policy. + + Args: + fail_open: Whether to fail open (allow requests) when rate limiting + errors occur. 
If True, allows requests when backend is unavailable. + If False, blocks requests when backend errors occur. + Default is True for safety and availability. + + Note: + Fail-open vs fail-closed policies: + - Fail-open: Prioritizes availability over strict rate limiting + - Fail-closed: Prioritizes security over availability + + Choose based on your application's requirements: + - Critical APIs may prefer fail-closed for security + - Public APIs may prefer fail-open for availability + """ + self.fail_open = fail_open + + @abstractmethod + async def increment_and_check(self, key: str, limit: int, period: int) -> tuple[int, bool]: + """Increment the counter for a key and check if rate limit is exceeded. + + Performs an atomic increment-and-check operation to determine if a + request should be rate limited. This is the core operation for most + rate limiting scenarios. + + Args: + key: The rate limit key to increment. Should be unique per user/IP/resource. + limit: Maximum number of requests allowed in the period. + period: Time period in seconds for the rate limit window. + + Returns: + Tuple of (current_count, is_rate_limited) where: + - current_count: The current count of requests in the window + - is_rate_limited: True if the rate limit is exceeded, False otherwise + + Note: + Implementation should handle: + - Atomic increment operations to prevent race conditions + - Automatic key expiration after the period + - Connection errors according to fail_open policy + - Efficient sliding window or fixed window algorithms + + Example: + ```python + # Check if user can make a request (10 requests per minute) + count, is_limited = await backend.increment_and_check( + key="user:123:api_calls", + limit=10, + period=60 + ) + + if is_limited: + raise RateLimitException(f"Rate limit exceeded. {count}/{limit} requests used.") + ``` + """ + pass + + @abstractmethod + async def get_count(self, key: str) -> int | None: + """Get the current count for a rate limit key. 
+ + Retrieves the current count without incrementing it. Useful for + monitoring, dashboards, and providing rate limit information to clients. + + Args: + key: The rate limit key to check. + + Returns: + The current count or None if the key doesn't exist or has expired. + + Note: + Implementation should handle: + - Key normalization and validation + - Expired key cleanup where applicable + - Connection errors gracefully (return None) + - Efficient read operations + + Example: + ```python + # Check current usage for rate limit headers + current_count = await backend.get_count("user:123:api_calls") + if current_count is not None: + remaining = max(0, limit - current_count) + headers["X-RateLimit-Remaining"] = str(remaining) + ``` + """ + pass + + @abstractmethod + async def reset(self, key: str) -> None: + """Reset the counter for a specific rate limit key. + + Removes or resets the counter for a given key, effectively clearing + the rate limit for that key. Useful for administrative actions, + premium users, or error recovery. + + Args: + key: The rate limit key to reset. + + Note: + Implementation should handle: + - Key normalization and validation + - Idempotent deletion (no error if key doesn't exist) + - Connection errors gracefully + - Cleanup of any related metadata + + Example: + ```python + # Reset rate limit for premium user + await backend.reset("user:123:api_calls") + + # Reset after resolving user issue + await backend.reset(f"user:{user_id}:failed_logins") + ``` + """ + pass + + @abstractmethod + async def increment(self, key: str, amount: int = 1, expiry: int = 300) -> int: + """Increment a counter by the given amount and set expiry. + + Provides flexible counter increment operations with configurable + expiry times. Useful for custom rate limiting scenarios and + batched operations. + + Args: + key: The key to increment. + amount: Amount to increment by (default: 1). + expiry: Time in seconds for the key to expire (default: 300). 
+ + Returns: + The new value after incrementing. + + Note: + Implementation should handle: + - Atomic increment operations + - Automatic key expiration + - Connection errors according to fail_open policy + - Efficient batch operations for multiple increments + + Example: + ```python + # Increment by custom amount for bulk operations + new_count = await backend.increment( + key="user:123:bulk_uploads", + amount=10, # 10 files uploaded + expiry=3600 # 1 hour window + ) + ``` + """ + pass + + @abstractmethod + async def delete(self, key: str) -> bool: + """Delete a rate limit key. + + Removes a specific key from the rate limiter backend. Similar to reset + but returns information about whether the key existed. + + Args: + key: The key to delete. + + Returns: + True if the key existed and was deleted, False otherwise. + + Note: + Implementation should handle: + - Key normalization and validation + - Atomic deletion operations + - Connection errors gracefully + - Cleanup of any related metadata + + Example: + ```python + # Clean up expired user session + existed = await backend.delete("user:123:session_requests") + if existed: + logger.info("Cleaned up rate limit data for user session") + ``` + """ + pass + + @abstractmethod + async def ping(self) -> bool: + """Check if the rate limiter backend is available and responsive. + + Performs a health check on the rate limiter backend to determine if it's + available and responding to requests. This is essential for monitoring + and graceful degradation. + + Returns: + True if the backend is available and responsive, False otherwise. 
+ + Note: + Implementation should handle: + - Quick connectivity test + - Timeout handling for unresponsive backends + - Authentication validation + - Minimal resource usage for health checks + + Example: + ```python + # Health check in monitoring system + if await backend.ping(): + metrics.gauge("rate_limiter.health", 1) + else: + metrics.gauge("rate_limiter.health", 0) + logger.warning("Rate limiter backend is unavailable") + ``` + """ + pass diff --git a/backend/src/infrastructure/rate_limit/exceptions.py b/backend/src/infrastructure/rate_limit/exceptions.py new file mode 100644 index 00000000..4f432323 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/exceptions.py @@ -0,0 +1,156 @@ +from fastapi import HTTPException, status + + +class RateLimitException(HTTPException): + """Exception raised when a rate limit is exceeded. + + This HTTP exception is thrown when a client exceeds their allowed request + rate, providing appropriate HTTP status code and headers for rate limiting. + + The exception automatically sets: + - HTTP 429 (Too Many Requests) status code + - Retry-After header indicating when to retry + - Detailed error message for the client + + Args: + detail: Custom error message describing the rate limit violation. + Defaults to "Rate limit exceeded". + + Note: + This exception follows RFC 6585 standards for HTTP 429 responses. + The Retry-After header helps clients implement proper backoff strategies. + + Consider including additional information in the detail message: + - Current rate limit values + - Time until reset + - Suggested retry intervals + + Example: + ```python + # Basic rate limit exceeded + raise RateLimitException("Rate limit exceeded") + + # With detailed information + raise RateLimitException( + f"Rate limit exceeded. {count}/{limit} requests used. " + f"Try again in {period} seconds." 
+ ) + + # In middleware or endpoint + try: + await rate_limiter.check_limit(user_id, endpoint) + except RateLimitException as e: + logger.warning(f"Rate limit exceeded for user {user_id}: {e}") + raise + ``` + """ + + def __init__(self, detail: str = "Rate limit exceeded"): + super().__init__( + status_code=status.HTTP_429_TOO_MANY_REQUESTS, + detail=detail, + headers={"Retry-After": "60"}, + ) + + +class RateLimiterBackendException(Exception): + """Base exception for rate limiter backend errors. + + Serves as the parent class for all rate limiter backend-specific exceptions, + providing a common interface for error handling throughout the rate limiting + infrastructure. + + Args: + message: Detailed error message describing the backend failure. + + Note: + This base class should not be raised directly. Instead, use + specific subclasses that better describe the type of backend error. + + All rate limiter backend exceptions inherit from this base class, + allowing for comprehensive error handling with a single exception + type when needed. + + Example: + ```python + try: + # Rate limiter backend operations + await backend.increment_and_check(key, limit, period) + except RateLimiterBackendException as e: + logger.error(f"Rate limiter backend error: {e}") + # Handle any backend-related error + ``` + """ + + def __init__(self, message: str = "Rate limiter backend error"): + self.message = message + super().__init__(self.message) + + +class BackendNotFoundError(RateLimiterBackendException): + """Raised when a requested rate limiter backend is not found. + + This exception occurs when trying to use a rate limiter backend that hasn't + been registered with the rate limiter provider or doesn't exist in the + backend registry. + + Args: + backend_name: The name of the backend that was not found. 
+ + Note: + This exception typically indicates: + - Backend name typo in configuration + - Backend not properly registered during initialization + - Missing backend dependencies or imports + - Configuration mismatch between environments + + Example: + ```python + try: + backend = rate_limiter_provider.get_backend("nonexistent_backend") + except BackendNotFoundError as e: + logger.error(f"Rate limiter backend not found: {e}") + # Fall back to default backend or raise configuration error + ``` + """ + + def __init__(self, backend_name: str): + self.message = f"Rate limiter backend '{backend_name}' not found." + super().__init__(self.message) + + +class BackendInitializationError(RateLimiterBackendException): + """Raised when a rate limiter backend fails to initialize. + + This exception occurs when a backend cannot be properly initialized due to + configuration errors, connection failures, or missing dependencies. + + Args: + backend_name: The name of the backend that failed to initialize. + reason: The specific reason why initialization failed. 
+ + Note: + This exception typically indicates: + - Invalid configuration parameters + - Network connectivity issues + - Missing credentials or authentication failures + - Backend service unavailability + - Resource constraints or permission issues + + Example: + ```python + try: + redis_backend = RedisRateLimiterBackend( + host="invalid_host", + port=6379 + ) + await redis_backend.initialize() + except BackendInitializationError as e: + logger.error(f"Failed to initialize rate limiter: {e}") + # Try alternative backend or raise startup error + ``` + """ + + def __init__(self, backend_name: str, reason: str): + self.message = f"Failed to initialize rate limiter backend '{backend_name}': {reason}" + super().__init__(self.message) diff --git a/backend/src/infrastructure/rate_limit/initialize.py b/backend/src/infrastructure/rate_limit/initialize.py new file mode 100644 index 00000000..a8081aa8 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/initialize.py @@ -0,0 +1,75 @@ +"""Module for initializing the rate limiter backends.""" + +import importlib.util + +from ..config import CacheBackend, get_settings +from .provider import rate_limiter_provider + +MEMCACHED_INSTALLED = importlib.util.find_spec("aiomcache") is not None +REDIS_INSTALLED = importlib.util.find_spec("redis") is not None + +if MEMCACHED_INSTALLED: + from .backends import MemcachedBackend, MemcachedSettings + +if REDIS_INSTALLED: + from .backends import RedisBackend, RedisSettings + + +async def initialize_rate_limiter() -> None: + """Initialize the rate limiter backends. + + This function initializes the rate limiter backends based on the application settings. + It is called during application startup. + """ + settings = get_settings() + + if not settings.RATE_LIMITER_ENABLED: + return + + if settings.RATE_LIMITER_BACKEND == CacheBackend.MEMCACHED.value: + if not MEMCACHED_INSTALLED: + raise ImportError("The aiomcache package is not installed. 
Please install it with 'pip install aiomcache'.") + + memcached_settings = MemcachedSettings( + host=settings.RATE_LIMITER_MEMCACHED_HOST, + port=settings.RATE_LIMITER_MEMCACHED_PORT, + pool_size=settings.RATE_LIMITER_MEMCACHED_POOL_SIZE, + ) + memcached_backend = MemcachedBackend(settings=memcached_settings, fail_open=settings.RATE_LIMITER_FAIL_OPEN) + rate_limiter_provider.register_backend(CacheBackend.MEMCACHED.value, memcached_backend, default=True) + + elif settings.RATE_LIMITER_BACKEND == CacheBackend.REDIS.value: + if not REDIS_INSTALLED: + raise ImportError("The redis package is not installed. Please install it with 'pip install redis'.") + + redis_settings = RedisSettings( + host=settings.RATE_LIMITER_REDIS_HOST, + port=settings.RATE_LIMITER_REDIS_PORT, + db=settings.RATE_LIMITER_REDIS_DB, + password=settings.RATE_LIMITER_REDIS_PASSWORD, + connect_timeout=settings.RATE_LIMITER_REDIS_CONNECT_TIMEOUT, + pool_size=settings.RATE_LIMITER_REDIS_POOL_SIZE, + ) + redis_backend = RedisBackend(settings=redis_settings, fail_open=settings.RATE_LIMITER_FAIL_OPEN) + rate_limiter_provider.register_backend(CacheBackend.REDIS.value, redis_backend, default=True) + + +async def close_rate_limiter() -> None: + """Close all rate limiter connections. + + This function should be called during application shutdown to clean up resources. 
+ """ + settings = get_settings() + + if not settings.RATE_LIMITER_ENABLED: + return + + if settings.RATE_LIMITER_BACKEND == CacheBackend.MEMCACHED.value and MEMCACHED_INSTALLED: + backend = rate_limiter_provider.get_backend(CacheBackend.MEMCACHED.value) + if hasattr(backend, "client") and hasattr(backend.client, "close"): + await backend.client.close() + + elif settings.RATE_LIMITER_BACKEND == CacheBackend.REDIS.value and REDIS_INSTALLED: + backend = rate_limiter_provider.get_backend(CacheBackend.REDIS.value) + if hasattr(backend, "client") and hasattr(backend.client, "close"): + await backend.client.close() diff --git a/backend/src/infrastructure/rate_limit/middleware.py b/backend/src/infrastructure/rate_limit/middleware.py new file mode 100644 index 00000000..0e310e95 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/middleware.py @@ -0,0 +1,151 @@ +from collections.abc import Callable +from typing import Any, cast + +from fastapi import Depends, Request +from sqlalchemy.ext.asyncio import AsyncSession +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import Response + +from ...modules.common.utils.logger import get_logger +from ...modules.rate_limit.crud import crud_rate_limits +from ...modules.rate_limit.schemas import RateLimitSelect +from ...modules.tier.crud import crud_tiers +from ...modules.tier.schemas import TierSelect +from ..config import get_settings +from ..database import async_session +from .exceptions import RateLimitException +from .provider import increment_and_check +from .utils import sanitize_path + +logger = get_logger(__name__) + +settings = get_settings() +DEFAULT_LIMIT = settings.DEFAULT_RATE_LIMIT_LIMIT +DEFAULT_PERIOD = settings.DEFAULT_RATE_LIMIT_PERIOD + + +async def get_optional_user(request: Request) -> dict[str, Any] | None: + """Get the current user from the request, or None if not authenticated. + + This is a simplified version that assumes the user is stored in request.state.user. 
+ In a real application, you would need to implement proper user extraction from + authentication tokens. + """ + if hasattr(request.state, "user"): + return cast(dict[str, Any], request.state.user) + return None + + +async def _check_rate_limit(request: Request, db: AsyncSession, user: dict[str, Any] | None = None) -> None: + """Internal implementation of check_rate_limit without FastAPI dependency injection. + + Args: + request: The current request. + db: The database session. + user: The authenticated user, or None if not authenticated. + + Raises: + RateLimitException: If the rate limit is exceeded. + """ + if not settings.RATE_LIMITER_ENABLED: + return + + if hasattr(request.app.state, "initialization_complete"): + await request.app.state.initialization_complete.wait() + + original_path = request.url.path + sanitized_path = sanitize_path(original_path) + + if user: + user_id = user["id"] + tier = await crud_tiers.get(db=db, id=user["tier_id"], schema_to_select=TierSelect) + + if tier: + rate_limit = await crud_rate_limits.get( + db=db, tier_id=tier["id"], path=sanitized_path, schema_to_select=RateLimitSelect + ) + + if not rate_limit: + rate_limit = await crud_rate_limits.get( + db=db, tier_id=tier["id"], path=original_path, schema_to_select=RateLimitSelect + ) + + if rate_limit: + limit, period = rate_limit["limit"], rate_limit["period"] + else: + logger.warning( + f"User {user_id} with tier '{tier['name']}' has no specific rate limit for path '{original_path}'. " + "Applying default rate limit." + ) + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD + else: + logger.warning(f"User {user_id} has no assigned tier. 
Applying default rate limit.") + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD + else: + user_id = request.client.host if request.client and hasattr(request.client, "host") else "unknown" + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD + + key = f"ratelimit:{user_id}:{sanitized_path}" + + try: + count, is_limited = await increment_and_check( + key=key, limit=limit, period=period, fail_open=settings.RATE_LIMITER_FAIL_OPEN + ) + + request.state.rate_limit_headers = { + "X-RateLimit-Limit": str(limit), + "X-RateLimit-Remaining": str(max(0, limit - count)), + "X-RateLimit-Reset": str(period), + } + + if is_limited: + logger.warning(f"Rate limit exceeded for {user_id} on path {sanitized_path}. Count: {count}, Limit: {limit}") + raise RateLimitException(f"Rate limit exceeded. Try again in {period} seconds.") + + except RateLimitException: + raise + except Exception as e: + logger.error(f"Error checking rate limit for {user_id} on path {sanitized_path}: {e}") + if not settings.RATE_LIMITER_FAIL_OPEN: + logger.warning("Blocking request due to fail-closed policy") + raise RateLimitException("Error checking rate limit. Access denied as a precaution.") + + +async def check_rate_limit( + request: Request, + db: AsyncSession = Depends(async_session), + user: dict[str, Any] | None = Depends(get_optional_user), +) -> None: + """Check if the current request exceeds rate limits. + + Args: + request: The current request. + db: The database session. + user: The authenticated user, or None if not authenticated. + + Raises: + RateLimitException: If the rate limit is exceeded. + """ + await _check_rate_limit(request, db, user) + + +class RateLimiterMiddleware(BaseHTTPMiddleware): + """Middleware for applying rate limits to all requests.""" + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + """Process a request through the middleware. + + Args: + request: The incoming request. + call_next: The next middleware or handler in the chain. 
+ + Returns: + The response from the next middleware or handler. + """ + response = await call_next(request) + + if hasattr(request.state, "rate_limit_headers"): + for key, value in request.state.rate_limit_headers.items(): + response.headers[key] = value + + return cast(Response, response) diff --git a/backend/src/infrastructure/rate_limit/provider.py b/backend/src/infrastructure/rate_limit/provider.py new file mode 100644 index 00000000..1e85c265 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/provider.py @@ -0,0 +1,369 @@ +from .base import RateLimiterBackend +from .exceptions import BackendNotFoundError + + +class RateLimiterProvider: + """Provider for rate limiter backends with comprehensive backend management. + + This class manages multiple rate limiter backends and provides a centralized + access point for all rate limiting operations. It supports dynamic backend + registration, switching between backends, and health monitoring. + + The provider enables: + - Multi-backend support for different use cases + - Dynamic backend switching based on configuration + - Health monitoring and fallback strategies + - Centralized rate limiter configuration management + + Example: + ```python + # Initialize provider and register backends + provider = RateLimiterProvider() + + # Register Redis backend for production + redis_backend = RedisRateLimiterBackend(host="redis.example.com") + provider.register_backend("redis", redis_backend, default=True) + + # Register in-memory backend for testing + memory_backend = MemoryRateLimiterBackend() + provider.register_backend("memory", memory_backend) + + # Use the provider + backend = provider.get_backend("redis") + count, is_limited = await backend.increment_and_check("user:123", 10, 60) + ``` + """ + + def __init__(self) -> None: + """Initialize the rate limiter provider. + + Creates an empty provider with no registered backends. Backends must be + registered before use via register_backend(). 
+ + Note: + The provider starts with no default backend. The first registered + backend becomes the default, or you can explicitly set a default + using the default=True parameter in register_backend(). + """ + self._backends: dict[str, RateLimiterBackend] = {} + self._default_backend: str | None = None + + def register_backend(self, name: str, backend: RateLimiterBackend, default: bool = False) -> None: + """Register a rate limiter backend with the provider. + + Adds a backend to the provider's registry, making it available for + rate limiting operations. Optionally sets the backend as the default. + + Args: + name: The name of the backend for identification and retrieval. + backend: The backend instance to register. + default: Whether this backend should be the default. If True, or if + no default is set, this backend becomes the default. + + Note: + Backend names should be unique within the provider. Registering + a backend with an existing name will replace the previous backend. + + The first registered backend automatically becomes the default + unless explicitly overridden. + + Example: + ```python + # Register primary Redis backend + provider.register_backend( + "redis-primary", + RedisRateLimiterBackend(host="redis-primary.example.com"), + default=True + ) + + # Register backup Redis backend + provider.register_backend( + "redis-backup", + RedisRateLimiterBackend(host="redis-backup.example.com") + ) + ``` + """ + self._backends[name] = backend + if default or self._default_backend is None: + self._default_backend = name + + def get_backend(self, name: str | None = None) -> RateLimiterBackend: + """Get a rate limiter backend by name. + + Retrieves a registered backend by name, or returns the default backend + if no name is specified. + + Args: + name: The name of the backend to retrieve. If None, returns the + default backend. + + Returns: + The requested rate limiter backend. 
+ + Raises: + BackendNotFoundError: If the requested backend is not found or + no default backend is available. + + Example: + ```python + # Get default backend + default_backend = provider.get_backend() + + # Get specific backend + redis_backend = provider.get_backend("redis") + + # Handle missing backend + try: + backend = provider.get_backend("nonexistent") + except BackendNotFoundError: + backend = provider.get_backend() # Fall back to default + ``` + """ + backend_name = name or self._default_backend + if not backend_name or backend_name not in self._backends: + raise BackendNotFoundError(backend_name or "default") + return self._backends[backend_name] + + def set_default_backend(self, name: str) -> None: + """Set the default backend for the provider. + + Changes the default backend to the specified registered backend. + The default backend is used when no specific backend is requested. + + Args: + name: The name of the backend to set as default. + + Raises: + BackendNotFoundError: If the requested backend is not found. + + Example: + ```python + # Switch to backup backend as default + provider.set_default_backend("redis-backup") + + # Now all default operations use the backup backend + backend = provider.get_backend() # Returns redis-backup + ``` + """ + if name not in self._backends: + raise BackendNotFoundError(name) + self._default_backend = name + + async def ping_all(self) -> dict[str, bool]: + """Ping all registered backends to check their availability. + + Performs health checks on all registered backends to determine their + current availability status. This is useful for monitoring, alerting, + and automatic failover decisions. + + Returns: + A dictionary mapping backend names to their availability status. + True indicates the backend is available, False indicates it's not. 
+ + Example: + ```python + # Check all backend health + health_status = await provider.ping_all() + + # Log unhealthy backends + for backend_name, is_healthy in health_status.items(): + if not is_healthy: + logger.warning(f"Backend {backend_name} is unhealthy") + + # Find healthy backends + healthy_backends = [name for name, status in health_status.items() if status] + ``` + """ + results = {} + for name, backend in self._backends.items(): + results[name] = await backend.ping() + return results + + def list_backends(self) -> dict[str, type[RateLimiterBackend]]: + """List all registered backends with their types. + + Returns information about all registered backends, including their + implementation types. Useful for debugging, monitoring, and + administrative interfaces. + + Returns: + A dictionary mapping backend names to their implementation types. + + Example: + ```python + # List all backends + backends = provider.list_backends() + for name, backend_type in backends.items(): + print(f"Backend: {name}, Type: {backend_type.__name__}") + + # Filter for Redis backends + redis_backends = { + name: backend_type for name, backend_type in backends.items() + if "Redis" in backend_type.__name__ + } + ``` + """ + return {name: type(backend) for name, backend in self._backends.items()} + + @property + def default_backend_name(self) -> str | None: + """Get the name of the default backend. + + Returns: + The name of the default backend, or None if no default backend is set. + + Example: + ```python + # Check current default backend + default_name = provider.default_backend_name + if default_name: + print(f"Default backend: {default_name}") + else: + print("No default backend configured") + ``` + """ + return self._default_backend + + +rate_limiter_provider = RateLimiterProvider() + + +def get_rate_limiter_backend(backend_name: str | None = None) -> RateLimiterBackend: + """Get a rate limiter backend by name from the global provider. 
+ + This is a convenience function to get a rate limiter backend from the + global provider instance. It provides a simple interface for accessing + rate limiter backends throughout the application. + + Args: + backend_name: The name of the backend to get. If None, the default + backend is used. + + Returns: + The requested rate limiter backend. + + Raises: + BackendNotFoundError: If the requested backend is not found. + + Example: + ```python + # Get default backend + backend = get_rate_limiter_backend() + + # Get specific backend + redis_backend = get_rate_limiter_backend("redis") + + # Use in dependency injection + async def rate_limited_endpoint( + backend: RateLimiterBackend = Depends(get_rate_limiter_backend) + ): + count, is_limited = await backend.increment_and_check("api_calls", 100, 3600) + ``` + """ + return rate_limiter_provider.get_backend(backend_name) + + +async def increment_and_check( + key: str, limit: int, period: int, backend_name: str | None = None, fail_open: bool | None = None +) -> tuple[int, bool]: + """Increment the counter for a key and check if rate limit is exceeded. + + Convenience function that combines backend retrieval and rate limit checking + in a single operation. Supports temporary fail-open policy overrides. + + Args: + key: The rate limit key to increment. + limit: Maximum number of requests allowed in the period. + period: Time period in seconds. + backend_name: The name of the backend to use. If None, the default + backend is used. + fail_open: Whether to fail open if an error occurs. If None, uses + the backend's configured setting. 
+ + Returns: + Tuple of (current_count, is_rate_limited) where: + - current_count: The current count of requests + - is_rate_limited: True if the rate limit is exceeded, False otherwise + + Example: + ```python + # Basic rate limit check + count, is_limited = await increment_and_check( + key="user:123:api_calls", + limit=100, + period=3600 + ) + + # With specific backend and fail-open override + count, is_limited = await increment_and_check( + key="user:123:critical_api", + limit=10, + period=60, + backend_name="redis-primary", + fail_open=False # Strict enforcement + ) + ``` + """ + backend = rate_limiter_provider.get_backend(backend_name) + + original_fail_open = None + if fail_open is not None and fail_open != backend.fail_open: + original_fail_open = backend.fail_open + backend.fail_open = fail_open + + try: + return await backend.increment_and_check(key, limit, period) + finally: + if original_fail_open is not None: + backend.fail_open = original_fail_open + + +async def get_count(key: str, backend_name: str | None = None) -> int | None: + """Get the current count for a key from the specified backend. + + Convenience function to get the current count for a rate limit key + without incrementing it. + + Args: + key: The rate limit key to check. + backend_name: The name of the backend to use. If None, the default + backend is used. + + Returns: + The current count or None if the key doesn't exist. + + Example: + ```python + # Check current usage + current_count = await get_count("user:123:api_calls") + if current_count is not None: + remaining = max(0, limit - current_count) + print(f"Remaining requests: {remaining}") + ``` + """ + backend = rate_limiter_provider.get_backend(backend_name) + return await backend.get_count(key) + + +async def reset(key: str, backend_name: str | None = None) -> None: + """Reset the counter for a key using the specified backend. 
+ + Convenience function to reset a rate limit counter, effectively + clearing the rate limit for that key. + + Args: + key: The rate limit key to reset. + backend_name: The name of the backend to use. If None, the default + backend is used. + + Example: + ```python + # Reset rate limit for premium user + await reset("user:123:api_calls") + + # Reset after resolving issue + await reset("user:123:failed_logins", backend_name="redis-primary") + ``` + """ + backend = rate_limiter_provider.get_backend(backend_name) + await backend.reset(key) diff --git a/backend/src/infrastructure/rate_limit/utils.py b/backend/src/infrastructure/rate_limit/utils.py new file mode 100644 index 00000000..18698987 --- /dev/null +++ b/backend/src/infrastructure/rate_limit/utils.py @@ -0,0 +1,3 @@ +def sanitize_path(path: str) -> str: + """Sanitize API path for use in rate limiting keys.""" + return path.strip("/").replace("/", "_") diff --git a/backend/src/infrastructure/security/__init__.py b/backend/src/infrastructure/security/__init__.py new file mode 100644 index 00000000..487fc93b --- /dev/null +++ b/backend/src/infrastructure/security/__init__.py @@ -0,0 +1,5 @@ +"""Security utilities and validation.""" + +from .production_validator import ProductionSecurityError, ProductionSecurityValidator, validate_production_security + +__all__ = ["ProductionSecurityValidator", "ProductionSecurityError", "validate_production_security"] diff --git a/backend/src/infrastructure/security/production_validator.py b/backend/src/infrastructure/security/production_validator.py new file mode 100644 index 00000000..f212667f --- /dev/null +++ b/backend/src/infrastructure/security/production_validator.py @@ -0,0 +1,707 @@ +"""Production security validation. + +This module provides comprehensive security validation for production environments, +checking for common misconfigurations that could lead to security vulnerabilities. 
+""" + +import re + +from ..config.settings import EnvironmentOption, Settings +from ..logging import get_logger + +logger = get_logger() + + +class ProductionSecurityError(Exception): + """Exception raised when critical security issues are found in production. + + This exception is thrown when security vulnerabilities are detected that + could compromise the application's security posture. It indicates issues + that should prevent the application from starting in production. + + Args: + message: Detailed error message describing the security issues. + + Note: + This exception is only raised for critical security issues that + pose immediate threats to the application's security. Non-critical + issues are logged as warnings instead. + + Example: + ```python + try: + validate_production_security(settings) + except ProductionSecurityError as e: + logger.critical(f"Critical security issues prevent startup: {e}") + sys.exit(1) + ``` + """ + + pass + + +class ProductionSecurityValidator: + """Comprehensive security validator for production environments. + + This validator performs extensive security checks on production configurations, + identifying potential vulnerabilities and misconfigurations that could expose + the application to security threats. 
+ + The validator categorizes issues into: + - Critical errors: Issues that prevent application startup + - Warnings: Issues that are logged but don't prevent startup + + Features: + - Secret key strength validation + - Database credential security checks + - Redis configuration security analysis + - CORS policy validation + - Session security configuration checks + - Admin interface security validation + - Debug mode and documentation exposure checks + + Example: + ```python + validator = ProductionSecurityValidator(settings) + + try: + validator.validate_production_security() + logger.info("Production security validation passed") + except ProductionSecurityError as e: + logger.critical(f"Security validation failed: {e}") + raise + ``` + """ + + def __init__(self, settings: Settings): + """Initialize the production security validator. + + Args: + settings: Application settings to validate for security issues. + + Note: + The validator examines all security-relevant settings including + database credentials, Redis configurations, session settings, + and admin interface configurations. + """ + self.settings = settings + self.logger = get_logger() + + def validate_production_security(self) -> None: + """Validate production security configuration comprehensively. + + Performs a complete security audit of the production configuration, + checking for critical security issues that could compromise the + application's security posture. + + Raises: + ProductionSecurityError: If critical security issues are found + that should prevent production deployment. + + Note: + This method only runs validation in production environments. + For non-production environments, it logs a debug message and returns. 
+ + The validation process includes: + - Critical security checks (raise exceptions) + - Warning security checks (log warnings) + - Comprehensive reporting of all issues found + + Example: + ```python + # In application startup + try: + validator.validate_production_security() + logger.info("Production security validation passed") + except ProductionSecurityError as e: + logger.error(f"Production security validation failed: {e}") + sys.exit(1) + ``` + """ + if not self._is_production(): + self.logger.debug("Not in production environment, skipping security validation") + return + + self.logger.info("Running production security validation...") + + critical_errors = self._validate_critical_security() + if critical_errors: + error_msg = "Critical security issues detected in production:\n" + "\n".join( + f" • {error}" for error in critical_errors + ) + self.logger.error(error_msg) + raise ProductionSecurityError(error_msg) + + self._validate_warning_security() + + self.logger.info("Production security validation completed successfully") + + def _is_production(self) -> bool: + """Check if the application is running in production environment. + + Returns: + True if the environment is set to production, False otherwise. + + Note: + This method checks the ENVIRONMENT setting to determine if + comprehensive security validation should be performed. + """ + return self.settings.ENVIRONMENT == EnvironmentOption.PRODUCTION + + def _validate_critical_security(self) -> list[str]: + """Validate critical security issues that should prevent startup. + + Performs checks for security vulnerabilities that pose immediate + threats to the application's security and should prevent the + application from starting in production. + + Returns: + List of critical security error messages. Empty list if no + critical issues are found. 
+ + Note: + Critical security issues include: + - Insecure secret keys + - Unprotected admin interfaces + - Default database credentials + - Empty database passwords + + These issues can lead to immediate security breaches and + should be fixed before production deployment. + + Example: + Critical issues that would be detected: + - SECRET_KEY using default values + - Admin interface with no IP restrictions + - Database using 'postgres' password + - Empty database password + """ + errors = [] + + if self._is_insecure_secret_key(): + errors.append( + "SECRET_KEY is using default or insecure value. " + "This compromises session security, CSRF protection, and JWT tokens. " + "Generate a strong, unique secret key for production." + ) + + if self._is_database_using_default_credentials(): + errors.append( + "Database is using default credentials (POSTGRES_PASSWORD='postgres'). " + "This is a well-known default that attackers will try first. " + "Use a strong, unique password for production." + ) + + if self._is_database_password_empty(): + errors.append( + "Database password is empty (POSTGRES_PASSWORD is not set). " + "This leaves your database completely unprotected. " + "Set a strong password for production." + ) + + return errors + + def _validate_warning_security(self) -> None: + """Log warnings for security concerns that don't prevent startup. + + Identifies security issues that should be addressed but don't pose + immediate threats severe enough to prevent application startup. + These issues are logged as warnings for review and remediation. + + Note: + Warning-level security issues include: + - Redis instances without passwords + - Overly permissive CORS settings + - Debug mode enabled in production + - API documentation exposed + - Insecure session configurations + - Weak admin credentials + + While these don't prevent startup, they should be addressed + to maintain optimal security posture. 
+ + Example: + Warning issues that would be detected: + - CORS_ORIGINS set to '*' + - Redis without password authentication + - Session timeout too long + - Weak admin usernames or passwords + """ + warnings = [] + + redis_warnings = self._check_redis_security() + warnings.extend(redis_warnings) + + if self._is_cors_too_permissive(): + warnings.append( + "CORS_ORIGINS is set to '*' (allow all origins). This can enable " + "cross-origin attacks. Consider restricting to specific domains in production." + ) + + if self._is_debug_enabled(): + warnings.append( + "DEBUG mode is enabled in production. This can expose sensitive information " + "in error responses and enable debug endpoints. Set DEBUG=false for production." + ) + + docs_warning = self._check_docs_security() + if docs_warning: + warnings.append(docs_warning) + + session_warnings = self._check_session_security() + warnings.extend(session_warnings) + + admin_warnings = self._check_admin_credentials() + warnings.extend(admin_warnings) + + for warning in warnings: + self.logger.warning(f"PRODUCTION SECURITY WARNING: {warning}") + + if warnings: + self.logger.warning( + f"Found {len(warnings)} production security warnings. " + "While not critical, these should be reviewed for optimal security." + ) + + def _is_insecure_secret_key(self) -> bool: + """Check if SECRET_KEY is insecure or uses default values. + + Analyzes the secret key for common security weaknesses including + default values, predictable patterns, and insufficient entropy. + + Returns: + True if the secret key is insecure, False otherwise. 
+ + Note: + The validation checks for: + - Empty or missing secret keys + - Common default values and patterns + - Insufficient length (< 32 characters) + - Predictable patterns and repetition + - Common weak strings + + A secure secret key should be: + - At least 32 characters long + - Randomly generated + - Unique to the application + - Free of predictable patterns + """ + secret = self.settings.SECRET_KEY + + if not secret: + return True + + insecure_patterns = [ + "insecure-secret-key-change-this", + "change-me", + "change-this", + "default", + "secret", + "password", + "secretkey", + "key", + "123456", + "abc123", + "test", + "dev", + "development", + ] + + secret_lower = secret.lower() + if any(pattern in secret_lower for pattern in insecure_patterns): + return True + + if len(secret) < 32: + return True + + if self._has_predictable_pattern(secret): + return True + + return False + + def _has_predictable_pattern(self, secret: str) -> bool: + """Check if secret has predictable patterns that reduce security. + + Args: + secret: The secret string to analyze for patterns. + + Returns: + True if predictable patterns are found, False otherwise. + + Note: + Predictable patterns include: + - Repeated characters (e.g., "aaaa", "1111") + - Sequential characters (e.g., "1234", "abcd") + - Common keyboard patterns (e.g., "qwerty") + + These patterns reduce the entropy of the secret key and + make it more susceptible to brute force attacks. + """ + if re.search(r"(.)\1{3,}", secret): + return True + + if "1234" in secret or "abcd" in secret.lower() or "qwerty" in secret.lower(): + return True + + return False + + def _is_admin_access_completely_open(self) -> bool: + """Check if admin interface has no access restrictions. + + Returns: + True if admin access is completely open, False otherwise. + + Note: + The admin interface uses SQLAdmin with session-based authentication. 
+ Additional IP restrictions should be handled at the reverse proxy level + (e.g., nginx, caddy) in production environments. + """ + return False + + def _is_database_using_default_credentials(self) -> bool: + """Check if database is using well-known default credentials. + + Returns: + True if database is using default credentials, False otherwise. + + Note: + The default PostgreSQL password "postgres" is well-known and + commonly targeted by attackers. Production systems should + use strong, unique passwords. + """ + return self.settings.POSTGRES_PASSWORD == "postgres" + + def _is_database_password_empty(self) -> bool: + """Check if database password is empty or missing. + + Returns: + True if database password is empty, False otherwise. + + Note: + Empty database passwords leave the database completely + unprotected and accessible to anyone who can reach it. + """ + return not self.settings.POSTGRES_PASSWORD or self.settings.POSTGRES_PASSWORD.strip() == "" + + def _check_redis_security(self) -> list[str]: + """Check Redis security configuration for all Redis instances. + + Analyzes all Redis configurations used by the application for + security issues including authentication and encryption. + + Returns: + List of Redis security warning messages. + + Note: + Redis security checks include: + - Password authentication for all instances + - SSL/TLS encryption for remote connections + - Instance isolation between services + + Multiple Redis instances may be configured for different + services (cache, rate limiting, sessions, admin). + """ + warnings = [] + + redis_configs = self._get_redis_configurations() + + for config in redis_configs: + if not config["password"]: + warnings.append( + f"Redis instance for {config['service']} ({config['host']}:{config['port']} " + f"DB {config['db']}) has no password protection. Consider setting a password " + f"to prevent unauthorized access." 
+ ) + + if not config["ssl"] and config["host"] not in ["localhost", "127.0.0.1"]: + warnings.append( + f"Redis instance for {config['service']} ({config['host']}:{config['port']}) " + f"is not using SSL/TLS encryption. Consider enabling SSL for production." + ) + + same_instance_warning = self._check_redis_instance_sharing() + if same_instance_warning: + warnings.append(same_instance_warning) + + return warnings + + def _get_redis_configurations(self) -> list[dict]: + """Get all Redis configurations used by the application. + + Returns: + List of Redis configuration dictionaries containing connection + details, authentication, and service information. + + Note: + This method collects Redis configurations from all services + that may use Redis including cache, rate limiting, admin + interface, and session storage. + """ + configs = [] + + if self.settings.CACHE_BACKEND == "redis": + configs.append( + { + "service": "cache", + "host": self.settings.CACHE_REDIS_HOST, + "port": self.settings.CACHE_REDIS_PORT, + "db": self.settings.CACHE_REDIS_DB, + "password": self.settings.CACHE_REDIS_PASSWORD, + "ssl": False, + } + ) + + if self.settings.RATE_LIMITER_BACKEND == "redis": + configs.append( + { + "service": "rate_limiter", + "host": self.settings.RATE_LIMITER_REDIS_HOST, + "port": self.settings.RATE_LIMITER_REDIS_PORT, + "db": self.settings.RATE_LIMITER_REDIS_DB, + "password": self.settings.RATE_LIMITER_REDIS_PASSWORD, + "ssl": False, + } + ) + + if self.settings.SESSION_BACKEND == "redis": + configs.append( + { + "service": "sessions", + "host": self.settings.CACHE_REDIS_HOST, + "port": self.settings.CACHE_REDIS_PORT, + "db": self.settings.CACHE_REDIS_DB, + "password": self.settings.CACHE_REDIS_PASSWORD, + "ssl": False, + } + ) + + return configs + + def _check_redis_instance_sharing(self) -> str: + """Check if the same Redis instance is used by multiple services. + + Returns: + Warning message if Redis instance sharing is detected, + empty string otherwise. 
+ + Note: + While Redis instance sharing is not a security vulnerability, + it can lead to data conflicts and performance issues. Best + practice is to use separate Redis databases or instances + for different services. + """ + configs = self._get_redis_configurations() + + instances: dict[str, list[str]] = {} + for config in configs: + instance_key = f"{config['host']}:{config['port']}:{config['db']}" + if instance_key not in instances: + instances[instance_key] = [] + instances[instance_key].append(config["service"]) + + shared_instances = {k: v for k, v in instances.items() if len(v) > 1} + + if shared_instances: + shared_details = [] + for instance, services in shared_instances.items(): + shared_details.append(f"{instance} (used by: {', '.join(services)})") + + return ( + f"Multiple services are sharing the same Redis instance: {'; '.join(shared_details)}. " + f"Consider using separate Redis databases or instances for different services " + f"to improve isolation and prevent data conflicts." + ) + + return "" + + def _is_cors_too_permissive(self) -> bool: + """Check if CORS is configured too permissively. + + Returns: + True if CORS allows all origins, False otherwise. + + Note: + CORS configured with '*' allows any origin to make requests + to the API, which can enable cross-origin attacks. Production + applications should restrict CORS to specific domains. + """ + return self.settings.CORS_ENABLED and "*" in self.settings.CORS_ORIGINS_LIST + + def _is_debug_enabled(self) -> bool: + """Check if debug mode is enabled in production. + + Returns: + True if debug mode is enabled, False otherwise. + + Note: + Debug mode can expose sensitive information in error responses + and enable debug endpoints that should not be available in + production environments. + """ + return self.settings.DEBUG + + def _check_docs_security(self) -> str: + """Check API documentation security configuration. 
+ + Returns: + Warning message if documentation is exposed, empty string otherwise. + + Note: + API documentation exposes the complete API schema and endpoints, + which can provide valuable information to attackers. Consider + restricting access or disabling documentation in production. + """ + if self.settings.ENABLE_DOCS_IN_PRODUCTION: + return ( + "API documentation is enabled in production (ENABLE_DOCS_IN_PRODUCTION=true). " + "This exposes your API schema and endpoints publicly. Ensure proper access " + "controls are in place or disable docs in production." + ) + return "" + + def _check_session_security(self) -> list[str]: + """Check session security configuration. + + Returns: + List of session security warning messages. + + Note: + Session security checks include: + - Secure cookie configuration + - Appropriate session timeout settings + - CSRF protection enablement + + Insecure session configurations can lead to session hijacking + and other session-based attacks. + """ + warnings: list[str] = [] + + if not self.settings.SESSION_SECURE_COOKIES: + warnings.append( + "SESSION_SECURE_COOKIES is disabled. This allows session cookies to be " + "transmitted over unencrypted HTTP connections, making them vulnerable " + "to interception. Enable secure cookies in production." + ) + + if self.settings.SESSION_TIMEOUT_MINUTES > 120: + warnings.append( + f"Session timeout is set to {self.settings.SESSION_TIMEOUT_MINUTES} minutes " + f"(more than 2 hours). Long session timeouts increase security risk if " + f"a session is compromised. Consider reducing the timeout for production." + ) + + if not self.settings.CSRF_ENABLED: + warnings.append( + "CSRF protection is disabled. This makes your application vulnerable to " + "Cross-Site Request Forgery attacks. Enable CSRF protection in production." + ) + + return warnings + + def _check_admin_credentials(self) -> list[str]: + """Check admin credentials security. 
+ + Returns: + List of admin credential security warning messages. + + Note: + Admin credential security checks include: + - Username predictability + - Password strength and length + - Common weak password detection + + Weak admin credentials are a common attack vector and should + be strengthened in production environments. + """ + warnings: list[str] = [] + + if not self.settings.ADMIN_ENABLED: + return warnings + + if not self.settings.ADMIN_USERNAME or not self.settings.ADMIN_PASSWORD: + return warnings + + weak_usernames = ["admin", "administrator", "root", "user", "test", "demo"] + if self.settings.ADMIN_USERNAME.lower() in weak_usernames: + warnings.append( + f"Admin username '{self.settings.ADMIN_USERNAME}' is predictable. " + f"Consider using a less obvious username for better security." + ) + + password = self.settings.ADMIN_PASSWORD + if len(password) < 12: + warnings.append( + "Admin password is shorter than 12 characters. Use a longer, " + "stronger password for admin accounts in production." + ) + + weak_passwords = { + "password", + "123456", + "admin", + "password123", + "admin123", + "qwerty", + "letmein", + "welcome", + "changeme", + "123456", + "12345678", + "1234", + "123", + "12345", + "123456789", + "adminisp", + "demo", + "root", + "123123", + "admin@123", + "123456aA@", + "01031974", + "Admin@123", + "111111", + "admin1234", + "admin1", + } + if password.lower() in weak_passwords: + warnings.append( + "Admin password appears to be a common weak password. Use a strong, unique password for admin accounts." + ) + + return warnings + + +def validate_production_security(settings: Settings) -> None: + """Convenience function to validate production security configuration. + + Creates a ProductionSecurityValidator instance and runs comprehensive + security validation on the provided settings. + + Args: + settings: Application settings to validate for security issues. + + Raises: + ProductionSecurityError: If critical security issues are found. 
+ + Note: + This is a convenience function that provides a simple interface + for production security validation. It's equivalent to creating + a validator instance and calling validate_production_security(). + + Example: + ```python + from infrastructure.security import validate_production_security + from infrastructure.config import get_settings + + settings = get_settings() + + try: + validate_production_security(settings) + logger.info("Production security validation passed") + except ProductionSecurityError as e: + logger.critical(f"Security validation failed: {e}") + sys.exit(1) + ``` + """ + validator = ProductionSecurityValidator(settings) + validator.validate_production_security() diff --git a/backend/src/infrastructure/taskiq/__init__.py b/backend/src/infrastructure/taskiq/__init__.py new file mode 100644 index 00000000..74f777ed --- /dev/null +++ b/backend/src/infrastructure/taskiq/__init__.py @@ -0,0 +1,12 @@ +"""Taskiq infrastructure package.""" + +from .brokers import default_broker +from .deps import DBSession +from .registry import register_task, task_registry + +__all__ = [ + "default_broker", + "DBSession", + "task_registry", + "register_task", +] diff --git a/backend/src/infrastructure/taskiq/app.py b/backend/src/infrastructure/taskiq/app.py new file mode 100644 index 00000000..76c11004 --- /dev/null +++ b/backend/src/infrastructure/taskiq/app.py @@ -0,0 +1,45 @@ +"""Taskiq app configuration and worker lifecycle management.""" + +import logging + +from taskiq import AsyncBroker +from taskiq.events import TaskiqEvents +from taskiq.state import TaskiqState + +from .brokers import default_broker + +logger = logging.getLogger(__name__) + + +async def startup_taskiq_worker(state: TaskiqState) -> None: + """Initialize worker startup procedures. 
async def shutdown_taskiq_worker(state: TaskiqState) -> None:
    """Cleanup worker shutdown procedures.

    Args:
        state: The taskiq state instance
    """
    logger.info("Shutting down taskiq worker...")
    logger.info("Taskiq worker shutdown complete")


def configure_broker_lifecycle(broker: AsyncBroker) -> None:
    """Configure broker with startup and shutdown handlers.

    Args:
        broker: The broker to configure
    """
    # NOTE(review): add_middlewares() is called with no arguments, which adds
    # nothing — confirm whether middlewares were meant to be passed here.
    broker.add_middlewares()
    broker.add_event_handler(TaskiqEvents.WORKER_STARTUP, startup_taskiq_worker)
    broker.add_event_handler(TaskiqEvents.WORKER_SHUTDOWN, shutdown_taskiq_worker)


configure_broker_lifecycle(default_broker)


# ── file: backend/src/infrastructure/taskiq/brokers.py (new) ──
"""Taskiq broker configuration and initialization."""

from taskiq import AsyncBroker
from taskiq_aio_pika import AioPikaBroker
from taskiq_redis import ListQueueBroker, RedisAsyncResultBackend

from ..config import TaskiqBrokerType, get_settings

settings = get_settings()


def create_default_broker() -> AsyncBroker:
    """Create the default broker for taskiq based on the configured broker type.

    (The original docstring said "email broker"/"email tasks", which was a
    copy-paste error: this factory builds the broker for the "default" queue.)

    Returns:
        Configured AsyncBroker instance for the default queue (Redis or RabbitMQ)

    Raises:
        ValueError: If TASKIQ_BROKER_TYPE is not a supported broker type.
    """
    if settings.TASKIQ_BROKER_TYPE == TaskiqBrokerType.REDIS.value:
        return _create_redis_broker()
    elif settings.TASKIQ_BROKER_TYPE == TaskiqBrokerType.RABBITMQ.value:
        return _create_rabbitmq_broker()
    else:
        raise ValueError(f"Unsupported broker type: {settings.TASKIQ_BROKER_TYPE}")


def _create_redis_broker() -> AsyncBroker:
    """Create Redis-based broker for taskiq."""
    redis_host = settings.TASKIQ_REDIS_HOST
    redis_port = settings.TASKIQ_REDIS_PORT
    redis_db = settings.TASKIQ_REDIS_DB
    redis_password = settings.TASKIQ_REDIS_PASSWORD

    # redis://[:password@]host:port/db — password segment only when configured.
    password_part = f":{redis_password}@" if redis_password else ""
    redis_url = f"redis://{password_part}{redis_host}:{redis_port}/{redis_db}"

    broker = ListQueueBroker(url=redis_url, queue_name="default").with_result_backend(
        RedisAsyncResultBackend(redis_url=redis_url)
    )

    return broker


def _create_rabbitmq_broker() -> AsyncBroker:
    """Create RabbitMQ-based broker for taskiq."""
    rabbitmq_url = settings.TASKIQ_BROKER_URL

    broker = AioPikaBroker(url=rabbitmq_url, queue_name="default")

    return broker


default_broker = create_default_broker()


# ── file: backend/src/infrastructure/taskiq/deps.py (new) ──
"""Task dependencies for taskiq integration."""

from collections.abc import AsyncGenerator
from typing import Annotated

from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import NullPool
from taskiq import TaskiqDepends

from ..config import get_settings

settings = get_settings()

# NullPool: worker processes open/close a fresh connection per task instead of
# sharing the web app's pool.
taskiq_engine = create_async_engine(
    settings.DATABASE_URL,
    echo=False,
    future=True,
    poolclass=NullPool,
)

taskiq_session_factory = async_sessionmaker(bind=taskiq_engine, class_=AsyncSession, expire_on_commit=False)


async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
    """Get a database session for taskiq tasks.

    Provides a database session with proper lifecycle management for
    taskiq tasks, ensuring clean connection handling and transaction management.

    Yields:
        AsyncSession: Database session configured for taskiq usage.
    """
    async with taskiq_session_factory() as session:
        try:
            yield session
        finally:
            # Redundant with the context manager's own close, but harmless;
            # kept to match the original behavior.
            await session.close()


DBSession = Annotated[AsyncSession, TaskiqDepends(get_db_session)]


# ── file: backend/src/infrastructure/taskiq/registry.py (new) ──
"""Simple task registry for development and monitoring."""

import logging
from collections import defaultdict
from datetime import datetime
from typing import Any

logger = logging.getLogger(__name__)


class TaskRegistry:
    """Simple registry to track registered tasks for development and monitoring."""

    def __init__(self):
        self._tasks: dict[str, dict[str, Any]] = {}
        self._stats: dict[str, int] = defaultdict(int)
        self._registered_at = datetime.now()

    def register_task(self, task_name: str, broker_name: str, task_func: Any) -> None:
        """Register a task with the registry.

        Args:
            task_name: Name of the task
            broker_name: Name of the broker the task is registered with
            task_func: The task function
        """
        self._tasks[task_name] = {
            "broker": broker_name,
            "function": task_func.__name__ if hasattr(task_func, "__name__") else str(task_func),
            "module": getattr(task_func, "__module__", None),
            "registered_at": datetime.now(),
        }
        self._stats["total_tasks"] += 1
        logger.debug(f"Registered task: {task_name} on broker: {broker_name}")

    def get_tasks(self) -> dict[str, dict[str, Any]]:
        """Get all registered tasks."""
        return self._tasks.copy()

    def get_task_names(self) -> list[str]:
        """Get list of all task names."""
        return list(self._tasks.keys())

    def get_tasks_by_broker(self, broker_name: str) -> list[str]:
        """Get tasks registered with a specific broker."""
        return [task_name for task_name, task_info in self._tasks.items() if task_info["broker"] == broker_name]

    def get_stats(self) -> dict[str, Any]:
        """Get registry statistics."""
        return {
            "total_tasks": self._stats["total_tasks"],
            "registry_created_at": self._registered_at,
            "tasks_by_broker": {
                broker: len([t for t in self._tasks.values() if t["broker"] == broker])
                for broker in set(t["broker"] for t in self._tasks.values())
            },
        }


task_registry = TaskRegistry()


def register_task(task_name: str, broker_name: str, task_func: Any) -> None:
    """Register a task with the global registry."""
    task_registry.register_task(task_name, broker_name, task_func)


# ── file: backend/src/infrastructure/taskiq/worker.py (new) ──
"""Taskiq worker entry point."""

from .brokers import default_broker

__all__ = ["default_broker"]

if __name__ == "__main__":
    # Run with: python -m taskiq worker infrastructure.taskiq.worker:default_broker
    pass
# ── renames (content unchanged):
#   src/app/admin/__init__.py -> backend/src/interfaces/__init__.py
#   src/app/core/__init__.py  -> backend/src/interfaces/admin/__init__.py

# ── file: backend/src/interfaces/admin/auth.py (new) ──
"""Authentication backend for SQLAdmin."""

import secrets

from sqladmin.authentication import AuthenticationBackend
from starlette.requests import Request

from ...infrastructure.config.settings import get_settings


class AdminAuth(AuthenticationBackend):
    """Session-based authentication for the admin interface."""

    async def login(self, request: Request) -> bool:
        """Validate login credentials and create session.

        Returns:
            True when the submitted credentials match the configured admin
            credentials; False otherwise (including when admin credentials
            are not configured).
        """
        form = await request.form()
        username = str(form.get("username") or "")
        password = str(form.get("password") or "")

        settings = get_settings()

        # Refuse to authenticate when admin credentials are not configured;
        # the original plain `==` comparison would have matched None == None.
        if not settings.ADMIN_USERNAME or not settings.ADMIN_PASSWORD:
            return False

        # Constant-time comparison avoids leaking credential prefixes through
        # response-timing differences.
        username_ok = secrets.compare_digest(username.encode(), str(settings.ADMIN_USERNAME).encode())
        password_ok = secrets.compare_digest(password.encode(), str(settings.ADMIN_PASSWORD).encode())

        if username_ok and password_ok:
            request.session.update({"admin_authenticated": True})
            return True

        return False

    async def logout(self, request: Request) -> bool:
        """Clear the admin session."""
        request.session.clear()
        return True

    async def authenticate(self, request: Request) -> bool:
        """Check if the current request is authenticated."""
        return bool(request.session.get("admin_authenticated", False))


# ── file: backend/src/interfaces/admin/initialize.py (new) ──
"""SQLAdmin interface initialization."""

from sqladmin import Admin

from ...infrastructure.config.settings import get_settings
from ...infrastructure.database.session import engine
from .auth import AdminAuth
from .views import register_admin_views


def create_admin_interface(app) -> Admin | None:
    """Create and configure the SQLAdmin interface.

    Args:
        app: The FastAPI application instance.

    Returns:
        Configured Admin instance or None if admin is disabled.
    """
    settings = get_settings()

    if not settings.ADMIN_ENABLED:
        return None

    authentication_backend = AdminAuth(secret_key=settings.SECRET_KEY)

    admin = Admin(
        app=app,
        engine=engine,
        authentication_backend=authentication_backend,
        title="Admin",
    )

    register_admin_views(admin)

    return admin


# ── file: backend/src/interfaces/admin/mixins.py (new) ──
"""Mixins for SQLAdmin views to handle dataclass-based models."""

from typing import Any

from starlette.requests import Request


class DataclassModelMixin:
    """Mixin for SQLAdmin ModelView to support dataclass-based SQLAlchemy models.

    SQLAdmin's default insert_model creates an empty model instance via model(),
    then sets attributes. This fails for MappedAsDataclass models with required
    fields that have no defaults.

    This mixin overrides insert_model to create the model WITH the form data,
    which works correctly with dataclass __init__ signatures.

    Usage:
        class MyAdmin(DataclassModelMixin, ModelView, model=MyModel):
            ...

        # For custom data transformation before model creation:
        class UserAdmin(DataclassModelMixin, ModelView, model=User):
            async def on_model_change(self, data, model, is_created, request):
                if is_created:
                    # Transform data BEFORE model is created
                    data["hashed_password"] = hash(data.pop("password"))
    """

    async def insert_model(self, request: Request, data: dict[str, Any]) -> Any:
        """Create model instance with data for dataclass compatibility.

        Instead of creating an empty model then setting attributes,
        we create the model with all data at once, which satisfies
        dataclass required field constraints.
        """
        await self.on_model_change(data, None, True, request)  # type: ignore[attr-defined]

        clean_data = {}
        for key, value in data.items():
            if hasattr(self, "_mapper") and key in self._mapper.relationships:
                rel = self._mapper.relationships[key]
                if rel.direction.name == "MANYTOONE":
                    # Map the relationship value onto its FK column instead of
                    # passing the relationship key to the dataclass __init__.
                    fk_columns = list(rel.local_columns)
                    if fk_columns:
                        fk_col_name = fk_columns[0].name
                        clean_data[fk_col_name] = int(value) if value else None
                    continue

            # Empty strings from the form become NULL for nullable columns.
            if value == "" and hasattr(self, "_mapper"):
                col = self._mapper.columns.get(key)
                if col is not None and col.nullable:
                    value = None

            clean_data[key] = value

        obj = self.model(**clean_data)  # type: ignore[attr-defined]

        # NOTE(review): exact statement nesting was lost in the mangled diff;
        # refresh/after_model_change are kept inside the session context.
        async with self.session_maker(expire_on_commit=False) as session:  # type: ignore[attr-defined]
            session.add(obj)
            await session.commit()
            await session.refresh(obj)
            await self.after_model_change(data, obj, True, request)  # type: ignore[attr-defined]
            return obj


# ── file: backend/src/interfaces/admin/views/__init__.py (new) ──
"""SQLAdmin model views for the admin interface."""

from sqladmin import Admin

from .tiers import TierAdmin
from .users import UserAdmin

__all__ = [
    "UserAdmin",
    "TierAdmin",
    "register_admin_views",
]


def register_admin_views(admin: Admin) -> None:
    """Register all model views with the admin interface."""
    admin.add_view(UserAdmin)
    admin.add_view(TierAdmin)
# ── file: backend/src/interfaces/admin/views/tiers.py (new) ──
"""Admin view for Tier model."""

from sqladmin import ModelView
from starlette.requests import Request

from ....infrastructure.database.session import local_session
from ....modules.tier.crud import crud_tiers
from ....modules.tier.models import Tier
from ....modules.tier.schemas import TierCreate, TierUpdate
from ....modules.tier.service import TierService
from ..mixins import DataclassModelMixin


class TierAdmin(DataclassModelMixin, ModelView, model=Tier):
    """Admin view for Tier model."""

    name = "Tier"
    name_plural = "Tiers"
    icon = "fa-solid fa-layer-group"
    category = "Users & Access"

    column_list = [Tier.id, Tier.name, Tier.description]
    column_details_list = "__all__"
    column_searchable_list = [Tier.name]
    column_sortable_list = [Tier.id, Tier.name]

    can_create = True
    can_edit = True
    can_delete = True
    can_view_details = True
    can_export = True

    # Form fields mirror the Pydantic create/update schemas.
    form_create_rules = list(TierCreate.model_fields.keys())
    form_edit_rules = list(TierUpdate.model_fields.keys())

    async def delete_model(self, request: Request, pk: str) -> None:
        """Override delete to permanently remove tier from database.

        Uses the tier service's permanent_delete method which validates
        that no users or rate limits are associated before deletion.

        Args:
            request: The incoming request object.
            pk: Primary key (ID) of the tier to delete.

        Raises:
            ValueError: If tier not found or has dependencies.
        """
        async with local_session() as db:
            tier_service = TierService()

            tier = await crud_tiers.get(db=db, id=int(pk))
            if not tier:
                raise ValueError(f"Tier with ID {pk} not found")

            await tier_service.permanent_delete(tier["name"], db)


# ── file: backend/src/interfaces/admin/views/users.py (new) ──
"""Admin view for User model."""

from typing import Any

from sqladmin import ModelView
from starlette.requests import Request
from wtforms import SelectField

from ....infrastructure.auth.utils import get_password_hash
from ....infrastructure.database.session import local_session
from ....modules.user.enums import OAuthProvider
from ....modules.user.models import User
from ....modules.user.schemas import UserUpdate
from ....modules.user.service import UserService
from ..mixins import DataclassModelMixin

OAUTH_PROVIDER_CHOICES = [("", "None")] + [(p.value, p.value.title()) for p in OAuthProvider]


class UserAdmin(DataclassModelMixin, ModelView, model=User):
    """Admin view for User model with password hashing."""

    name = "User"
    name_plural = "Users"
    icon = "fa-solid fa-user"
    category = "Users & Access"

    column_list = [User.id, User.name, User.username, User.email, User.is_superuser, User.tier]
    column_details_list = "__all__"
    column_searchable_list = [User.name, User.username, User.email]
    column_sortable_list = [User.id, User.name, User.username, User.email]
    column_default_sort = [(User.id, True)]

    can_create = True
    can_edit = True
    can_delete = True
    can_view_details = True
    can_export = True

    # The create form collects a plaintext password under this field name;
    # on_model_change hashes it before the model is constructed.
    column_labels = {"hashed_password": "Password"}

    form_create_rules = ["name", "username", "email", "hashed_password", "tier_id", "is_superuser"]
    form_edit_rules = [*UserUpdate.model_fields.keys(), "tier_id", "is_superuser"]

    form_overrides = {"oauth_provider": SelectField}
    form_args = {"oauth_provider": {"choices": OAUTH_PROVIDER_CHOICES}}

    async def on_model_change(self, data: dict[str, Any], model: Any, is_created: bool, request: Request) -> None:
        """Hash the password before saving."""
        if is_created and "hashed_password" in data and data["hashed_password"]:
            data["hashed_password"] = get_password_hash(data["hashed_password"])
        if "oauth_provider" in data and data["oauth_provider"] == "":
            data["oauth_provider"] = None

    async def delete_model(self, request: Request, pk: str) -> None:
        """Override delete to anonymize user instead of removing.

        GDPR/LGPD compliant deletion that:
        - Anonymizes all PII (name, username, password, OAuth data)
        - Retains email and timestamps for legal compliance
        - Soft deletes the user (is_deleted = True)
        - Maintains foreign key relationships

        Args:
            request: The incoming request object.
            pk: Primary key (ID) of the user to anonymize.
        """
        async with local_session() as db:
            user_service = UserService()
            await user_service.anonymize_user(user_id=int(pk), db=db)


# ── file: backend/src/interfaces/api/__init__.py (moved from src/app/api/__init__.py) ──
from fastapi import APIRouter

from .v1 import router as v1_router

router = APIRouter(prefix="/api")
router.include_router(v1_router)


# ── file: backend/src/interfaces/api/v1/__init__.py (new) ──
from fastapi import APIRouter

from ....infrastructure.auth.routes import router as auth_router
from ....modules.api_keys.routes import router as api_keys_router
from ....modules.rate_limit.routes import router as rate_limits_router
from ....modules.tier.routes import router as tiers_router
from ....modules.user.routes import router as users_router

router = APIRouter(prefix="/v1")
router.include_router(users_router, prefix="/users")
router.include_router(tiers_router, prefix="/tiers")
router.include_router(rate_limits_router, prefix="/rate-limits")
router.include_router(auth_router, prefix="/auth")
router.include_router(api_keys_router, prefix="/api-keys")
# ── file: backend/src/interfaces/main.py (new) ──
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager

from fastapi import FastAPI
from starlette.middleware.sessions import SessionMiddleware

from ..infrastructure.app_factory import create_application, lifespan_factory
from ..infrastructure.config.settings import get_settings
from ..infrastructure.security import validate_production_security
from ..interfaces.api import router
from .admin.initialize import create_admin_interface

settings = get_settings()


@asynccontextmanager
async def lifespan_with_security(app: FastAPI) -> AsyncGenerator[None, None]:
    """Custom lifespan that includes security validation."""
    # Fail fast before startup when production security checks are enabled.
    if settings.PRODUCTION_SECURITY_VALIDATION_ENABLED:
        validate_production_security(settings)

    default_lifespan = lifespan_factory(settings)

    async with default_lifespan(app):
        yield


app = create_application(
    router=router,
    settings=settings,
    lifespan=lifespan_with_security,
    create_tables_on_startup=None,
    enable_cors=None,
    cors_origins=None,
    enable_docs_in_production=None,
    docs_production_dependency=None,
    enable_gzip=None,
    openapi_prefix=None,
    title="FastAPI Boilerplate",
    summary="A modular FastAPI starter with a plugin system",
    description="""
    # FastAPI Boilerplate

    A modern FastAPI starter with:

    * Vertical-slice modules and a clean infrastructure layer
    * Session-based auth with OAuth providers
    * Swappable cache, queue, and rate-limit backends
    * SQLAdmin admin UI
    """,
    version="0.18.0",
    contact={
        "name": "Benav Labs",
        "url": "https://github.com/benavlabs/FastAPI-boilerplate",
        "email": "contact@benav.io",
    },
    license_info={
        "name": "MIT",
        "identifier": "MIT",
    },
    openapi_tags=None,
    docs_url="/docs",
    redoc_url="/redoc",
    openapi_url="/openapi.json",
)

app.add_middleware(SessionMiddleware, secret_key=settings.SECRET_KEY)
create_admin_interface(app)


@app.get("/health", tags=["System"])
async def health_check() -> dict[str, str]:
    """Health check endpoint for monitoring and load balancers."""
    return {"status": "healthy"}


# ── file: backend/src/modules/__init__.py (new) ──
"""Initialize all modules and models to ensure SQLAlchemy registration."""

from .api_keys.models import APIKey, KeyPermission, KeyUsage
from .rate_limit.models import RateLimit
from .tier.models import Tier
from .user.models import User

__all__ = [
    "User",
    "Tier",
    "RateLimit",
    "APIKey",
    "KeyUsage",
    "KeyPermission",
]


# ── file: backend/src/modules/api_keys/__init__.py (new) ──
"""API Key Management Module.

This module provides comprehensive API key management functionality
for developer-facing products and API-first business models.

Key Features:
- Secure API key generation and storage
- Permission-based access control
- Usage tracking per API key
- Key rotation and revocation
- Analytics and usage reporting
- Granular permissions system
"""

from .crud import crud_api_keys, crud_key_permissions, crud_key_usage
from .enums import HTTPMethod, KeyPermissionAction, KeyPermissionResource, KeyStatus, KeyType
from .models import APIKey, KeyPermission, KeyUsage
from .schemas import (
    APIKeyBase,
    APIKeyCreate,
    APIKeyRead,
    APIKeyResponse,
    APIKeyUpdate,
    APIKeyValidationRequest,
    APIKeyValidationResponse,
    APIKeyWithPermissions,
    KeyPermissionBase,
    KeyPermissionCreate,
    KeyPermissionRead,
    KeyPermissionUpdate,
    KeyUsageAnalytics,
    KeyUsageBase,
    KeyUsageCreate,
    KeyUsageRead,
    UserAPIKeySummary,
)
from .service import APIKeyService

__all__ = [
    # Models
    "APIKey",
    "KeyUsage",
    "KeyPermission",
    # Schemas
    "APIKeyBase",
    "APIKeyCreate",
    "APIKeyRead",
    "APIKeyResponse",
    "APIKeyUpdate",
    "KeyUsageBase",
    "KeyUsageCreate",
    "KeyUsageRead",
    "KeyPermissionBase",
    "KeyPermissionCreate",
    "KeyPermissionRead",
    "KeyPermissionUpdate",
    "APIKeyWithPermissions",
    "KeyUsageAnalytics",
    "UserAPIKeySummary",
    "APIKeyValidationRequest",
    "APIKeyValidationResponse",
    # CRUD
    "crud_api_keys",
    "crud_key_usage",
    "crud_key_permissions",
    # Service
    "APIKeyService",
    # Enums
    "HTTPMethod",
    "KeyPermissionAction",
    "KeyPermissionResource",
    "KeyStatus",
    "KeyType",
]


# ── file: backend/src/modules/api_keys/crud.py (new) ──
from fastcrud import FastCRUD

from .models import APIKey, KeyPermission, KeyUsage

crud_api_keys: FastCRUD = FastCRUD(APIKey)
crud_key_usage: FastCRUD = FastCRUD(KeyUsage)
crud_key_permissions: FastCRUD = FastCRUD(KeyPermission)
a/backend/src/modules/api_keys/enums.py b/backend/src/modules/api_keys/enums.py new file mode 100644 index 00000000..e9b31be5 --- /dev/null +++ b/backend/src/modules/api_keys/enums.py @@ -0,0 +1,70 @@ +"""API Key management enums.""" + +from enum import StrEnum + + +class KeyStatus(StrEnum): + """API key status enumeration.""" + + ACTIVE = "active" + INACTIVE = "inactive" + SUSPENDED = "suspended" + EXPIRED = "expired" + REVOKED = "revoked" + + +class KeyPermissionResource(StrEnum): + """API key permission resources.""" + + CONVERSATIONS = "conversations" + CREDITS = "credits" + AI_USAGE = "ai_usage" + USER_PROFILE = "user_profile" + ANALYTICS = "analytics" + ADMIN = "admin" + BILLING = "billing" + API_KEYS = "api_keys" + WILDCARD = "*" + + +class KeyPermissionAction(StrEnum): + """API key permission actions.""" + + READ = "read" + WRITE = "write" + DELETE = "delete" + CREATE = "create" + UPDATE = "update" + LIST = "list" + ADMIN = "admin" + WILDCARD = "*" + + +class KeyType(StrEnum): + """API key type enumeration. + + Types: + PUBLIC: Limited read-only access + PRIVATE: Full access for user's data + ADMIN: Administrative access + SERVICE: Service-to-service communication + WEBHOOK: Webhook authentication + """ + + PUBLIC = "public" + PRIVATE = "private" + ADMIN = "admin" + SERVICE = "service" + WEBHOOK = "webhook" + + +class HTTPMethod(StrEnum): + """HTTP method enumeration for API key usage tracking.""" + + GET = "GET" + POST = "POST" + PUT = "PUT" + PATCH = "PATCH" + DELETE = "DELETE" + HEAD = "HEAD" + OPTIONS = "OPTIONS" diff --git a/backend/src/modules/api_keys/models.py b/backend/src/modules/api_keys/models.py new file mode 100644 index 00000000..bfa5787e --- /dev/null +++ b/backend/src/modules/api_keys/models.py @@ -0,0 +1,86 @@ +"""API Key Management Models. + +This module defines the database models for managing API keys, +permissions, and usage tracking for developer-facing products. 
+""" + +from datetime import datetime +from typing import Any + +from sqlalchemy import BigInteger, Boolean, DateTime, ForeignKey, Index, Integer, String, Text +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy.orm import Mapped, mapped_column + +from ...infrastructure.database.models import TimestampMixin +from ...infrastructure.database.session import Base +from .enums import KeyPermissionAction, KeyPermissionResource + + +class APIKey(Base, TimestampMixin): + """API key for programmatic access.""" + + __tablename__ = "api_keys" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True, init=False) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("user.id"), index=True) + name: Mapped[str] = mapped_column(String(100)) + key_hash: Mapped[str] = mapped_column(String(255), unique=True, index=True) + key_prefix: Mapped[str] = mapped_column(String(20), index=True) + permissions: Mapped[dict[str, Any]] = mapped_column(JSON, insert_default=dict) + usage_limits: Mapped[dict[str, Any]] = mapped_column(JSON, insert_default=dict) + last_used_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + last_used_ip: Mapped[str | None] = mapped_column(String(45), default=None) + is_active: Mapped[bool] = mapped_column(Boolean, default=True) + expires_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + key_metadata: Mapped[dict[str, Any] | None] = mapped_column(JSON, default=None) + + __table_args__ = ( + Index("idx_api_keys_user_active", "user_id", "is_active"), + Index("idx_api_keys_prefix", "key_prefix"), + Index("idx_api_keys_expires_at", "expires_at"), + ) + + +class KeyUsage(Base, TimestampMixin): + """API key usage tracking for analytics and billing.""" + + __tablename__ = "key_usage" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True, init=False) + api_key_id: Mapped[int] = mapped_column(Integer, ForeignKey("api_keys.id", 
ondelete="CASCADE"), index=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("user.id"), index=True) + endpoint: Mapped[str] = mapped_column(String(255), index=True) + method: Mapped[str] = mapped_column(String(10)) + status_code: Mapped[int] = mapped_column(Integer, index=True) + tokens_used: Mapped[int | None] = mapped_column(Integer, default=None) + cost_microcents: Mapped[int | None] = mapped_column(BigInteger, default=None) + response_time_ms: Mapped[int | None] = mapped_column(Integer, default=None) + ip_address: Mapped[str | None] = mapped_column(String(45), default=None) + user_agent: Mapped[str | None] = mapped_column(Text, default=None) + error_message: Mapped[str | None] = mapped_column(Text, default=None) + usage_metadata: Mapped[dict[str, Any] | None] = mapped_column(JSON, default=None) + + __table_args__ = ( + Index("idx_key_usage_key_created", "api_key_id", "created_at"), + Index("idx_key_usage_user_created", "user_id", "created_at"), + Index("idx_key_usage_endpoint", "endpoint"), + Index("idx_key_usage_status", "status_code"), + ) + + +class KeyPermission(Base, TimestampMixin): + """Granular permissions for API keys.""" + + __tablename__ = "key_permissions" + + id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True, init=False) + api_key_id: Mapped[int] = mapped_column(Integer, ForeignKey("api_keys.id", ondelete="CASCADE"), index=True) + resource: Mapped[KeyPermissionResource] = mapped_column(index=True) # KeyPermissionResource enum values + action: Mapped[KeyPermissionAction] = mapped_column(index=True) # KeyPermissionAction enum values + conditions: Mapped[dict[str, Any] | None] = mapped_column(JSON, default=None) + is_allowed: Mapped[bool] = mapped_column(Boolean, default=True) + + __table_args__ = ( + Index("idx_key_permissions_key_resource", "api_key_id", "resource", "action", unique=True), + Index("idx_key_permissions_resource_action", "resource", "action"), + ) diff --git 
a/backend/src/modules/api_keys/routes.py b/backend/src/modules/api_keys/routes.py new file mode 100644 index 00000000..25a63e38 --- /dev/null +++ b/backend/src/modules/api_keys/routes.py @@ -0,0 +1,403 @@ +"""API endpoints for API key management.""" + +from typing import Any + +from fastapi import APIRouter, Depends, HTTPException, Path, Query, status +from fastcrud import PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.session.dependencies import get_current_user +from ...infrastructure.database.session import async_session +from ..common.exceptions import ( + PermissionDeniedError, + ResourceNotFoundError, +) +from ..common.utils.error_handler import handle_exception +from ..user.models import User +from .schemas import ( + APIKeyCreate, + APIKeyRead, + APIKeyUpdate, + KeyUsageRead, +) +from .service import APIKeyService + +router = APIRouter(tags=["API Keys"]) + + +def get_api_key_service() -> APIKeyService: + """Dependency for providing an APIKeyService instance.""" + return APIKeyService() + + +@router.post( + "/", + status_code=201, + summary="Create API Key", + description=""" + Creates a new API key for the authenticated user. + + API keys enable programmatic access to the API and are useful + for building developer-facing products and integrations. + + - **name**: Human-readable name for the API key + - **permissions**: Permission settings for the key + - **usage_limits**: Usage limits specific to this key + - **expires_at**: Optional expiration date + + ⚠️ **Important**: The full API key is only shown once during creation. + Store it securely as it cannot be retrieved again. 
+ """, + responses={ + 201: {"description": "API key created successfully"}, + 400: {"description": "Invalid API key data"}, + 401: {"description": "Authentication required"}, + }, + response_description="Created API key with full key (shown only once)", +) +async def create_api_key( + key_data: APIKeyCreate, + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), +) -> dict[str, Any]: + """Create a new API key for the authenticated user.""" + try: + return await api_key_service.create_api_key( + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + key_data=key_data, + db=db, + ) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get( + "/", + response_model=PaginatedListResponse[APIKeyRead], + summary="Get User API Keys", + description=""" + Retrieves all API keys for the authenticated user. + + - **active_only**: Whether to return only active keys (default: true) + - **page**: Page number (default: 1) + - **items_per_page**: Items per page (default: 50) + + Returns keys sorted by creation date (newest first). + For security, only the key prefix is shown, not the full key. 
+ """, + responses={ + 200: {"description": "API keys retrieved successfully"}, + 401: {"description": "Authentication required"}, + }, + response_description="Paginated list of user's API keys", +) +async def get_user_api_keys( + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), + active_only: bool = Query(True, description="Return only active keys"), + page: int = Query(1, ge=1, description="Page number"), + items_per_page: int = Query(50, ge=1, le=100, description="Items per page"), +) -> dict[str, Any]: + """Get all API keys for the authenticated user.""" + try: + result = await api_key_service.get_user_api_keys( + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + active_only=active_only, + limit=items_per_page, + offset=compute_offset(page, items_per_page), + db=db, + ) + + return paginated_response( + crud_data=result, + page=page, + items_per_page=items_per_page, + ) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get( + "/{key_id}", + summary="Get API Key Details", + description=""" + Retrieves details for a specific API key. + + - **key_id**: ID of the API key to retrieve + + Users can only access their own API keys. + Returns comprehensive key information including usage limits and permissions. 
+ """, + responses={ + 200: {"description": "API key details retrieved successfully"}, + 401: {"description": "Authentication required"}, + 403: {"description": "Access denied to this API key"}, + 404: {"description": "API key not found"}, + }, + response_description="API key details", +) +async def get_api_key( + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), +) -> dict[str, Any]: + """Get details for a specific API key.""" + try: + return await api_key_service.get_api_key( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + db=db, + ) + except ResourceNotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + except PermissionDeniedError as e: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e)) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.patch( + "/{key_id}", + summary="Update API Key", + description=""" + Updates an existing API key. + + - **key_id**: ID of the API key to update + + Allows updating name, permissions, usage limits, active status, and expiration. + Users can only update their own API keys. 
+ """, + responses={ + 200: {"description": "API key updated successfully"}, + 400: {"description": "Invalid update data"}, + 401: {"description": "Authentication required"}, + 403: {"description": "Access denied to this API key"}, + 404: {"description": "API key not found"}, + }, + response_description="Updated API key details", +) +async def update_api_key( + update_data: APIKeyUpdate, + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), +) -> dict[str, Any]: + """Update an existing API key.""" + try: + return await api_key_service.update_api_key( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + update_data=update_data, + db=db, + ) + except ResourceNotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + except PermissionDeniedError as e: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e)) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete( + "/{key_id}", + status_code=204, + summary="Delete API Key", + description=""" + Deletes (deactivates) an API key. + + - **key_id**: ID of the API key to delete + + This operation deactivates the key rather than permanently deleting it + to maintain usage history and audit trails. + + Users can only delete their own API keys. 
+ """, + responses={ + 204: {"description": "API key deleted successfully"}, + 401: {"description": "Authentication required"}, + 403: {"description": "Access denied to this API key"}, + 404: {"description": "API key not found"}, + }, +) +async def delete_api_key( + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), +) -> None: + """Delete (deactivate) an API key.""" + try: + await api_key_service.delete_api_key( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + db=db, + ) + except ResourceNotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + except PermissionDeniedError as e: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e)) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get( + "/{key_id}/usage", + response_model=PaginatedListResponse[KeyUsageRead], + summary="Get API Key Usage History", + description=""" + Retrieves usage history for a specific API key. + + - **key_id**: ID of the API key + - **page**: Page number (default: 1) + - **items_per_page**: Items per page (default: 100) + + Returns usage records in reverse chronological order (newest first). + Includes details like endpoints used, response times, costs, and errors. 
+ """, + responses={ + 200: {"description": "Usage history retrieved successfully"}, + 401: {"description": "Authentication required"}, + 403: {"description": "Access denied to this API key"}, + 404: {"description": "API key not found"}, + }, + response_description="Paginated list of usage records", +) +async def get_key_usage( + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), + page: int = Query(1, ge=1, description="Page number"), + items_per_page: int = Query(100, ge=1, le=1000, description="Items per page"), +) -> dict[str, Any]: + """Get usage history for an API key.""" + try: + result = await api_key_service.get_key_usage( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + limit=items_per_page, + offset=compute_offset(page, items_per_page), + db=db, + ) + + return paginated_response( + crud_data=result, + page=page, + items_per_page=items_per_page, + ) + except ResourceNotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + except PermissionDeniedError as e: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e)) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get( + "/{key_id}/analytics", + summary="Get API Key Usage Analytics", + description=""" + Retrieves comprehensive usage analytics for a specific API key. 
+ + - **key_id**: ID of the API key + - **days**: Number of days to analyze (default: 30) + + Returns detailed analytics including: + - Total and successful request counts + - Token usage and costs + - Average response times + - Most used endpoints + - Error breakdown + """, + responses={ + 200: {"description": "Analytics retrieved successfully"}, + 401: {"description": "Authentication required"}, + 403: {"description": "Access denied to this API key"}, + 404: {"description": "API key not found"}, + }, + response_description="Usage analytics for the API key", +) +async def get_key_analytics( + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), + days: int = Query(30, ge=1, le=365, description="Number of days to analyze"), +) -> dict[str, Any]: + """Get usage analytics for an API key.""" + try: + return await api_key_service.get_usage_analytics( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + days=days, + db=db, + ) + except ResourceNotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + except PermissionDeniedError as e: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=str(e)) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get( + "/summary/user", + summary="Get User API Key Summary", + description=""" + Retrieves a comprehensive summary of all API keys for the authenticated user. + + Includes: + - Total and active key counts + - Overall usage statistics + - Total costs across all keys + - List of all keys with basic information + + This endpoint provides a dashboard-style overview of the user's API key usage. 
+ """, + responses={ + 200: {"description": "User summary retrieved successfully"}, + 401: {"description": "Authentication required"}, + }, + response_description="Comprehensive API key summary for the user", +) +async def get_user_summary( + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), +) -> dict[str, Any]: + """Get comprehensive API key summary for the authenticated user.""" + try: + return await api_key_service.get_user_summary( + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + db=db, + ) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="Internal server error") diff --git a/backend/src/modules/api_keys/schemas.py b/backend/src/modules/api_keys/schemas.py new file mode 100644 index 00000000..ddded942 --- /dev/null +++ b/backend/src/modules/api_keys/schemas.py @@ -0,0 +1,189 @@ +"""Pydantic schemas for API key management validation.""" + +from datetime import datetime +from typing import Annotated, Any + +from pydantic import BaseModel, ConfigDict, Field, field_validator + +from ..common.schemas import TimestampSchema +from .enums import HTTPMethod, KeyPermissionAction, KeyPermissionResource + +VALID_HTTP_METHODS = {m.value for m in HTTPMethod} + + +class APIKeyBase(BaseModel): + """Base schema for API key data.""" + + name: Annotated[str, Field(min_length=1, max_length=100, description="Human-readable name for the API key")] + permissions: dict[str, Any] = Field(default_factory=dict, description="Permission settings") + usage_limits: dict[str, Any] = Field(default_factory=dict, description="Usage limits per key") + expires_at: datetime | None = Field(default=None, description="Key expiration timestamp") + key_metadata: dict[str, Any] | None = Field(default=None, description="Additional key metadata") + + +class 
APIKeyCreate(APIKeyBase): + """Schema for creating a new API key.""" + + pass + + +class APIKeyCreateInternal(APIKeyBase): + """Internal schema for creating a new API key with additional fields.""" + + user_id: int + key_hash: str + key_prefix: str + + +class APIKeyUpdate(BaseModel): + """Schema for updating an existing API key.""" + + name: Annotated[str, Field(min_length=1, max_length=100)] | None = None + permissions: dict[str, Any] | None = None + usage_limits: dict[str, Any] | None = None + is_active: bool | None = None + expires_at: datetime | None = None + key_metadata: dict[str, Any] | None = None + + +class APIKeyRead(TimestampSchema, APIKeyBase): + """Schema for reading API key data.""" + + model_config = ConfigDict(from_attributes=True) + + id: int + user_id: int + key_prefix: str + last_used_at: datetime | None + last_used_ip: str | None + is_active: bool + + +class APIKeyResponse(APIKeyRead): + """Schema for API key creation response (includes full key).""" + + api_key: str = Field(description="Full API key - only shown once during creation") + + +class KeyUsageBase(BaseModel): + """Base schema for key usage data.""" + + endpoint: Annotated[str, Field(max_length=255, description="API endpoint used")] + method: Annotated[str, Field(max_length=10, description="HTTP method")] + status_code: Annotated[int, Field(ge=100, le=599, description="Response status code")] + tokens_used: int | None = Field(default=None, ge=0, description="AI tokens consumed") + + cost_microcents: int | None = Field(default=None, ge=0, description="Cost in microcents") + response_time_ms: int | None = Field(default=None, ge=0, description="Response time in milliseconds") + ip_address: str | None = Field(default=None, max_length=45, description="Client IP address") + user_agent: str | None = Field(default=None, description="Client user agent") + error_message: str | None = Field(default=None, description="Error details if any") + usage_metadata: dict[str, Any] | None = 
Field(default=None, description="Additional usage metadata") + + @field_validator("method") + @classmethod + def validate_method(cls, v: str) -> str: + """Validate method against HTTPMethod enum values.""" + v_upper = v.upper() + if v_upper not in VALID_HTTP_METHODS: + raise ValueError(f"method must be one of: {sorted(VALID_HTTP_METHODS)}") + return v_upper + + +class KeyUsageCreate(KeyUsageBase): + """Schema for creating a new key usage record.""" + + api_key_id: int + user_id: int + + +class KeyUsageRead(TimestampSchema, KeyUsageBase): + """Schema for reading key usage data.""" + + model_config = ConfigDict(from_attributes=True) + + id: int + api_key_id: int + user_id: int + + +class KeyPermissionBase(BaseModel): + """Base schema for key permission data.""" + + resource: Annotated[KeyPermissionResource, Field(description="Resource type")] + action: Annotated[KeyPermissionAction, Field(description="Action type")] + conditions: dict[str, Any] | None = Field(default=None, description="Additional conditions") + is_allowed: bool = Field(default=True, description="Whether permission is granted") + + +class KeyPermissionCreate(KeyPermissionBase): + """Schema for creating a new key permission.""" + + api_key_id: int + + +class KeyPermissionUpdate(BaseModel): + """Schema for updating an existing key permission.""" + + conditions: dict[str, Any] | None = None + is_allowed: bool | None = None + + +class KeyPermissionRead(TimestampSchema, KeyPermissionBase): + """Schema for reading key permission data.""" + + model_config = ConfigDict(from_attributes=True) + + id: int + api_key_id: int + + +class APIKeyWithPermissions(APIKeyRead): + """Schema for API key with its permissions.""" + + permissions_list: list[KeyPermissionRead] = Field(default_factory=list, description="Detailed permissions") + + +class KeyUsageAnalytics(BaseModel): + """Schema for key usage analytics.""" + + api_key_id: int + total_requests: int + successful_requests: int + failed_requests: int + total_tokens: 
int + total_cost_microcents: int + average_response_time_ms: float | None + most_used_endpoints: list[dict[str, Any]] + error_breakdown: dict[str, int] + usage_by_day: list[dict[str, Any]] + + +class UserAPIKeySummary(BaseModel): + """Schema for user API key summary.""" + + user_id: int + total_keys: int + active_keys: int + total_requests: int + total_cost_microcents: int + keys: list[APIKeyRead] + + +class APIKeyValidationRequest(BaseModel): + """Schema for API key validation requests.""" + + api_key: str = Field(description="API key to validate") + resource: KeyPermissionResource = Field(description="Resource being accessed") + action: KeyPermissionAction = Field(description="Action being performed") + + +class APIKeyValidationResponse(BaseModel): + """Schema for API key validation responses.""" + + is_valid: bool + api_key_id: int | None = None + user_id: int | None = None + permissions: dict[str, Any] | None = None + usage_limits: dict[str, Any] | None = None + error_message: str | None = None diff --git a/backend/src/modules/api_keys/service.py b/backend/src/modules/api_keys/service.py new file mode 100644 index 00000000..cc4b2493 --- /dev/null +++ b/backend/src/modules/api_keys/service.py @@ -0,0 +1,590 @@ +"""API key management service for developer-facing products.""" + +import base64 +import binascii +import hashlib +import hmac +import secrets +from datetime import UTC, datetime, timedelta +from typing import Any + +from fastcrud.types import GetMultiResponseDict +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.logging import get_logger +from ..common.exceptions import PermissionDeniedError, ResourceNotFoundError +from .crud import crud_api_keys, crud_key_permissions, crud_key_usage +from .enums import KeyPermissionAction, KeyPermissionResource +from .models import APIKey +from .schemas import ( + APIKeyCreate, + APIKeyCreateInternal, + APIKeyRead, + APIKeyUpdate, + APIKeyValidationResponse, + 
KeyUsageCreate, + KeyUsageRead, +) +from .utils import ( + calculate_basic_metrics, + calculate_daily_usage, + calculate_endpoint_usage, + calculate_error_breakdown, + calculate_response_time_metrics, + parse_usage_records, +) + +logger = get_logger() + +_SCRYPT_N = 2**14 +_SCRYPT_R = 8 +_SCRYPT_P = 1 +_SCRYPT_DKLEN = 32 + + +class APIKeyService: + """Service for managing API keys, permissions, and usage tracking. + + Provides high-level operations for API key lifecycle management, + permission validation, usage tracking, and analytics. + """ + + def __init__(self): + """Initialize API key service.""" + self.key_prefix_length = 8 + self.key_length = 48 + + def _generate_api_key(self) -> tuple[str, str, str]: + """Generate a new API key with prefix and hash. + + Returns: + Tuple of (full_key, prefix, hash) + """ + raw_key = secrets.token_urlsafe(self.key_length) + prefix = raw_key[: self.key_prefix_length] + api_key = f"fai_{prefix}_{raw_key[self.key_prefix_length :]}" + key_hash = self._hash_api_key(api_key) + + return api_key, prefix, key_hash + + def _hash_api_key(self, api_key: str) -> str: + """Hash an API key for storage using scrypt with a per-row salt. + + Stored format: ``scrypt$N$r$p$salt_b64$derived_b64``. Non-deterministic; + DB lookup uses ``key_prefix`` (already indexed) instead of ``key_hash``. 
+ """ + salt = secrets.token_bytes(16) + derived = hashlib.scrypt( + api_key.encode("utf-8"), + salt=salt, + n=_SCRYPT_N, + r=_SCRYPT_R, + p=_SCRYPT_P, + dklen=_SCRYPT_DKLEN, + ) + salt_b64 = base64.b64encode(salt).decode("ascii") + derived_b64 = base64.b64encode(derived).decode("ascii") + return f"scrypt${_SCRYPT_N}${_SCRYPT_R}${_SCRYPT_P}${salt_b64}${derived_b64}" + + def _verify_api_key(self, api_key: str, stored_hash: str) -> bool: + """Verify a candidate ``api_key`` against a stored scrypt hash.""" + try: + scheme, n_str, r_str, p_str, salt_b64, derived_b64 = stored_hash.split("$", 5) + except ValueError: + return False + if scheme != "scrypt": + return False + try: + n = int(n_str) + r = int(r_str) + p = int(p_str) + salt = base64.b64decode(salt_b64) + expected = base64.b64decode(derived_b64) + except (ValueError, binascii.Error): + return False + actual = hashlib.scrypt( + api_key.encode("utf-8"), + salt=salt, + n=n, + r=r, + p=p, + dklen=len(expected), + ) + return hmac.compare_digest(actual, expected) + + async def create_api_key( + self, + user_id: int, + key_data: APIKeyCreate, + db: AsyncSession, + ) -> dict[str, Any]: + """Create a new API key for a user. 
+ + Args: + user_id: User ID + key_data: API key creation data + db: Database session + + Returns: + Created API key with full key (only shown once) + """ + api_key, prefix, key_hash = self._generate_api_key() + + key_dict = key_data.model_dump() + key_dict.update( + { + "user_id": user_id, + "key_hash": key_hash, + "key_prefix": prefix, + } + ) + + key_internal = APIKeyCreateInternal(**key_dict) + created_key = await crud_api_keys.create(db=db, object=key_internal, schema_to_select=APIKeyRead) + + if not created_key: + raise ValueError("Failed to create API key") + + logger.info(f"Created API key {created_key['id']} for user {user_id}") + + response_data = created_key.copy() + response_data["api_key"] = api_key + + return response_data + + async def get_user_api_keys( + self, + user_id: int, + db: AsyncSession, + active_only: bool = True, + limit: int = 50, + offset: int = 0, + ) -> GetMultiResponseDict: + """Get all API keys for a user. + + Args: + user_id: User ID + db: Database session + active_only: Whether to return only active keys + + Returns: + List of API keys + """ + if active_only: + return await crud_api_keys.get_multi( + db=db, + limit=limit, + offset=offset, + sort_columns="created_at", + sort_orders="desc", + user_id=user_id, + is_active=True, + schema_to_select=APIKeyRead, + ) + else: + return await crud_api_keys.get_multi( + db=db, + limit=limit, + offset=offset, + sort_columns="created_at", + sort_orders="desc", + user_id=user_id, + schema_to_select=APIKeyRead, + ) + + async def get_api_key( + self, + key_id: int, + user_id: int, + db: AsyncSession, + ) -> dict[str, Any]: + """Get a specific API key for a user. 
+ + Args: + key_id: API key ID + user_id: User ID (for ownership verification) + db: Database session + + Returns: + API key data + + Raises: + ResourceNotFoundError: If key not found + PermissionDeniedError: If user doesn't own the key + """ + key = await crud_api_keys.get(db=db, id=key_id, schema_to_select=APIKeyRead) + + if not key: + raise ResourceNotFoundError("API key not found") + + if key["user_id"] != user_id: + raise PermissionDeniedError("Access denied to this API key") + + return key + + async def update_api_key( + self, + key_id: int, + user_id: int, + update_data: APIKeyUpdate, + db: AsyncSession, + ) -> dict[str, Any]: + """Update an API key. + + Args: + key_id: API key ID + user_id: User ID (for ownership verification) + update_data: Update data + db: Database session + + Returns: + Updated API key data + """ + await self.get_api_key(key_id=key_id, user_id=user_id, db=db) + + update_dict = update_data.model_dump(exclude_unset=True) + updated_key = await crud_api_keys.update( + db=db, + object=update_dict, + id=key_id, + return_columns=list(APIKeyRead.model_fields.keys()), + ) + + logger.info(f"Updated API key {key_id} for user {user_id}") + + if updated_key is None: + updated_key = await crud_api_keys.get(db=db, id=key_id, schema_to_select=APIKeyRead) + + if updated_key is None: + raise ResourceNotFoundError("API key not found after update") + + return updated_key + + async def delete_api_key( + self, + key_id: int, + user_id: int, + db: AsyncSession, + ) -> None: + """Delete (deactivate) an API key. 
+ + Args: + key_id: API key ID + user_id: User ID (for ownership verification) + db: Database session + """ + await self.get_api_key(key_id=key_id, user_id=user_id, db=db) + + await crud_api_keys.update( + db=db, + object={"is_active": False}, + id=key_id, + ) + + logger.info(f"Deactivated API key {key_id} for user {user_id}") + + async def validate_api_key( + self, + api_key: str, + resource: str, + action: str, + db: AsyncSession, + ) -> APIKeyValidationResponse: + """Validate an API key and check permissions. + + Args: + api_key: API key to validate + resource: Resource being accessed + action: Action being performed + db: Database session + + Returns: + Validation response with key details and permissions + """ + prefix_start = len("fai_") + prefix_end = prefix_start + self.key_prefix_length + if not api_key.startswith("fai_") or len(api_key) <= prefix_end or api_key[prefix_end] != "_": + return APIKeyValidationResponse( + is_valid=False, + error_message="Invalid API key", + ) + prefix = api_key[prefix_start:prefix_end] + + result = await db.execute(select(APIKey).where(APIKey.key_prefix == prefix).execution_options(populate_existing=True)) + candidates = result.scalars().all() + + matched: APIKey | None = None + for candidate in candidates: + if self._verify_api_key(api_key, candidate.key_hash): + matched = candidate + break + + if matched is None: + return APIKeyValidationResponse( + is_valid=False, + error_message="Invalid API key", + ) + + key = APIKeyRead.model_validate(matched).model_dump() + + if not key["is_active"]: + return APIKeyValidationResponse( + is_valid=False, + error_message="API key is inactive", + ) + + if key["expires_at"] and key["expires_at"] < datetime.now(UTC): + return APIKeyValidationResponse( + is_valid=False, + error_message="API key has expired", + ) + + has_permission = await self._check_permission( + api_key_id=key["id"], + resource=resource, + action=action, + db=db, + ) + + if not has_permission: + return 
APIKeyValidationResponse( + is_valid=False, + error_message=f"No permission for {action} on {resource}", + ) + + await crud_api_keys.update( + db=db, + object={ + "last_used_at": datetime.now(UTC), + }, + id=key["id"], + ) + + return APIKeyValidationResponse( + is_valid=True, + api_key_id=key["id"], + user_id=key["user_id"], + permissions=key["permissions"], + usage_limits=key["usage_limits"], + ) + + async def record_usage( + self, + api_key_id: int, + user_id: int, + usage_data: KeyUsageCreate, + db: AsyncSession, + ) -> dict[str, Any]: + """Record API key usage for analytics and billing. + + Args: + api_key_id: API key ID + user_id: User ID + usage_data: Usage data + db: Database session + + Returns: + Created usage record + """ + if usage_data.api_key_id != api_key_id or usage_data.user_id != user_id: + usage_dict = usage_data.model_dump() + usage_dict.update( + { + "api_key_id": api_key_id, + "user_id": user_id, + } + ) + usage_data = KeyUsageCreate(**usage_dict) + + usage_record = await crud_key_usage.create(db=db, object=usage_data, schema_to_select=KeyUsageRead) + + if not usage_record: + raise ValueError("Failed to create usage record") + + return usage_record + + async def get_key_usage( + self, + key_id: int, + user_id: int, + db: AsyncSession, + limit: int = 100, + offset: int = 0, + ) -> GetMultiResponseDict: + """Get usage history for an API key. 
+ + Args: + key_id: API key ID + user_id: User ID (for ownership verification) + db: Database session + limit: Maximum number of records + offset: Number of records to skip + + Returns: + List of usage records + """ + await self.get_api_key(key_id=key_id, user_id=user_id, db=db) + + return await crud_key_usage.get_multi( + db=db, + limit=limit, + offset=offset, + sort_columns="created_at", + sort_orders="desc", + api_key_id=key_id, + schema_to_select=KeyUsageRead, + ) + + async def get_usage_analytics( + self, + key_id: int, + user_id: int, + db: AsyncSession, + days: int = 30, + ) -> dict[str, Any]: + """Get usage analytics for an API key. + + Args: + key_id: API key ID + user_id: User ID (for ownership verification) + db: Database session + days: Number of days to analyze + + Returns: + Usage analytics + """ + await self.get_api_key(key_id=key_id, user_id=user_id, db=db) + + since_date = datetime.now(UTC) - timedelta(days=days) + + result = await crud_key_usage.get_multi( + db=db, + api_key_id=key_id, + created_at__gte=since_date, + schema_to_select=KeyUsageRead, + ) + + usage_records = parse_usage_records(result) + basic_metrics = calculate_basic_metrics(usage_records) + avg_response_time = calculate_response_time_metrics(usage_records) + most_used_endpoints = calculate_endpoint_usage(usage_records) + error_breakdown = calculate_error_breakdown(usage_records) + usage_by_day = calculate_daily_usage(usage_records) + + return { + "api_key_id": key_id, + "total_requests": basic_metrics["total_requests"], + "successful_requests": basic_metrics["successful_requests"], + "failed_requests": basic_metrics["failed_requests"], + "total_tokens": basic_metrics["total_tokens"], + "total_cost_microcents": basic_metrics["total_cost"], + "average_response_time_ms": avg_response_time, + "most_used_endpoints": most_used_endpoints, + "error_breakdown": error_breakdown, + "usage_by_day": usage_by_day, + } + + async def get_user_summary( + self, + user_id: int, + db: AsyncSession, + 
) -> dict[str, Any]: + """Get comprehensive API key summary for a user. + + Args: + user_id: User ID + db: Database session + + Returns: + User API key summary + """ + keys_result = await self.get_user_api_keys(user_id=user_id, db=db, active_only=False) + keys_data = keys_result.get("data", []) if isinstance(keys_result, dict) else [] + + total_requests_result = await crud_key_usage.count(db=db, user_id=user_id) + total_requests = total_requests_result if isinstance(total_requests_result, int) else 0 + + usage_result = await crud_key_usage.get_multi(db=db, user_id=user_id, schema_to_select=KeyUsageRead) + total_cost = 0 + if isinstance(usage_result, dict) and usage_result.get("data"): + usage_data = usage_result["data"] + if isinstance(usage_data, list): + for u in usage_data: + if isinstance(u, dict) and u.get("cost_microcents"): + total_cost += u["cost_microcents"] + + return { + "user_id": user_id, + "total_keys": len(keys_data), + "active_keys": len([k for k in keys_data if isinstance(k, dict) and k.get("is_active")]), + "total_requests": total_requests, + "total_cost_microcents": total_cost, + "keys": keys_data, + } + + async def _check_permission( + self, + api_key_id: int, + resource: str, + action: str, + db: AsyncSession, + ) -> bool: + """Check if an API key has permission for a resource/action. 
+ + Args: + api_key_id: API key ID + resource: Resource type + action: Action type + db: Database session + + Returns: + True if permission granted, False otherwise + """ + resource_enum = None + action_enum = None + + try: + resource_enum = KeyPermissionResource(resource) + except ValueError: + pass + + try: + action_enum = KeyPermissionAction(action) + except ValueError: + pass + + permission = None + if resource_enum and action_enum: + permission = await crud_key_permissions.get( + db=db, + api_key_id=api_key_id, + resource=resource_enum, + action=action_enum, + ) + + if not permission and action_enum: + permission = await crud_key_permissions.get( + db=db, + api_key_id=api_key_id, + resource=KeyPermissionResource.WILDCARD, + action=action_enum, + ) + + if not permission and resource_enum: + permission = await crud_key_permissions.get( + db=db, + api_key_id=api_key_id, + resource=resource_enum, + action=KeyPermissionAction.WILDCARD, + ) + + if not permission: + permission = await crud_key_permissions.get( + db=db, + api_key_id=api_key_id, + resource=KeyPermissionResource.WILDCARD, + action=KeyPermissionAction.WILDCARD, + ) + + return permission["is_allowed"] if permission else False diff --git a/backend/src/modules/api_keys/utils.py b/backend/src/modules/api_keys/utils.py new file mode 100644 index 00000000..59eb1c07 --- /dev/null +++ b/backend/src/modules/api_keys/utils.py @@ -0,0 +1,149 @@ +"""Utility functions for API key analytics and data processing.""" + +from datetime import datetime +from typing import Any + + +def calculate_basic_metrics(usage_records: list[dict[str, Any]]) -> dict[str, Any]: + """Calculate basic usage metrics from usage records. 
+ + Args: + usage_records: List of usage record dictionaries + + Returns: + Dictionary containing basic metrics + """ + total_requests = len(usage_records) + successful_requests = len([u for u in usage_records if isinstance(u, dict) and 200 <= u.get("status_code", 0) < 300]) + failed_requests = total_requests - successful_requests + + total_tokens = sum(u.get("tokens_used", 0) or 0 for u in usage_records if isinstance(u, dict)) + + total_cost = sum(u.get("cost_microcents", 0) or 0 for u in usage_records if isinstance(u, dict)) + + return { + "total_requests": total_requests, + "successful_requests": successful_requests, + "failed_requests": failed_requests, + "total_tokens": total_tokens, + "total_cost": total_cost, + } + + +def calculate_response_time_metrics(usage_records: list[dict[str, Any]]) -> float | None: + """Calculate average response time from usage records. + + Args: + usage_records: List of usage record dictionaries + + Returns: + Average response time in milliseconds or None if no data + """ + response_times = [] + for u in usage_records: + if isinstance(u, dict) and u.get("response_time_ms") is not None: + response_times.append(u["response_time_ms"]) + + return sum(response_times) / len(response_times) if response_times else None + + +def calculate_endpoint_usage(usage_records: list[dict[str, Any]], limit: int = 10) -> list[dict[str, Any]]: + """Calculate most used endpoints from usage records. 
+ + Args: + usage_records: List of usage record dictionaries + limit: Maximum number of endpoints to return + + Returns: + List of endpoint usage dictionaries sorted by count + """ + endpoint_counts: dict[str, int] = {} + for record in usage_records: + if isinstance(record, dict): + endpoint = record.get("endpoint", "") + endpoint_counts[endpoint] = endpoint_counts.get(endpoint, 0) + 1 + + return [ + {"endpoint": endpoint, "count": count} + for endpoint, count in sorted(endpoint_counts.items(), key=lambda x: x[1], reverse=True)[:limit] + ] + + +def calculate_error_breakdown(usage_records: list[dict[str, Any]]) -> dict[str, int]: + """Calculate error status code breakdown from usage records. + + Args: + usage_records: List of usage record dictionaries + + Returns: + Dictionary mapping status codes to counts + """ + error_counts: dict[str, int] = {} + for record in usage_records: + if isinstance(record, dict) and record.get("status_code", 0) >= 400: + status = record.get("status_code", 0) + error_counts[str(status)] = error_counts.get(str(status), 0) + 1 + + return error_counts + + +def calculate_daily_usage(usage_records: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Calculate daily usage breakdown from usage records. 
+ + Args: + usage_records: List of usage record dictionaries + + Returns: + List of daily usage dictionaries sorted by date + """ + daily_usage: dict[str, dict[str, Any]] = {} + + for record in usage_records: + if not isinstance(record, dict) or not record.get("created_at"): + continue + + created_at = record["created_at"] + if isinstance(created_at, str): + try: + created_at = datetime.fromisoformat(created_at.replace("Z", "+00:00")) + except (ValueError, AttributeError): + continue + + day_key = created_at.strftime("%Y-%m-%d") + if day_key not in daily_usage: + daily_usage[day_key] = { + "date": day_key, + "requests": 0, + "successful_requests": 0, + "failed_requests": 0, + "tokens": 0, + "cost_microcents": 0, + } + + daily_usage[day_key]["requests"] += 1 + if 200 <= record.get("status_code", 0) < 300: + daily_usage[day_key]["successful_requests"] += 1 + else: + daily_usage[day_key]["failed_requests"] += 1 + daily_usage[day_key]["tokens"] += record.get("tokens_used", 0) or 0 + daily_usage[day_key]["cost_microcents"] += record.get("cost_microcents", 0) or 0 + + return sorted(daily_usage.values(), key=lambda x: x["date"]) + + +def parse_usage_records(result: Any) -> list[dict[str, Any]]: + """Parse usage records from database result. 
+ + Args: + result: Database query result + + Returns: + List of usage record dictionaries + """ + usage_records: list[dict[str, Any]] = [] + if isinstance(result, dict) and result.get("data"): + data = result["data"] + if isinstance(data, list): + usage_records = data + + return usage_records diff --git a/backend/src/modules/common/__init__.py b/backend/src/modules/common/__init__.py new file mode 100644 index 00000000..89bf44e6 --- /dev/null +++ b/backend/src/modules/common/__init__.py @@ -0,0 +1 @@ +"""Common utilities and shared functionality across features.""" diff --git a/backend/src/modules/common/constants.py b/backend/src/modules/common/constants.py new file mode 100644 index 00000000..417f744e --- /dev/null +++ b/backend/src/modules/common/constants.py @@ -0,0 +1,45 @@ +"""Common constants used across the application.""" + +from collections.abc import Callable + +from ...infrastructure.auth.http_exceptions import ( + DuplicateValueException, + ForbiddenException, + HTTPException, + NotFoundException, + UnprocessableEntityException, +) +from .exceptions import ( + DomainError, + InsufficientCreditsError, + PermissionDeniedError, + RateLimitNotFoundError, + ResourceExistsError, + ResourceNotFoundError, + TierNotFoundError, + UserExistsError, + UserNotFoundError, + ValidationError, +) + +# Generic error message for client-facing responses (never leak internal details) +GENERIC_ERROR_MESSAGE = "Something went wrong. Please try again." 
+SUPPORT_ID_LENGTH = 8 + +# Safety limits for queries that could be unbounded +MAX_ENTITLEMENTS_PER_USER = 100 +DEFAULT_BATCH_SIZE = 100 + +EXCEPTION_MAPPING: dict[type[DomainError], Callable[[str], HTTPException]] = { + InsufficientCreditsError: lambda message: HTTPException(status_code=402, detail=message or "Insufficient credits."), + ResourceNotFoundError: lambda message: NotFoundException(detail="The requested resource was not found."), + ResourceExistsError: lambda message: DuplicateValueException(detail="This resource already exists."), + ValidationError: lambda message: UnprocessableEntityException(detail=message), + PermissionDeniedError: lambda message: ForbiddenException(detail="You don't have permission for this action."), + UserNotFoundError: lambda message: NotFoundException(detail="User not found."), + UserExistsError: lambda message: DuplicateValueException( + detail=message or "A user with this email or username already exists." + ), + TierNotFoundError: lambda message: NotFoundException(detail="The requested tier was not found."), + RateLimitNotFoundError: lambda message: NotFoundException(detail="Rate limit configuration not found."), +} diff --git a/backend/src/modules/common/exceptions.py b/backend/src/modules/common/exceptions.py new file mode 100644 index 00000000..3dc4d497 --- /dev/null +++ b/backend/src/modules/common/exceptions.py @@ -0,0 +1,67 @@ +"""Domain exception classes for business logic errors.""" + + +class DomainError(Exception): + """Base class for all domain-specific errors.""" + + pass + + +class ResourceNotFoundError(DomainError): + """Raised when a requested resource cannot be found.""" + + pass + + +class ResourceExistsError(DomainError): + """Raised when attempting to create a resource that already exists.""" + + pass + + +class ValidationError(DomainError): + """Raised when data validation fails.""" + + pass + + +class PermissionDeniedError(DomainError): + """Raised when a user attempts an action they don't have 
permission for.""" + + pass + + +class UserNotFoundError(ResourceNotFoundError): + """Raised when a user cannot be found.""" + + pass + + +class UserExistsError(ResourceExistsError): + """Raised when attempting to create a user with an existing email or username.""" + + pass + + +class TierNotFoundError(ResourceNotFoundError): + """Raised when a tier cannot be found.""" + + pass + + +class RateLimitNotFoundError(ResourceNotFoundError): + """Raised when a rate limit cannot be found.""" + + pass + + +class InsufficientCreditsError(DomainError): + """Raised when a user doesn't have enough credits for an operation.""" + + pass + + +class UsageLimitExceededError(DomainError): + """Raised when a user exceeds their usage limits.""" + + pass diff --git a/src/app/core/schemas.py b/backend/src/modules/common/schemas.py similarity index 56% rename from src/app/core/schemas.py rename to backend/src/modules/common/schemas.py index 9566aa52..847b2632 100644 --- a/src/app/core/schemas.py +++ b/backend/src/modules/common/schemas.py @@ -1,31 +1,7 @@ -import uuid as uuid_pkg from datetime import UTC, datetime from typing import Any from pydantic import BaseModel, Field, field_serializer -from uuid6 import uuid7 - - -class HealthCheck(BaseModel): - status: str - environment: str - version: str - timestamp: str - - -class ReadyCheck(BaseModel): - status: str - environment: str - version: str - app: str - database: str - redis: str - timestamp: str - - -# -------------- mixins -------------- -class UUIDSchema(BaseModel): - uuid: uuid_pkg.UUID = Field(default_factory=uuid7) class TimestampSchema(BaseModel): @@ -57,30 +33,3 @@ def serialize_dates(self, deleted_at: datetime | None, _info: Any) -> str | None return deleted_at.isoformat() return None - - -# -------------- token -------------- -class Token(BaseModel): - access_token: str - token_type: str - - -class TokenData(BaseModel): - username_or_email: str - - -class TokenBlacklistBase(BaseModel): - token: str - expires_at: datetime - 
- -class TokenBlacklistRead(TokenBlacklistBase): - id: int - - -class TokenBlacklistCreate(TokenBlacklistBase): - pass - - -class TokenBlacklistUpdate(TokenBlacklistBase): - pass diff --git a/backend/src/modules/common/utils/__init__.py b/backend/src/modules/common/utils/__init__.py new file mode 100644 index 00000000..e40643ea --- /dev/null +++ b/backend/src/modules/common/utils/__init__.py @@ -0,0 +1 @@ +"""Utility functions and helpers.""" diff --git a/backend/src/modules/common/utils/error_handler.py b/backend/src/modules/common/utils/error_handler.py new file mode 100644 index 00000000..6fff5ff8 --- /dev/null +++ b/backend/src/modules/common/utils/error_handler.py @@ -0,0 +1,100 @@ +"""Utility functions for mapping domain exceptions to HTTP exceptions.""" + +import uuid as uuid_mod + +from fastapi import FastAPI, Request +from fastapi.exceptions import RequestValidationError +from fastapi.responses import JSONResponse +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint +from starlette.responses import Response + +from ....infrastructure.auth.http_exceptions import ( + HTTPException, +) +from ....infrastructure.logging import get_logger +from ..constants import EXCEPTION_MAPPING, GENERIC_ERROR_MESSAGE, SUPPORT_ID_LENGTH +from ..exceptions import ( + DomainError, + InsufficientCreditsError, +) + +logger = get_logger() + + +def _generate_support_id() -> str: + """Generate a short support ID for error tracking.""" + return str(uuid_mod.uuid4())[:SUPPORT_ID_LENGTH] + + +def map_exception(error: DomainError) -> HTTPException: + """Map a domain exception to a corresponding HTTP exception.""" + for exception_class, mapper in EXCEPTION_MAPPING.items(): + if isinstance(error, exception_class): + return mapper(str(error)) + + logger.error(f"Unmapped domain error: {type(error).__name__}: {error}") + return HTTPException(status_code=500, detail=GENERIC_ERROR_MESSAGE) + + +class CatchAllErrorMiddleware(BaseHTTPMiddleware): + """Catch unhandled 
exceptions and return generic 500 with support ID.""" + + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: + try: + return await call_next(request) + except Exception as exc: + support_id = _generate_support_id() + logger.exception(f"Unhandled error [{support_id}] on {request.method} {request.url.path}: {exc}") + return JSONResponse( + status_code=500, + content={"detail": GENERIC_ERROR_MESSAGE, "support_id": support_id}, + ) + + +def register_exception_handlers(app: FastAPI) -> None: + """Register global exception handlers for domain and validation exceptions. + + All handlers log full details server-side and return only a generic message + + support_id to the client. Exception: InsufficientCreditsError (402) keeps + its message since the frontend needs the credit info for upgrade prompts. + """ + app.add_middleware(CatchAllErrorMiddleware) + + @app.exception_handler(RequestValidationError) + async def validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse: + support_id = _generate_support_id() + logger.warning(f"Validation error [{support_id}] on {request.method} {request.url.path}: {exc.errors()}") + return JSONResponse( + status_code=422, + content={"detail": "Invalid request. 
Please check your input and try again.", "support_id": support_id}, + ) + + @app.exception_handler(DomainError) + async def domain_exception_handler(request: Request, exc: DomainError) -> JSONResponse: + support_id = _generate_support_id() + http_exception = map_exception(exc) + + if isinstance(exc, InsufficientCreditsError): + logger.info(f"Insufficient credits [{support_id}] on {request.method} {request.url.path}: {exc}") + return JSONResponse( + status_code=http_exception.status_code, + content={"detail": http_exception.detail, "support_id": support_id}, + ) + + logger.warning(f"Domain error [{support_id}] on {request.method} {request.url.path}: {type(exc).__name__}: {exc}") + return JSONResponse( + status_code=http_exception.status_code, + content={"detail": GENERIC_ERROR_MESSAGE, "support_id": support_id}, + ) + + +def handle_exception(error: Exception) -> HTTPException | None: + """Handle an exception and return an appropriate HTTP exception if possible. + + For use in route handlers when you want to handle exceptions manually. + """ + if isinstance(error, DomainError): + return map_exception(error) + elif isinstance(error, HTTPException): + return error + return None diff --git a/backend/src/modules/common/utils/logger.py b/backend/src/modules/common/utils/logger.py new file mode 100644 index 00000000..efde5fbd --- /dev/null +++ b/backend/src/modules/common/utils/logger.py @@ -0,0 +1,43 @@ +"""Logging utilities for the application. + +This module provides backward compatibility with the old logging utility +while redirecting to the new centralized logging infrastructure. 
+ +For new code, prefer importing directly from infrastructure.logging: + from infrastructure.logging import get_logger +""" + +import logging + +from ....infrastructure.logging import get_logger as _get_centralized_logger + + +def get_logger(name: str, level: int | None = None) -> logging.Logger | logging.LoggerAdapter[logging.Logger]: + """Get a configured logger with backward compatibility. + + This function provides backward compatibility with the old logging + utility while using the new centralized logging infrastructure. + + Args: + name: The name of the logger, typically __name__ + level: The logging level (will override configuration for this logger) + + Returns: + A configured logger instance + + Note: + For new code, prefer importing directly from infrastructure.logging: + from infrastructure.logging import get_logger + + The level parameter is deprecated - use environment variables instead: + LOG_LEVEL=DEBUG + """ + logger = _get_centralized_logger(name) + + if level is not None: + if isinstance(logger, logging.LoggerAdapter): + logger.logger.setLevel(level) + else: + logger.setLevel(level) + + return logger diff --git a/backend/src/modules/rate_limit/__init__.py b/backend/src/modules/rate_limit/__init__.py new file mode 100644 index 00000000..72e5d094 --- /dev/null +++ b/backend/src/modules/rate_limit/__init__.py @@ -0,0 +1,17 @@ +"""Rate limiting feature. + +This module contains the domain models and CRUD operations for rate limits. +The actual implementation of rate limiting is in the infrastructure layer. 
+""" + +from .crud import crud_rate_limits +from .models import RateLimit +from .schemas import RateLimitCreate, RateLimitRead, RateLimitUpdate + +__all__ = [ + "RateLimitCreate", + "RateLimitUpdate", + "RateLimitRead", + "RateLimit", + "crud_rate_limits", +] diff --git a/backend/src/modules/rate_limit/crud.py b/backend/src/modules/rate_limit/crud.py new file mode 100644 index 00000000..4260e754 --- /dev/null +++ b/backend/src/modules/rate_limit/crud.py @@ -0,0 +1,5 @@ +from fastcrud import FastCRUD + +from .models import RateLimit + +crud_rate_limits: FastCRUD = FastCRUD(RateLimit) diff --git a/backend/src/modules/rate_limit/models.py b/backend/src/modules/rate_limit/models.py new file mode 100644 index 00000000..071df409 --- /dev/null +++ b/backend/src/modules/rate_limit/models.py @@ -0,0 +1,25 @@ +from sqlalchemy import ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column + +from ...infrastructure.database import Base +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin + + +class RateLimit(Base, TimestampMixin, SoftDeleteMixin): + """Rate limit configuration for API endpoints.""" + + __tablename__ = "rate_limits" + + id: Mapped[int] = mapped_column( + "id", + autoincrement=True, + nullable=False, + unique=True, + primary_key=True, + init=False, + ) + tier_id: Mapped[int] = mapped_column(ForeignKey("tiers.id"), index=True) + name: Mapped[str] = mapped_column(String, nullable=False, unique=True) + path: Mapped[str] = mapped_column(String, nullable=False) + limit: Mapped[int] = mapped_column(Integer, nullable=False) + period: Mapped[int] = mapped_column(Integer, nullable=False) diff --git a/backend/src/modules/rate_limit/routes.py b/backend/src/modules/rate_limit/routes.py new file mode 100644 index 00000000..a768ca89 --- /dev/null +++ b/backend/src/modules/rate_limit/routes.py @@ -0,0 +1,207 @@ +from typing import Annotated, Any + +from fastapi import APIRouter, Depends +from fastcrud import 
PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.http_exceptions import DuplicateValueException, HTTPException, NotFoundException +from ...infrastructure.auth.session.dependencies import get_current_superuser +from ...infrastructure.database.session import async_session +from ..common.exceptions import ResourceExistsError, ResourceNotFoundError +from ..common.utils.error_handler import handle_exception +from .schemas import ( + RateLimitRead, + RateLimitUpdate, +) +from .service import RateLimitService + +router = APIRouter(tags=["Rate Limits"]) + + +def get_rate_limit_service() -> RateLimitService: + """Dependency for providing a RateLimitService instance.""" + return RateLimitService() + + +@router.get( + "/", + response_model=PaginatedListResponse[RateLimitRead], + summary="List All Rate Limits", + description=""" + Retrieves a paginated list of all rate limits defined in the system. + + This endpoint provides information about the API rate limits configured + for different subscription tiers. Each rate limit defines: + - The API path it applies to + - The maximum number of requests allowed (limit) + - The time period in seconds for the limit (period) + - The tier it belongs to + + Results are paginated to handle systems with many rate limit configurations. + """, + responses={401: {"description": "Not authenticated"}}, + response_description="A paginated list of rate limits with their configuration details", +) +async def get_rate_limits( + db: Annotated[AsyncSession, Depends(async_session)], + rate_limit_service: Annotated[RateLimitService, Depends(get_rate_limit_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + """ + Get a paginated list of all rate limits. + This endpoint is available to all authenticated users. 
+ """ + try: + rate_limits_data = await rate_limit_service.get_all( + db=db, + skip=compute_offset(page, items_per_page), + limit=items_per_page, + ) + + return paginated_response(crud_data=rate_limits_data, page=page, items_per_page=items_per_page) + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get( + "/{name}", + response_model=RateLimitRead, + summary="Get Active Rate Limit Details", + description=""" + Retrieves detailed information about a specific rate limit by name. + + This endpoint returns configuration details for a single rate limit, + identified by its unique name. The response includes: + - The API path it applies to + - The maximum number of requests allowed (limit) + - The time period in seconds for the limit (period) + - The tier it belongs to + + Rate limit names are typically in the format of `path:limit:period`. + """, + responses={401: {"description": "Not authenticated"}, 404: {"description": "Rate limit not found"}}, + response_description="Detailed configuration of the requested rate limit", +) +async def get_rate_limit( + name: str, + db: Annotated[AsyncSession, Depends(async_session)], + rate_limit_service: Annotated[RateLimitService, Depends(get_rate_limit_service)], +) -> dict[str, Any] | None: + """ + Get detailed information about a specific rate limit by name. + This endpoint is available to all authenticated users. 
+ """ + try: + rate_limit = await rate_limit_service.get_by_name(name, db) + return rate_limit + except ResourceNotFoundError: + raise NotFoundException("Rate limit not found") + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.patch( + "/{name}", + summary="Update Rate Limit (Admin)", + description=""" + Updates an existing rate limit configuration. + + This admin-only endpoint allows modifying the properties of a rate limit + identified by its unique name. The following properties can be updated: + - API path: The endpoint pattern to apply the limit to + - Limit: Maximum number of requests allowed in the period + - Period: Time window in seconds for the limit + - Name: The identifier of the rate limit + + Only the fields provided in the request will be updated. Omitted fields + will retain their current values. + + Note that updating a rate limit immediately affects all users in the + associated tier. + """, + responses={ + 200: {"description": "Rate limit updated successfully"}, + 400: {"description": "Invalid rate limit data"}, + 403: {"description": "Not authorized - requires admin privileges"}, + 404: {"description": "Rate limit not found"}, + 409: {"description": "New rate limit name already exists"}, + }, + response_description="Success confirmation message", +) +async def update_rate_limit( + name: str, + values: RateLimitUpdate, + db: Annotated[AsyncSession, Depends(async_session)], + rate_limit_service: Annotated[RateLimitService, Depends(get_rate_limit_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + """ + Update an existing rate limit. + This endpoint is restricted to superusers only. 
+ """ + try: + await rate_limit_service.update(name, values, db) + return {"message": "Rate limit updated"} + except ResourceNotFoundError: + raise NotFoundException("Rate limit not found") + except ResourceExistsError: + raise DuplicateValueException("Rate limit name already exists") + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.delete( + "/{name}", + summary="Permanent Delete Rate Limit (Admin)", + description=""" + Permanently removes a rate limit configuration from the system. + + This admin-only endpoint allows deletion of a rate limit identified + by its unique name. Deleting a rate limit will immediately affect all + users in the associated tier. + + Once a rate limit is deleted, the API endpoints previously governed + by that limit will fall back to either: + - Another less specific rate limit configuration for the same tier + - The default system-wide rate limit configuration + + Use this endpoint with caution, as removing rate limits could potentially + allow users to make unlimited requests to certain API endpoints. + """, + responses={ + 200: {"description": "Rate limit deleted successfully"}, + 403: {"description": "Not authorized - requires admin privileges"}, + 404: {"description": "Rate limit not found"}, + }, + response_description="Success confirmation message", +) +async def delete_rate_limit( + name: str, + db: Annotated[AsyncSession, Depends(async_session)], + rate_limit_service: Annotated[RateLimitService, Depends(get_rate_limit_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + """ + Delete a rate limit. + This endpoint is restricted to superusers only. 
+ """ + try: + await rate_limit_service.delete(name, db) + return {"message": "Rate limit deleted"} + except ResourceNotFoundError: + raise NotFoundException("Rate limit not found") + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") diff --git a/backend/src/modules/rate_limit/schemas.py b/backend/src/modules/rate_limit/schemas.py new file mode 100644 index 00000000..b00609d5 --- /dev/null +++ b/backend/src/modules/rate_limit/schemas.py @@ -0,0 +1,97 @@ +import re +from datetime import datetime +from typing import Annotated + +from pydantic import BaseModel, ConfigDict, Field, field_validator + +from ..common.schemas import TimestampSchema + + +class RateLimitBase(BaseModel): + """Base rate limit schema with common attributes.""" + + path: Annotated[str, Field(examples=["/api/v1/users"])] + limit: Annotated[int, Field(examples=[5], gt=0)] + period: Annotated[int, Field(examples=[60], gt=0)] + + @field_validator("path") + def validate_path_format(cls, v: str) -> str: + """Validate path has proper API path format.""" + if not v.startswith("/"): + raise ValueError("Path must start with a forward slash (/)") + + if not re.match(r"^\/[a-zA-Z0-9_\-\/]+$", v): + raise ValueError("Path must be a valid API path format, e.g. 
/api/v1/users") + + return v + + +class RateLimit(TimestampSchema, RateLimitBase): + """Complete rate limit schema.""" + + tier_id: int + name: Annotated[str | None, Field(default=None, examples=["users:5:60"])] + + +class RateLimitSelect(BaseModel): + """Minimal schema for selecting only required rate limit fields.""" + + limit: int + period: int + + +class RateLimitRead(RateLimitBase): + """Schema for reading rate limit data.""" + + id: int + tier_id: int + name: str + is_deleted: bool = False + + +class RateLimitCreate(RateLimitBase): + """Schema for creating a new rate limit.""" + + model_config = ConfigDict(extra="forbid") + name: Annotated[str | None, Field(default=None, examples=["api_v1_users:5:60"])] + + +class RateLimitCreateInternal(RateLimitCreate): + """Internal schema for rate limit creation.""" + + tier_id: int + + +class RateLimitUpdate(BaseModel): + """Schema for updating rate limit information.""" + + path: str | None = Field(default=None) + limit: int | None = Field(default=None, gt=0) + period: int | None = Field(default=None, gt=0) + name: str | None = None + + @field_validator("path") + def validate_path_format(cls, v: str | None) -> str | None: + """Validate path has proper API path format.""" + if v is None: + return None + + if not v.startswith("/"): + raise ValueError("Path must start with a forward slash (/)") + + if not re.match(r"^\/[a-zA-Z0-9_\-\/]+$", v): + raise ValueError("Path must be a valid API path format, e.g. 
/api/v1/users") + + return v + + +class RateLimitUpdateInternal(RateLimitUpdate): + """Internal schema for rate limit updates.""" + + updated_at: datetime + + +class RateLimitDelete(BaseModel): + """Schema for deleting a rate limit.""" + + pass diff --git a/backend/src/modules/rate_limit/service.py b/backend/src/modules/rate_limit/service.py new file mode 100644 index 00000000..9161d8a9 --- /dev/null +++ b/backend/src/modules/rate_limit/service.py @@ -0,0 +1,115 @@ +import uuid +from datetime import UTC, datetime +from typing import Any + +from fastcrud.types import GetMultiResponseDict +from sqlalchemy.ext.asyncio import AsyncSession + +from ..common.exceptions import ( + PermissionDeniedError, + ResourceExistsError, + ResourceNotFoundError, + TierNotFoundError, +) +from ..tier.crud import crud_tiers +from .crud import crud_rate_limits +from .schemas import ( + RateLimitCreate, + RateLimitCreateInternal, + RateLimitRead, + RateLimitUpdate, + RateLimitUpdateInternal, +) + + +class RateLimitService: + """Service class for rate limit-related operations.""" + + async def create(self, rate_limit: RateLimitCreate, tier_id: int, db: AsyncSession) -> dict[str, Any]: + """Create a new rate limit for a tier.""" + tier_exists = await crud_tiers.exists(db=db, id=tier_id) + if not tier_exists: + raise TierNotFoundError(f"Tier with ID {tier_id} not found") + + rate_limit_dict = rate_limit.model_dump() + + if not rate_limit_dict.get("name"): + unique_id = uuid.uuid4().hex[:6] + rate_limit_dict["name"] = f"rate_limit_{unique_id}" + + name_exists = await crud_rate_limits.exists(db=db, name=rate_limit_dict["name"]) + if name_exists: + raise ResourceExistsError(f"Rate limit with name '{rate_limit_dict['name']}' already exists") + + rate_limit_internal = RateLimitCreateInternal(**rate_limit_dict, tier_id=tier_id) + created_rate_limit = await crud_rate_limits.create(db=db, object=rate_limit_internal, schema_to_select=RateLimitRead) + + if not created_rate_limit: + raise 
ResourceExistsError("Failed to create rate limit") + return created_rate_limit + + async def get_all(self, db: AsyncSession, skip: int = 0, limit: int = 100) -> GetMultiResponseDict: + """Get all rate limits with pagination.""" + return await crud_rate_limits.get_multi( + db=db, offset=skip, limit=limit, schema_to_select=RateLimitRead, is_deleted=False + ) + + async def get_by_id(self, rate_limit_id: int, db: AsyncSession) -> dict[str, Any]: + """Get a rate limit by ID.""" + rate_limit = await crud_rate_limits.get( + db=db, + id=rate_limit_id, + schema_to_select=RateLimitRead, + is_deleted=False, + ) + if not rate_limit: + raise ResourceNotFoundError(f"Rate limit with ID {rate_limit_id} not found") + return rate_limit + + async def get_by_name(self, name: str, db: AsyncSession) -> dict[str, Any]: + """Get an active rate limit by name.""" + rate_limit = await crud_rate_limits.get( + db=db, + name=name, + schema_to_select=RateLimitRead, + is_deleted=False, + ) + if not rate_limit: + raise ResourceNotFoundError(f"Rate limit with name '{name}' not found") + return rate_limit + + async def get_active_and_inactive_by_name(self, name: str, db: AsyncSession) -> dict[str, Any]: + """Get an active or inactive rate limit by name.""" + rate_limit = await crud_rate_limits.get(db=db, name=name, schema_to_select=RateLimitRead) + if not rate_limit: + raise ResourceNotFoundError(f"Rate limit with name '{name}' not found") + return rate_limit + + async def update(self, name: str, rate_limit_update: RateLimitUpdate, db: AsyncSession) -> None: + """Update a rate limit by name.""" + existing_rate_limit = await crud_rate_limits.get(db=db, name=name, schema_to_select=RateLimitRead) + if not existing_rate_limit: + raise ResourceNotFoundError(f"Rate limit with name '{name}' not found") + + update_data = rate_limit_update.model_dump(exclude_unset=True) + if "name" in update_data and update_data["name"] != name: + name_exists = await crud_rate_limits.exists(db=db, name=update_data["name"]) + 
if name_exists: + raise ResourceExistsError(f"Rate limit with name '{update_data['name']}' already exists") + + internal_update = RateLimitUpdateInternal(**update_data, updated_at=datetime.now(UTC)) + + await crud_rate_limits.update(db=db, object=internal_update, name=name) + + async def delete(self, name: str, db: AsyncSession) -> None: + """Permanently delete a rate limit by name.""" + existing_rate_limit = await crud_rate_limits.get(db=db, name=name, schema_to_select=RateLimitRead) + if not existing_rate_limit: + raise ResourceNotFoundError(f"Rate limit with name '{name}' not found") + + await crud_rate_limits.db_delete(db=db, name=name) + + async def verify_superuser(self, user: dict[str, Any], action: str = "manage rate limits") -> None: + """Verify that the user is a superuser.""" + if not user.get("is_superuser", False): + raise PermissionDeniedError(f"Only superusers can {action}") diff --git a/backend/src/modules/tier/__init__.py b/backend/src/modules/tier/__init__.py new file mode 100644 index 00000000..f4fdcb91 --- /dev/null +++ b/backend/src/modules/tier/__init__.py @@ -0,0 +1,27 @@ +from .models import Tier as TierModel +from .schemas import ( + Tier as TierSchema, +) +from .schemas import ( + TierBase, + TierCreate, + TierCreateInternal, + TierDelete, + TierRead, + TierUpdate, + TierUpdateInternal, +) + +__all__ = [ + # Models + "TierModel", + # Schemas + "TierSchema", + "TierBase", + "TierCreate", + "TierCreateInternal", + "TierDelete", + "TierRead", + "TierUpdate", + "TierUpdateInternal", +] diff --git a/backend/src/modules/tier/crud.py b/backend/src/modules/tier/crud.py new file mode 100644 index 00000000..3f1a58f4 --- /dev/null +++ b/backend/src/modules/tier/crud.py @@ -0,0 +1,5 @@ +from fastcrud import FastCRUD + +from .models import Tier + +crud_tiers: FastCRUD = FastCRUD(Tier) diff --git a/backend/src/modules/tier/models.py b/backend/src/modules/tier/models.py new file mode 100644 index 00000000..a2c8d40d --- /dev/null +++ 
b/backend/src/modules/tier/models.py @@ -0,0 +1,32 @@ +from typing import TYPE_CHECKING + +from sqlalchemy import String, Text +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin +from ...infrastructure.database.session import Base + +if TYPE_CHECKING: + from ..user.models import User + + +class Tier(Base, TimestampMixin, SoftDeleteMixin): + """Tier model — bare model for user categorization. No business logic, no pricing.""" + + __tablename__ = "tiers" + + id: Mapped[int] = mapped_column( + "id", + autoincrement=True, + nullable=False, + unique=True, + primary_key=True, + init=False, + ) + name: Mapped[str] = mapped_column(String, nullable=False, unique=True) + description: Mapped[str | None] = mapped_column(Text, default=None) + + users: Mapped[list["User"]] = relationship("User", back_populates="tier", lazy="selectin", default_factory=list, init=False) + + def __repr__(self) -> str: + return self.name diff --git a/backend/src/modules/tier/routes.py b/backend/src/modules/tier/routes.py new file mode 100644 index 00000000..efbecdfd --- /dev/null +++ b/backend/src/modules/tier/routes.py @@ -0,0 +1,59 @@ +from typing import Annotated, Any + +from fastapi import APIRouter, Depends, HTTPException +from fastcrud import PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.http_exceptions import NotFoundException +from ...infrastructure.database.session import async_session +from ..common.exceptions import TierNotFoundError +from ..common.utils.error_handler import handle_exception +from .schemas import TierRead +from .service import TierService + +router = APIRouter(tags=["Tiers"]) + + +def get_tier_service() -> TierService: + """Dependency for providing a TierService instance.""" + return TierService() + + +@router.get("/", response_model=PaginatedListResponse[TierRead], summary="List tiers") 
+async def get_tiers( + db: Annotated[AsyncSession, Depends(async_session)], + tier_service: Annotated[TierService, Depends(get_tier_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict: + """Paginated list of tiers.""" + try: + tiers_data = await tier_service.get_all( + db=db, + skip=compute_offset(page, items_per_page), + limit=items_per_page, + ) + return paginated_response(crud_data=tiers_data, page=page, items_per_page=items_per_page) + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get("/{name}", response_model=TierRead, summary="Get a tier by name") +async def get_tier_by_name( + name: str, + db: Annotated[AsyncSession, Depends(async_session)], + tier_service: Annotated[TierService, Depends(get_tier_service)], +) -> dict[str, Any]: + """Get a tier by name.""" + try: + return await tier_service.get_by_name(name, db) + except TierNotFoundError: + raise NotFoundException("Tier not found") + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") diff --git a/backend/src/modules/tier/schemas.py b/backend/src/modules/tier/schemas.py new file mode 100644 index 00000000..0f26b03b --- /dev/null +++ b/backend/src/modules/tier/schemas.py @@ -0,0 +1,95 @@ +from datetime import datetime +from typing import Annotated + +from pydantic import BaseModel, Field + +from ..common.schemas import TimestampSchema + + +class TierBase(BaseModel): + """Base tier schema with common attributes.""" + + name: Annotated[ + str, + Field( + description="Name of the tier", + examples=["free", "basic", "pro", "enterprise"], + min_length=1, + max_length=50, + ), + ] + + +class Tier(TimestampSchema, TierBase): + """Complete tier schema with timestamps.""" + + pass + + +class TierSelect(BaseModel): + 
"""Minimal schema for selecting only required tier fields.""" + + id: int + name: str + + +class TierRead(TierBase): + """Schema for reading tier data.""" + + id: int + created_at: datetime + description: str | None = None + is_deleted: bool = False + + +class TierCreate(TierBase): + """Schema for creating a new tier.""" + + description: Annotated[ + str | None, + Field( + description="Description of the tier", + max_length=500, + default=None, + ), + ] + + +class TierCreateInternal(TierCreate): + """Internal schema for tier creation.""" + + pass + + +class TierUpdate(BaseModel): + """Schema for updating tier information.""" + + name: Annotated[ + str | None, + Field( + description="Name of the tier", + min_length=1, + max_length=50, + default=None, + ), + ] + description: Annotated[ + str | None, + Field( + description="Description of the tier", + max_length=500, + default=None, + ), + ] + + +class TierUpdateInternal(TierUpdate): + """Internal schema for tier updates.""" + + updated_at: datetime + + +class TierDelete(BaseModel): + """Schema for deleting a tier.""" + + pass diff --git a/backend/src/modules/tier/service.py b/backend/src/modules/tier/service.py new file mode 100644 index 00000000..57908338 --- /dev/null +++ b/backend/src/modules/tier/service.py @@ -0,0 +1,83 @@ +from typing import Any + +from fastcrud.types import GetMultiResponseDict +from sqlalchemy.ext.asyncio import AsyncSession + +from ..common.exceptions import PermissionDeniedError, ResourceExistsError, TierNotFoundError +from .crud import crud_tiers +from .schemas import ( + TierCreate, + TierCreateInternal, + TierRead, + TierUpdate, +) + + +class TierService: + """Service class for tier-related operations. + + Tiers are bare categorization labels. They have no business logic of their own — + consumers wire tiers to whatever they need (rate limits, feature flags, billing). 
+ """ + + async def create(self, tier: TierCreate, db: AsyncSession) -> dict[str, Any]: + """Create a new tier.""" + tier_dict = tier.model_dump() + if await crud_tiers.exists(db=db, name=tier_dict["name"]): + raise ResourceExistsError(f"Tier with name '{tier_dict['name']}' already exists") + + tier_internal = TierCreateInternal(**tier_dict) + created_tier = await crud_tiers.create(db=db, object=tier_internal, schema_to_select=TierRead) + if not created_tier: + raise ResourceExistsError("Failed to create tier") + return created_tier + + async def get_all(self, db: AsyncSession, skip: int = 0, limit: int = 100) -> GetMultiResponseDict: + """Retrieve all tiers with pagination.""" + return await crud_tiers.get_multi(db=db, offset=skip, limit=limit, schema_to_select=TierRead, is_deleted=False) + + async def get_by_id(self, tier_id: int, db: AsyncSession) -> dict[str, Any]: + """Retrieve a tier by ID.""" + tier = await crud_tiers.get(db=db, id=tier_id, schema_to_select=TierRead, is_deleted=False) + if not tier: + raise TierNotFoundError(f"Tier with ID {tier_id} not found") + return tier + + async def get_by_name(self, name: str, db: AsyncSession) -> dict[str, Any]: + """Retrieve a tier by name.""" + tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead, is_deleted=False) + if not tier: + raise TierNotFoundError(f"Tier with name '{name}' not found") + return tier + + async def update(self, name: str, tier_update: TierUpdate, db: AsyncSession) -> None: + """Update a tier by name.""" + existing_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead) + if not existing_tier: + raise TierNotFoundError(f"Tier with name '{name}' not found") + + update_data = tier_update.model_dump(exclude_unset=True) + if "name" in update_data and update_data["name"] != name: + if await crud_tiers.exists(db=db, name=update_data["name"]): + raise ResourceExistsError(f"Tier with name '{update_data['name']}' already exists") + + await crud_tiers.update(db=db, 
object=tier_update, name=name) + + async def delete(self, name: str, db: AsyncSession) -> None: + """Soft delete a tier.""" + existing_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead, is_deleted=False) + if not existing_tier: + raise TierNotFoundError(f"Tier with name '{name}' not found") + await crud_tiers.delete(db=db, name=name) + + async def permanent_delete(self, name: str, db: AsyncSession) -> None: + """Permanently delete a tier.""" + existing_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead) + if not existing_tier: + raise TierNotFoundError(f"Tier with name '{name}' not found") + await crud_tiers.db_delete(db=db, name=name) + + async def verify_superuser(self, user: dict[str, Any], action: str = "manage tiers") -> None: + """Verify that a user has superuser privileges.""" + if not user.get("is_superuser", False): + raise PermissionDeniedError(f"Only superusers can {action}") diff --git a/backend/src/modules/user/__init__.py b/backend/src/modules/user/__init__.py new file mode 100644 index 00000000..d48d619c --- /dev/null +++ b/backend/src/modules/user/__init__.py @@ -0,0 +1,34 @@ +"""User module for user management.""" + +from .enums import OAuthProvider +from .models import User as UserModel +from .schemas import ( + User as UserSchema, +) +from .schemas import ( + UserBase, + UserCreate, + UserDelete, + UserRead, + UserRestoreDeleted, + UserTierUpdate, + UserUpdate, + UserUpdateInternal, +) + +__all__ = [ + # Enums + "OAuthProvider", + # Models + "UserModel", + # Schemas + "UserSchema", + "UserBase", + "UserCreate", + "UserDelete", + "UserRead", + "UserRestoreDeleted", + "UserTierUpdate", + "UserUpdate", + "UserUpdateInternal", +] diff --git a/backend/src/modules/user/crud.py b/backend/src/modules/user/crud.py new file mode 100644 index 00000000..629537e1 --- /dev/null +++ b/backend/src/modules/user/crud.py @@ -0,0 +1,5 @@ +from fastcrud import FastCRUD + +from .models import User + +crud_users: FastCRUD = 
FastCRUD(User) diff --git a/backend/src/modules/user/enums.py b/backend/src/modules/user/enums.py new file mode 100644 index 00000000..33b2c660 --- /dev/null +++ b/backend/src/modules/user/enums.py @@ -0,0 +1,15 @@ +"""User enums for OAuth provider management.""" + +from enum import StrEnum + + +class OAuthProvider(StrEnum): + """OAuth provider types for user authentication. + + These values are used to identify the OAuth provider used for registration + and login. The string values must match the provider names used in the + OAuth configuration and factory registration. + """ + + GOOGLE = "google" + GITHUB = "github" diff --git a/backend/src/modules/user/models.py b/backend/src/modules/user/models.py new file mode 100644 index 00000000..a294e557 --- /dev/null +++ b/backend/src/modules/user/models.py @@ -0,0 +1,54 @@ +from datetime import datetime +from typing import TYPE_CHECKING + +from sqlalchemy import DateTime, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin +from ...infrastructure.database.session import Base + +if TYPE_CHECKING: + from ..tier.models import Tier + + +class User(Base, TimestampMixin, SoftDeleteMixin): + """User model representing application users.""" + + __tablename__ = "user" + + id: Mapped[int] = mapped_column( + "id", + autoincrement=True, + nullable=False, + unique=True, + primary_key=True, + init=False, + ) + + name: Mapped[str] = mapped_column(String(30)) + username: Mapped[str] = mapped_column(String(20), unique=True, index=True) + email: Mapped[str] = mapped_column(String(50), unique=True, index=True) + hashed_password: Mapped[str] = mapped_column(String(100)) + + profile_image_url: Mapped[str] = mapped_column(String, default="https://profileimageurl.com") + + tier_id: Mapped[int | None] = mapped_column( + Integer, + ForeignKey("tiers.id"), + index=True, + default=None, + ) + + is_superuser: Mapped[bool] = 
mapped_column(default=False) + + google_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + github_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + oauth_provider: Mapped[str | None] = mapped_column(String(20), default=None) + email_verified: Mapped[bool] = mapped_column(default=False) + oauth_created_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + oauth_updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) + + tier: Mapped["Tier | None"] = relationship("Tier", back_populates="users", lazy="selectin", init=False) + + def __repr__(self) -> str: + return f"{self.name} ({self.email})" diff --git a/backend/src/modules/user/routes.py b/backend/src/modules/user/routes.py new file mode 100644 index 00000000..d3b2bde6 --- /dev/null +++ b/backend/src/modules/user/routes.py @@ -0,0 +1,477 @@ +from typing import Annotated, Any + +from fastapi import APIRouter, Depends +from fastcrud import PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.http_exceptions import HTTPException +from ...infrastructure.auth.session.dependencies import ( + get_current_superuser, + get_current_user, +) +from ...infrastructure.database.session import async_session +from ..common.utils.error_handler import handle_exception +from .schemas import ( + UserCreate, + UserRead, + UserTierUpdate, + UserUpdate, +) +from .service import UserService + +router = APIRouter(tags=["Users"]) + + +def get_user_service() -> UserService: + """Dependency for providing a UserService instance.""" + return UserService() + + +@router.post( + "/", + status_code=201, + response_model=UserRead, + summary="Create New User Account", + description=""" + Creates a new user account in the system. 
+ + This endpoint allows registration of new users with their basic information: + - Full name + - Username (must be lowercase alphanumeric) + - Email address + - Password (with security requirements) + + New accounts are automatically assigned to the default tier. + """, + responses={ + 201: {"description": "User account created successfully"}, + 400: {"description": "Invalid user data"}, + 409: {"description": "Username or email already exists"}, + }, + response_description="The created user profile with assigned ID", +) +async def create_user( + user: UserCreate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + """Create a new user account.""" + try: + return await user_service.create(user, db) + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get( + "/", + response_model=PaginatedListResponse[UserRead], + summary="List All Users (Admin)", + description=""" + Retrieves a paginated list of all users in the system. + + This admin-only endpoint provides access to all user accounts and supports + pagination to handle large numbers of users efficiently. The results include + basic profile information for each user. + + For security reasons, sensitive information like passwords is never included + in the response. 
+ """, + responses={401: {"description": "Not authenticated"}, 403: {"description": "Not authorized - requires admin privileges"}}, + response_description="A paginated list of users with total count and pagination metadata", +) +async def get_users( + db: Annotated[AsyncSession, Depends(async_session)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + """Get paginated list of all users (admin only).""" + users_data = await user_service.get_paginated(skip=compute_offset(page, items_per_page), limit=items_per_page, db=db) + + return paginated_response(crud_data=users_data, page=page, items_per_page=items_per_page) + + +@router.get( + "/me", + response_model=UserRead, + summary="Get Current User Profile", + description=""" + Retrieves the profile information of the currently authenticated user. + + This endpoint provides users with their own profile data including: + - Basic profile information (name, username, email) + - Profile image URL + - Subscription tier information + - Authentication details (superuser status, email verification) + + This is a convenient way for frontend applications to get the current + user's information for display or personalization purposes. + """, + responses={401: {"description": "Not authenticated"}}, + response_description="The current user's profile data", +) +async def get_current_user_profile( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + """Get current authenticated user's profile.""" + return current_user + + +@router.get( + "/{username}", + response_model=UserRead, + summary="Get User Profile by Username", + description=""" + Retrieves a user's profile information by their unique username. + + This endpoint can be used to look up any active user in the system by their + username. 
It returns the same profile data structure as other user + endpoints but does not include sensitive information. + + Note that usernames are case-sensitive in lookup operations. + """, + responses={404: {"description": "User not found"}}, + response_description="The requested user's profile data", +) +async def get_user_by_username( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + """Get user profile by username.""" + try: + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + return user + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get( + "/active-and-inactive/{username}", + response_model=UserRead, + summary="Get Active and Inactive User Profile by Username(Admin)", + description=""" + Retrieve a user's profile information by their unique username. + + This endpoint can be used to look up any user in the system by their + username. It returns the same profile data structure as other user + endpoints but does not include sensitive information. + + Note that usernames are case-sensitive in lookup operations. 
+ """, + responses={ + 401: {"description": "Not authenticated"}, + 403: {"description": "Not authorized - requires admin privileges"}, + 404: {"description": "User not found"}, + }, + response_description="the requested user's profile data", +) +async def get_active_and_inactive_user_by_username( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + """Get active and inactive profile by username.""" + try: + user = await user_service.get_active_and_inactive_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + return user + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.patch( + "/{username}", + summary="Update User Profile", + description=""" + Updates a user's profile information. + + This endpoint allows users to modify their own profile data or administrators + to modify any user's data. Only the fields provided in the request will be + updated, and all fields are optional. + + Permission rules: + - Regular users can only update their own profiles + - Administrators can update any user's profile + - Note: Tier updates are handled by a separate endpoint (/users/{username}/tier) + + Username and email changes are validated to ensure uniqueness. 
+ """, + responses={ + 200: {"description": "Profile updated successfully"}, + 400: {"description": "Invalid profile data"}, + 403: {"description": "Not authorized to update this profile"}, + 404: {"description": "User not found"}, + 409: {"description": "Username or email already exists"}, + }, + response_description="Success confirmation message", +) +async def update_user_profile( + username: str, + values: UserUpdate, + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, str]: + """Update user profile information.""" + try: + await user_service.verify_user_permission(current_user, username, "update profile") + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + + await user_service.update(user["id"], values, db) + return {"message": "User updated successfully"} + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.delete( + "/{username}", + summary="Deactivate User Account", + description=""" + Soft-deletes (deactivates) a user account. + + This endpoint performs a logical deletion of a user account, marking it + as deactivated in the system rather than permanently removing it. This allows + for potential reactivation in the future. + + Permission rules: + - Regular users can only deactivate their own accounts + - Administrators can deactivate any user's account + + Deactivated accounts cannot be used for login and are typically hidden + from regular user listings. 
+ """, + responses={ + 200: {"description": "Account deactivated successfully"}, + 403: {"description": "Not authorized to deactivate this account"}, + 404: {"description": "User not found"}, + }, + response_description="Success confirmation message", +) +async def delete_user_account( + username: str, + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, str]: + """Soft delete a user account.""" + try: + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + + await user_service.verify_user_permission(current_user, username, "delete this account") + await user_service.delete(user["id"], db) + return {"message": "User account deactivated"} + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.delete( + "/db/{username}", + summary="GDPR Delete User (Admin)", + description=""" + GDPR/LGPD compliant user deletion with data anonymization. + + This admin-only endpoint anonymizes user PII while preserving business data + integrity and maintaining referential relationships for conversations and + analytics data. 
+ + This operation: + - Removes personally identifiable information (PII) + - Retains email for legal compliance purposes + - Prevents future login by clearing credentials + - Maintains foreign key relationships (conversations, logs) + - Logs the deletion event with legal basis for audit compliance + + Unlike hard deletion, this approach: + - Complies with GDPR Article 17 (Right to Erasure) + - Preserves business analytics data + - Eliminates foreign key constraint violations + - Maintains audit trails for legal requirements + + This operation is needed for: + - GDPR/LGPD data deletion requests + - Legal compliance while preserving business data + - Safe user removal without breaking referential integrity + """, + responses={ + 200: {"description": "User anonymized successfully"}, + 403: {"description": "Not authorized - requires admin privileges"}, + 404: {"description": "User not found"}, + }, + response_description="Success confirmation message", +) +async def gdpr_delete_user( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + """GDPR compliant user anonymization (admin only).""" + try: + user = await user_service.get_active_and_inactive_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + await user_service.anonymize_user(user["id"], db) + return {"message": "User data anonymized in compliance with GDPR"} + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get( + "/{username}/rate-limits", + summary="Get User Rate Limits", + description=""" + Retrieves the rate limit configuration for a specific user. 
+ + This endpoint returns detailed information about API rate limits + applicable to the user based on their subscription tier. This includes + limits for different API endpoints and operations. + + Permission rules: + - Users can view their own rate limits + - Administrators can view any user's rate limits + + This is useful for applications to understand their usage allowances + and implement appropriate client-side throttling. + """, + responses={ + 200: {"description": "Rate limit information retrieved"}, + 403: {"description": "Not authorized to view these rate limits"}, + 404: {"description": "User not found"}, + }, + response_description="Detailed rate limit configuration for the user", +) +async def get_user_rate_limits( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + """Get rate limits for a user.""" + try: + await user_service.verify_user_permission(current_user, username, "view rate limits") + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + return await user_service.get_rate_limits(user["id"], db) + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.get( + "/{username}/tier", + summary="Get User Subscription Tier", + description=""" + Retrieves detailed information about a user's subscription tier. + + This endpoint returns comprehensive data about the user's current + subscription tier, including name, features, limitations, and any + custom configurations. 
+ + Permission rules: + - Users can view their own tier information + - Administrators can view any user's tier information + + This is useful for displaying subscription information to users + or for determining available features in client applications. + """, + responses={ + 200: {"description": "Tier information retrieved"}, + 403: {"description": "Not authorized to view this tier information"}, + 404: {"description": "User not found"}, + }, + response_description="User profile with detailed tier information", +) +async def get_user_tier( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + """Get detailed tier information for a user.""" + try: + await user_service.verify_user_permission(current_user, username, "view tier information") + + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + return await user_service.get_user_with_tier(user["id"], db) + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") + + +@router.patch( + "/{username}/tier", + summary="Update User Subscription Tier (Admin)", + description=""" + Changes a user's subscription tier. + + This admin-only endpoint allows changing which subscription tier + a user is assigned to. This affects the user's: + - API rate limits + - Available features + - Access privileges + + When a user's tier is changed, all related configurations (such as + rate limits) are automatically updated based on the new tier's settings. 
+ """, + responses={ + 200: {"description": "User tier updated successfully"}, + 400: {"description": "Invalid tier ID"}, + 403: {"description": "Not authorized - requires admin privileges"}, + 404: {"description": "User not found or tier not found"}, + }, + response_description="Success confirmation message", +) +async def update_user_tier( + username: str, + values: UserTierUpdate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + """Update a user's subscription tier (admin only).""" + try: + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + await user_service.update_tier(user["id"], values, db) + return {"message": "User tier updated successfully"} + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") diff --git a/backend/src/modules/user/schemas.py b/backend/src/modules/user/schemas.py new file mode 100644 index 00000000..4473cee6 --- /dev/null +++ b/backend/src/modules/user/schemas.py @@ -0,0 +1,177 @@ +from datetime import datetime +from typing import Annotated + +from pydantic import BaseModel, ConfigDict, EmailStr, Field + +from ..common.schemas import PersistentDeletion, TimestampSchema + + +class UserBase(BaseModel): + name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])] + username: Annotated[ + str, + Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"]), + ] + email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])] + + +class User(TimestampSchema, UserBase, PersistentDeletion): + """Complete user model with all fields.""" + + hashed_password: str + is_superuser: bool = False + 
profile_image_url: Annotated[
+        str,
+        Field(
+            default="https://www.profileimageurl.com",
+            description="URL of the user's profile image",
+        ),
+    ]
+    tier_id: int | None = None
+
+    google_id: str | None = None
+    github_id: str | None = None
+    oauth_provider: str | None = None
+    email_verified: bool = False
+    oauth_created_at: datetime | None = None
+    oauth_updated_at: datetime | None = None
+
+
+class UserRead(BaseModel):
+    """Schema for reading user data, excludes sensitive information."""
+
+    id: int
+    name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])]
+    username: Annotated[
+        str,
+        Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"]),
+    ]
+    email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])]
+    profile_image_url: str
+    is_deleted: bool = False
+    tier_id: int | None
+    is_superuser: bool = False
+    email_verified: bool = False
+    oauth_provider: str | None = None
+
+
+class UserCreate(UserBase):
+    """Schema for creating a new user."""
+
+    password: Annotated[
+        str,
+        Field(
+            min_length=8,
+            description=(
+                "Password must be at least 8 characters long and include a number, "
+                "uppercase letter, lowercase letter, and special character"
+            ),
+            examples=["Str1ngst!"],
+            pattern=r"^.{8,}|[0-9]+|[A-Z]+|[a-z]+|[^a-zA-Z0-9]+$",
+        ),
+    ]
+    google_id: str | None = None
+    github_id: str | None = None
+    oauth_provider: str | None = None
+    email_verified: bool = False
+    oauth_created_at: datetime | None = None
+    oauth_updated_at: datetime | None = None
+
+    model_config = ConfigDict(extra="forbid")
+
+
+class UserCreateInternal(UserBase):
+    """Internal schema for user creation with hashed password."""
+
+    hashed_password: str
+    google_id: str | None = None
+    github_id: str | None = None
+    oauth_provider: str | None = None
+    email_verified: bool = False
+    oauth_created_at: datetime | None = None
+    oauth_updated_at: datetime | None = None
+
+
+class UserUpdate(BaseModel):
+    """Schema for updating 
user data.""" + + model_config = ConfigDict(extra="forbid") + + name: Annotated[ + str | None, + Field(min_length=2, max_length=30, examples=["User Userberg"], default=None), + ] + username: Annotated[ + str | None, + Field( + min_length=2, + max_length=20, + pattern=r"^[a-z0-9]+$", + examples=["userberg"], + default=None, + ), + ] + email: Annotated[EmailStr | None, Field(examples=["user.userberg@example.com"], default=None)] + profile_image_url: Annotated[ + str | None, + Field( + pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", + examples=["https://www.profileimageurl.com"], + default=None, + ), + ] + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None + email_verified: bool | None = None + oauth_updated_at: datetime | None = None + + +class UserUpdateInternal(UserUpdate): + """Internal schema for user updates.""" + + updated_at: datetime + + +class UserTierUpdate(BaseModel): + """Schema for updating a user's tier.""" + + tier_id: int + + +class UserDelete(BaseModel): + """Schema for soft-deleting a user.""" + + model_config = ConfigDict(extra="forbid") + + is_deleted: bool + deleted_at: datetime + + +class UserAnonymize(BaseModel): + """Schema for GDPR/LGPD compliant user anonymization. + + This schema includes all fields that need to be updated during + the user anonymization process for privacy compliance. 
+ """ + + model_config = ConfigDict(extra="forbid") + + name: str + username: str + hashed_password: str | None = None + profile_image_url: str | None = None + tier_id: int | None = None + is_superuser: bool = False + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None + email_verified: bool = False + oauth_created_at: datetime | None = None + oauth_updated_at: datetime | None = None + + +class UserRestoreDeleted(BaseModel): + """Schema for restoring a deleted user.""" + + is_deleted: bool diff --git a/backend/src/modules/user/service.py b/backend/src/modules/user/service.py new file mode 100644 index 00000000..ba8742b0 --- /dev/null +++ b/backend/src/modules/user/service.py @@ -0,0 +1,628 @@ +from datetime import UTC, datetime +from typing import Any, cast + +from fastcrud import JoinConfig +from fastcrud.types import GetMultiResponseDict +from sqlalchemy.exc import MultipleResultsFound, NoResultFound +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.utils import get_password_hash +from ...infrastructure.logging import get_logger +from ..common.exceptions import PermissionDeniedError, TierNotFoundError, UserExistsError, UserNotFoundError, ValidationError +from ..rate_limit.models import RateLimit +from ..rate_limit.schemas import RateLimitRead +from ..tier.crud import crud_tiers +from ..tier.models import Tier +from ..tier.schemas import TierRead +from .crud import crud_users +from .models import User +from .schemas import ( + User as UserSchema, +) +from .schemas import ( + UserAnonymize, + UserCreate, + UserCreateInternal, + UserRead, + UserTierUpdate, + UserUpdate, +) + +logger = get_logger() + + +class UserService: + """Service class for user-related operations. + + This service manages user accounts including creation, updates, authentication, + tier management, and permission handling. 
It provides comprehensive user + management functionality with support for soft deletion, tier-based access + control, and rate limiting through tier associations. + """ + + async def create(self, user: UserCreate, db: AsyncSession) -> dict[str, Any]: + """Create a new user account. + + Creates a new user with unique email and username validation. Automatically + hashes the password and stores user credentials securely. + + Args: + user: User creation data including email, username, and password. + db: Database session for the operation. + + Returns: + The created user data dictionary. + + Raises: + UserExistsError: If email or username already exists. + + Note: + Passwords are automatically hashed using secure password hashing. + Both email and username must be unique across the system. + + Example: + ```python + user_data = UserCreate( + email="user@example.com", + username="johndoe", + password="securepassword123" + ) + created_user = await service.create(user_data, db) + ``` + """ + email_exists = await crud_users.exists(db=db, email=user.email) + if email_exists: + raise UserExistsError("Email already registered") + + username_exists = await crud_users.exists(db=db, username=user.username) + if username_exists: + raise UserExistsError("Username already taken") + + user_internal_dict = user.model_dump() + user_internal_dict["hashed_password"] = get_password_hash(password=user_internal_dict["password"]) + del user_internal_dict["password"] + + user_internal = UserCreateInternal(**user_internal_dict) + created_user = await crud_users.create(db=db, object=user_internal, schema_to_select=UserRead) + if not created_user: + raise UserExistsError("Failed to create user") + return created_user + + async def get_paginated(self, db: AsyncSession, skip: int = 0, limit: int = 100) -> GetMultiResponseDict: + """Retrieve a paginated list of users. + + Gets users with pagination support, excluding soft-deleted accounts. 
+ Useful for administrative interfaces and user management. + + Args: + db: Database session for the operation. + skip: Number of records to skip for pagination. + limit: Maximum number of records to return. + + Returns: + Dictionary containing paginated user data with metadata. + + Raises: + ValueError: If database session is None. + + Note: + Only returns non-deleted users (is_deleted=False). + Returns data in format: {"data": [...], "count": int, "has_more": bool} + + Example: + ```python + users = await service.get_paginated(skip=0, limit=20, db=db) + for user in users["data"]: + print(f"User: {user['username']} - {user['email']}") + ``` + """ + if db is None: + raise ValueError("Database session cannot be None") + + return await crud_users.get_multi( + db=db, + offset=skip, + limit=limit, + schema_to_select=UserRead, + is_deleted=False, + ) + + async def get_by_username(self, username: str, db: AsyncSession) -> dict[str, Any]: + """Retrieve a user by username. + + Finds a user by their unique username, excluding soft-deleted accounts. + + Args: + username: Username to search for. + db: Database session for the operation. + + Returns: + User data dictionary if found. + + Raises: + UserNotFoundError: If no user exists with the given username. + + Note: + Only returns non-deleted users. Usernames are unique identifiers + commonly used for authentication and user references. + + Example: + ```python + user = await service.get_by_username("johndoe", db) + print(f"User email: {user['email']}") + ``` + """ + user = await crud_users.get( + db=db, + schema_to_select=UserRead, + username=username, + is_deleted=False, + ) + if not user: + raise UserNotFoundError(f"User with username '{username}' not found") + return user + + async def get_active_and_inactive_by_username(self, username: str, db: AsyncSession) -> dict[str, Any]: + """Retrieve a user by username. + + Finds a user by their username, including soft-deleted accounts. 
+ + Args: + username: Username to search for. + db: Database session for the operation. + + Returns: + User data dictionary if found. + + Note: + Usernames are unique identifiers commonly used for authentication and user references. + + Example: + ```python + user = await service.get_active_and_inactive_by_username("johndoe", db) + print(f"User email: {user['email']}") + ``` + """ + user = await crud_users.get(db=db, schema_to_select=UserRead, username=username) + if not user: + raise UserNotFoundError(f"User with username '{username}' not found") + return user + + async def get_by_email(self, email: str, db: AsyncSession) -> dict[str, Any]: + """Retrieve a user by email address. + + Finds a user by their unique email address, excluding soft-deleted accounts. + Returns the user model instance for authentication purposes. + + Args: + email: Email address to search for. + db: Database session for the operation. + + Returns: + User data dictionary if found. + + Raises: + UserNotFoundError: If no user exists with the given email. + + Note: + Only returns non-deleted users. Email addresses are unique identifiers + primarily used for authentication and account recovery. + + Example: + ```python + user = await service.get_by_email("user@example.com", db) + print(f"User ID: {user['id']}") + ``` + """ + user = await crud_users.get( + db=db, + schema_to_select=UserRead, + email=email, + is_deleted=False, + ) + if not user: + raise UserNotFoundError(f"User with email '{email}' not found") + return user + + async def update(self, user_id: int, user_update: UserUpdate, db: AsyncSession) -> dict[str, Any]: + """Update user information. + + Updates user fields with validation for unique constraints on email + and username. Only provided fields are updated. + + Args: + user_id: ID of the user to update. + user_update: Fields to update with new values. + db: Database session for the operation. + + Returns: + Updated user data dictionary. 
+
+        Raises:
+            UserNotFoundError: If the user doesn't exist.
+            UserExistsError: If email or username conflicts with existing users.
+
+        Note:
+            Validates uniqueness when updating email or username.
+            Only non-deleted users can be updated.
+
+        Example:
+            ```python
+            update_data = UserUpdate(
+                email="newemail@example.com",
+                name="John"
+            )
+            updated_user = await service.update(123, update_data, db)
+            ```
+        """
+        existing_user = await crud_users.get(db=db, id=user_id, is_deleted=False)
+        if not existing_user:
+            raise UserNotFoundError(f"User with ID {user_id} not found")
+
+        update_data = user_update.model_dump(exclude_unset=True)
+
+        if "email" in update_data and update_data["email"] != existing_user["email"]:
+            email_exists = await crud_users.exists(db=db, email=update_data["email"])
+            if email_exists:
+                raise UserExistsError("Email already registered")
+
+        if "username" in update_data and update_data["username"] != existing_user["username"]:
+            username_exists = await crud_users.exists(db=db, username=update_data["username"])
+            if username_exists:
+                raise UserExistsError("Username already taken")
+
+        updated_user = await crud_users.update(
+            db=db, object=user_update, id=user_id, return_columns=list(UserSchema.model_fields.keys())
+        )
+        if not updated_user:
+            raise UserNotFoundError(f"User with ID {user_id} not found")
+        return updated_user
+
+    async def check_update_permission(self, requester_user: dict[str, Any], target_username: str) -> bool:
+        """Check if user has permission to update another user.
+
+        Determines if the requesting user has permission to update the target user.
+        Superusers can update any user, regular users can only update themselves.
+
+        Args:
+            requester_user: User data of the user making the request.
+            target_username: Username of the user to be updated.
+
+        Returns:
+            True if the user has permission, False otherwise. 
+ + Note: + Permission rules: + - Superusers can update any user + - Regular users can only update their own profile + """ + if requester_user.get("is_superuser", False): + return True + + return requester_user.get("username") == target_username + + async def verify_user_permission( + self, requester_user: dict[str, Any], target_username: str, action_description: str = "perform this action" + ) -> None: + """Verify user has permission to perform an action on another user. + + Checks permissions and raises an exception if the user doesn't have + the required permissions for the specified action. + + Args: + requester_user: User data of the user making the request. + target_username: Username of the user to perform action on. + action_description: Description of the action for error messages. + + Raises: + PermissionDeniedError: If the user doesn't have permission. + + Note: + This method combines permission checking with error handling + for convenient use in API endpoints and business logic. + + Example: + ```python + await service.verify_user_permission( + current_user, "johndoe", "update profile" + ) + ``` + """ + has_permission = await self.check_update_permission(requester_user, target_username) + if not has_permission: + raise PermissionDeniedError(f"You don't have permission to {action_description} on this user") + + async def delete(self, user_id: int, db: AsyncSession) -> None: + """Soft delete a user. + + Marks a user as deleted without removing them from the database. + This preserves data integrity while making the user inaccessible. + + Args: + user_id: ID of the user to soft delete. + db: Database session for the operation. + + Raises: + UserNotFoundError: If the user doesn't exist. + ValidationError: If multiple users found with same ID. + + Note: + Soft deletion preserves user data for audit purposes while + preventing login and making the user invisible in normal queries. 
+ """ + try: + await crud_users.delete(db=db, id=user_id) + except NoResultFound: + raise UserNotFoundError(f"User with ID {user_id} not found") + except MultipleResultsFound: + raise ValidationError("Multiple users found with same ID") + + async def permanent_delete(self, user_id: int, db: AsyncSession) -> None: + """Permanently delete a user from the database. + + Completely removes a user from the database. This operation cannot + be undone and should be used with extreme caution. + + Args: + user_id: ID of the user to permanently delete. + db: Database session for the operation. + + Raises: + UserNotFoundError: If the user doesn't exist. + ValidationError: If multiple users found with same ID. + + Note: + This is a destructive operation that removes all user data. + Consider using soft deletion instead for most use cases. + """ + try: + await crud_users.db_delete(db=db, id=user_id) + except NoResultFound: + raise UserNotFoundError(f"User with ID {user_id} not found") + except MultipleResultsFound: + raise ValidationError("Multiple users found with same ID") + + async def anonymize_user(self, user_id: int, db: AsyncSession) -> None: + """GDPR/LGPD compliant user anonymization. + + Anonymizes user PII while preserving the user record and email for legal purposes. + Maintains referential integrity with conversations and other related data. + + This method: + 1. Removes all personally identifiable information (PII) + 2. Keeps email and timestamps for documented legal compliance + 3. Prevents future login by clearing password + 4. Maintains foreign key relationships intact + 5. Marks user as deleted with soft deletion + + Args: + user_id: ID of the user to anonymize. + db: Database session for the operation. + + Raises: + UserNotFoundError: If the user doesn't exist. + + Note: + This satisfies GDPR Article 17 (Right to Erasure) requirements while + preserving business data integrity. 
User conversations, analytics data, + and audit trails remain intact with anonymized user references. + + Email is retained for documented legal purposes (tax, AML, etc.) + as permitted under GDPR Article 17(3)(b). + + Example: + ```python + # GDPR compliant deletion + await service.anonymize_user(user_id=123, db=db) + ``` + """ + try: + existing_user = await crud_users.get(db=db, schema_to_select=UserRead, id=user_id) + if not existing_user: + raise UserNotFoundError(f"User with ID {user_id} not found") + + timestamp = int(datetime.now(UTC).timestamp()) + + logger.info( + "User anonymization requested", + extra={ + "user_id": user_id, + "email": existing_user.get("email"), + "action": "user_anonymization_start", + }, + ) + + anonymize_data = UserAnonymize( + name="[DELETED]", + username=f"del_{user_id}_{timestamp % 10000}", + hashed_password="DELETED_INVALID_HASH", + profile_image_url="https://deleted.com/deleted.jpg", + tier_id=None, + is_superuser=False, + google_id=None, + github_id=None, + oauth_provider=None, + email_verified=False, + oauth_created_at=None, + oauth_updated_at=None, + ) + + await crud_users.update(db=db, object=anonymize_data, commit=False, id=user_id) + await crud_users.delete(db=db, id=user_id) + + anonymized_fields = list(anonymize_data.model_dump(exclude_unset=True).keys()) + logger.info( + "User anonymization completed", + extra={ + "user_id": user_id, + "retained_data": ["email", "created_at", "updated_at", "id"], + "anonymized_fields": anonymized_fields, + "action": "user_anonymization_complete", + "foreign_keys_preserved": True, + }, + ) + + except NoResultFound: + logger.warning( + "User anonymization failed - user not found", + extra={"user_id": user_id, "action": "user_anonymization_failed", "reason": "user_not_found"}, + ) + raise UserNotFoundError(f"User with ID {user_id} not found") + + async def update_tier(self, user_id: int, tier_update: UserTierUpdate, db: AsyncSession) -> dict[str, Any]: + """Update a user's tier assignment. 
+ + Changes the tier assignment for a user, which affects their access + levels, permissions, and rate limits. + + Args: + user_id: ID of the user to update. + tier_update: New tier assignment data. + db: Database session for the operation. + + Returns: + Updated user data dictionary. + + Raises: + UserNotFoundError: If the user doesn't exist. + TierNotFoundError: If the specified tier doesn't exist. + + Note: + Tier changes immediately affect the user's access levels and + rate limits. This is typically an administrative operation. + + Example: + ```python + tier_update = UserTierUpdate(tier_id=2) + updated_user = await service.update_tier(123, tier_update, db) + ``` + """ + existing_user = await crud_users.get(db=db, id=user_id, is_deleted=False) + if not existing_user: + raise UserNotFoundError(f"User with ID {user_id} not found") + + tier_exists = await crud_tiers.exists(db=db, id=tier_update.tier_id) + if not tier_exists: + raise TierNotFoundError(f"Tier with ID {tier_update.tier_id} not found") + + updated_user = await crud_users.update( + db=db, object=tier_update, id=user_id, return_columns=list(UserSchema.model_fields.keys()) + ) + if not updated_user: + raise UserNotFoundError(f"User with ID {user_id} not found") + return updated_user + + async def get_rate_limits(self, user_id: int, db: AsyncSession) -> dict[str, Any]: + """Get rate limits for a user through their tier assignment. + + Retrieves all rate limits applicable to a user based on their tier + assignment. Uses database joins for efficient data retrieval. + + Args: + user_id: ID of the user to get rate limits for. + db: Database session for the operation. + + Returns: + Dictionary containing user data with nested rate limits. + + Raises: + UserNotFoundError: If the user doesn't exist. + + Note: + Rate limits are inherited from the user's tier. Users without + tier assignments have no rate limits. Uses advanced joins to + efficiently retrieve related data. 
+ + Example: + ```python + user_limits = await service.get_rate_limits(123, db) + for limit in user_limits.get("rate_limits", []): + print(f"Rate limit: {limit['resource']} - {limit['limit']}") + ``` + """ + user = await crud_users.get(db=db, id=user_id, is_deleted=False, schema_to_select=UserRead) + if not user: + raise UserNotFoundError(f"User with ID {user_id} not found") + + if user["tier_id"] is None: + user["rate_limits"] = [] + return user + + joins_config = [ + JoinConfig( + model=Tier, + join_on=User.tier_id == Tier.id, + join_prefix="tier_", + schema_to_select=TierRead, + join_type="left", + ), + JoinConfig( + model=RateLimit, + join_on=Tier.id == RateLimit.tier_id, + join_prefix="rate_limits_", + schema_to_select=RateLimitRead, + join_type="left", + relationship_type="one-to-many", + ), + ] + + result = await crud_users.get_joined( + db=db, schema_to_select=UserRead, joins_config=joins_config, nest_joins=True, id=user_id + ) + + if not result: + raise UserNotFoundError(f"User with ID {user_id} not found") + + return result + + async def get_user_with_tier(self, user_id: int, db: AsyncSession) -> dict[str, Any]: + """Get user with detailed tier information. + + Retrieves a user along with their complete tier information + using database joins for efficient data access. + + Args: + user_id: ID of the user to retrieve. + db: Database session for the operation. + + Returns: + Dictionary containing user data with nested tier information. + + Raises: + UserNotFoundError: If the user doesn't exist. + + Note: + Returns complete tier details including tier name, description, + and configuration. Users without tier assignments have tier=None. 
+ + Example: + ```python + user_data = await service.get_user_with_tier(123, db) + if user_data.get("tier"): + print(f"User tier: {user_data['tier']['name']}") + ``` + """ + user_dict = await crud_users.get(db=db, id=user_id, is_deleted=False, schema_to_select=UserRead) + if not user_dict: + raise UserNotFoundError(f"User with ID {user_id} not found") + + if user_dict.get("tier_id") is None: + user_dict["tier"] = None + return user_dict + + tier_exists = await crud_tiers.exists(db=db, id=user_dict["tier_id"]) + if not tier_exists: + user_dict["tier"] = None + return user_dict + + result = await crud_users.get_joined( + db=db, + join_model=Tier, + join_prefix="tier_", + schema_to_select=UserRead, + join_schema_to_select=TierRead, + id=user_id, + nest_joins=True, + ) + + return cast(dict[str, Any], result) diff --git a/src/app/core/db/__init__.py b/backend/tests/__init__.py similarity index 100% rename from src/app/core/db/__init__.py rename to backend/tests/__init__.py diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 00000000..1a785132 --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,354 @@ +import os +import secrets +import sys +from pathlib import Path +from unittest.mock import MagicMock + +import pytest +import pytest_asyncio +import redis as syncredis +import redis.asyncio as aioredis +from faker import Faker +from httpx import ASGITransport, AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker +from testcontainers.core.docker_client import DockerClient + +# mypy: disable-error-code="import-untyped" +from testcontainers.postgres import PostgresContainer + +from src.infrastructure.auth.session.backends.memory import MemorySessionStorage +from src.infrastructure.auth.session.dependencies import get_current_superuser, get_current_user +from src.infrastructure.auth.session.schemas import CSRFToken, SessionData +from 
src.infrastructure.auth.utils import get_password_hash +from src.infrastructure.config.settings import Settings, get_settings +from src.infrastructure.database.session import Base, async_session +from src.interfaces.main import app +from src.modules.tier.models import Tier +from src.modules.user.models import User + +os.environ["SQLITE_URI"] = ":memory:" +os.environ["SQLITE_ASYNC_PREFIX"] = "sqlite+aiosqlite:///" +os.environ["SECRET_KEY"] = "test_secret_key_for_tests" + +TEST_DATABASE_URL = get_settings().DATABASE_URL + +backend_dir = Path(__file__).parent.parent +sys.path.append(str(backend_dir)) + + +def is_docker_running() -> bool: + try: + DockerClient() + return True + except Exception: + return False + + +@pytest_asyncio.fixture(scope="session") +async def pg_container(): + """Create a PostgreSQL container for testing.""" + if not is_docker_running(): + pytest.skip("Docker is required, but not running") + + with PostgresContainer() as pg: + yield pg + + +@pytest_asyncio.fixture(scope="function") +async def test_db_url(pg_container): + """Create a proper asyncpg URL for PostgreSQL.""" + host = pg_container.get_container_host_ip() + port_to_expose = 5432 + if hasattr(pg_container, "port_to_expose"): + port_to_expose = pg_container.port_to_expose + port = pg_container.get_exposed_port(port_to_expose) + + db = "test" + user = "test" + password = "test" + if hasattr(pg_container, "POSTGRES_USER"): + user = pg_container.POSTGRES_USER + if hasattr(pg_container, "POSTGRES_PASSWORD"): + password = pg_container.POSTGRES_PASSWORD + if hasattr(pg_container, "POSTGRES_DB"): + db = pg_container.POSTGRES_DB + + return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{db}" + + +@pytest_asyncio.fixture(scope="function") +async def test_db_engine(test_db_url): + """Create a SQLAlchemy engine for testing.""" + engine = create_async_engine(test_db_url, echo=False) + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield engine + async 
with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest_asyncio.fixture(scope="function") +async def test_db(test_db_engine): + """Create a test database session.""" + test_session = sessionmaker(test_db_engine, class_=AsyncSession, expire_on_commit=False) + async with test_session() as session: # type: ignore + yield session + + +@pytest_asyncio.fixture(scope="function") +async def db_session(test_db): + """Alias for test_db.""" + yield test_db + + +@pytest_asyncio.fixture(scope="function") +async def client(test_db): + """Create a test client with an overridden database session.""" + app.dependency_overrides = {} + + async def override_get_db(): + yield test_db + + app.dependency_overrides[async_session] = override_get_db + + os.environ["POSTGRES_SERVER"] = "localhost" + + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as ac: + yield ac + + app.dependency_overrides = {} + + +@pytest_asyncio.fixture +async def test_tier(db_session: AsyncSession): + """Create a test tier.""" + tier = Tier(name="free", description="Free tier") + db_session.add(tier) + await db_session.commit() + return {"id": tier.id, "name": tier.name} + + +@pytest_asyncio.fixture +async def second_test_tier(db_session: AsyncSession): + """Create a second test tier.""" + tier = Tier(name="premium", description="Premium tier") + db_session.add(tier) + await db_session.commit() + return {"id": tier.id, "name": tier.name} + + +@pytest_asyncio.fixture +async def test_user(db_session: AsyncSession, test_tier: dict): + """Create a test user.""" + fake = Faker() + user = User( + name=fake.name(), + username=f"u{fake.random_int(10000, 99999)}", + email=fake.email(), + hashed_password=get_password_hash("Password123!"), + is_superuser=False, + tier_id=test_tier["id"], + profile_image_url="https://example.com/test.jpg", + ) + db_session.add(user) + await db_session.commit() + return { + "id": user.id, + "name": 
user.name, + "username": user.username, + "email": user.email, + "is_superuser": user.is_superuser, + "tier_id": user.tier_id, + "password": "Password123!", + "profile_image_url": user.profile_image_url, + } + + +@pytest_asyncio.fixture +async def test_user_2(db_session: AsyncSession, test_tier: dict): + """Second test user for permission tests.""" + fake = Faker() + user = User( + name=fake.name(), + username=f"u{fake.random_int(10000, 99999)}", + email=fake.email(), + hashed_password=get_password_hash("Password123!"), + is_superuser=False, + tier_id=test_tier["id"], + profile_image_url="https://example.com/test2.jpg", + ) + db_session.add(user) + await db_session.commit() + return { + "id": user.id, + "name": user.name, + "username": user.username, + "email": user.email, + "is_superuser": user.is_superuser, + "tier_id": user.tier_id, + "password": "Password123!", + } + + +@pytest_asyncio.fixture +async def test_superuser(db_session: AsyncSession, test_tier: dict): + """Create a test superuser.""" + fake = Faker() + user = User( + name=fake.name(), + username=f"su{fake.random_int(10000, 99999)}", + email=fake.email(), + hashed_password=get_password_hash("SuperuserPass123!"), + is_superuser=True, + tier_id=test_tier["id"], + profile_image_url="https://example.com/superuser.jpg", + ) + db_session.add(user) + await db_session.commit() + return { + "id": user.id, + "name": user.name, + "username": user.username, + "email": user.email, + "is_superuser": user.is_superuser, + "tier_id": user.tier_id, + "password": "SuperuserPass123!", + } + + +@pytest_asyncio.fixture +async def auth_client(client: AsyncClient, test_user: dict): + """Authenticated test client (regular user) — overrides get_current_user dependency.""" + + async def override_get_current_user(): + return test_user + + app.dependency_overrides[get_current_user] = override_get_current_user + return client + + +@pytest_asyncio.fixture +async def auth_client_2(client: AsyncClient, test_user_2: dict): + 
"""Authenticated test client for second user.""" + + async def override_get_current_user(): + return test_user_2 + + app.dependency_overrides[get_current_user] = override_get_current_user + return client + + +@pytest_asyncio.fixture +async def superuser_auth_client(client: AsyncClient, test_superuser: dict): + """Authenticated test client (superuser).""" + + async def override_get_current_user(): + return test_superuser + + async def override_get_current_superuser(): + return test_superuser + + app.dependency_overrides[get_current_user] = override_get_current_user + app.dependency_overrides[get_current_superuser] = override_get_current_superuser + return client + + +@pytest.fixture(autouse=True) +def mock_session_backend(monkeypatch): + """Use in-memory session backend instead of Redis during tests.""" + memory_storage: MemorySessionStorage[SessionData] = MemorySessionStorage(prefix="session:", expiration=1800) + memory_csrf_storage: MemorySessionStorage[CSRFToken] = MemorySessionStorage(prefix="csrf:", expiration=1800) + + def override_session_dependency(backend, model_type, **kwargs): + if model_type == CSRFToken: + return memory_csrf_storage + return memory_storage + + async def mock_execute(self): + return [True] + + async def mock_create(self, data): + session_id = secrets.token_hex(16) + self.data[f"{self.prefix}{session_id}"] = data.model_dump() + return session_id + + setattr(memory_storage, "execute", mock_execute) + setattr(memory_csrf_storage, "execute", mock_execute) + setattr(memory_storage, "create", mock_create) + setattr(memory_csrf_storage, "create", mock_create) + + monkeypatch.setattr("src.infrastructure.auth.session.storage.get_session_storage", override_session_dependency) + monkeypatch.setattr("src.infrastructure.auth.session.manager.get_session_storage", override_session_dependency) + monkeypatch.setenv("SESSION_BACKEND", "memory") + + +@pytest.fixture(autouse=True) +def patch_redis_pipeline_for_tests(monkeypatch): + """Patch Redis pipeline 
so tests don't need a live Redis.""" + + class MockPipeline: + def __init__(self, *args, **kwargs): + self.commands = [] + + def execute(self, *args, **kwargs): + return [True for _ in self.commands] + + async def aexecute(self, *args, **kwargs): + return [True for _ in self.commands] + + def set(self, *args, **kwargs): + self.commands.append(("set", args, kwargs)) + return self + + def sadd(self, *args, **kwargs): + self.commands.append(("sadd", args, kwargs)) + return self + + def srem(self, *args, **kwargs): + self.commands.append(("srem", args, kwargs)) + return self + + def expire(self, *args, **kwargs): + self.commands.append(("expire", args, kwargs)) + return self + + def delete(self, *args, **kwargs): + self.commands.append(("delete", args, kwargs)) + return self + + monkeypatch.setattr(aioredis.Redis, "pipeline", MockPipeline) + monkeypatch.setattr(syncredis.Redis, "pipeline", MockPipeline) + + +@pytest.fixture +def mock_rate_limit_settings_fail_open(): + """Mock settings with fail_open=True for rate limiter tests.""" + settings = MagicMock(spec=Settings) + settings.RATE_LIMITER_ENABLED = True + settings.RATE_LIMITER_FAIL_OPEN = True + settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + return settings + + +@pytest.fixture +def mock_rate_limit_settings_fail_closed(): + """Mock settings with fail_open=False for rate limiter tests.""" + settings = MagicMock(spec=Settings) + settings.RATE_LIMITER_ENABLED = True + settings.RATE_LIMITER_FAIL_OPEN = False + settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + return settings + + +@pytest.fixture(autouse=True) +def mock_oauth_settings(monkeypatch): + """Mock OAuth settings for testing.""" + monkeypatch.setenv("OAUTH_REDIRECT_BASE_URL", "http://localhost:8000") + monkeypatch.setenv("OAUTH_GOOGLE_CLIENT_ID", "mock-google-client-id") + monkeypatch.setenv("OAUTH_GOOGLE_CLIENT_SECRET", "mock-google-client-secret") + 
monkeypatch.setenv("OAUTH_GITHUB_CLIENT_ID", "mock-github-client-id") + monkeypatch.setenv("OAUTH_GITHUB_CLIENT_SECRET", "mock-github-client-secret") diff --git a/src/app/core/exceptions/__init__.py b/backend/tests/integration/api/__init__.py similarity index 100% rename from src/app/core/exceptions/__init__.py rename to backend/tests/integration/api/__init__.py diff --git a/src/app/core/utils/__init__.py b/backend/tests/integration/api/v1/__init__.py similarity index 100% rename from src/app/core/utils/__init__.py rename to backend/tests/integration/api/v1/__init__.py diff --git a/src/app/core/worker/__init__.py b/backend/tests/integration/api/v1/users/__init__.py similarity index 100% rename from src/app/core/worker/__init__.py rename to backend/tests/integration/api/v1/users/__init__.py diff --git a/backend/tests/integration/api/v1/users/helpers.py b/backend/tests/integration/api/v1/users/helpers.py new file mode 100644 index 00000000..31702d1e --- /dev/null +++ b/backend/tests/integration/api/v1/users/helpers.py @@ -0,0 +1,51 @@ +"""Helper functions for user API tests.""" + +import random +from datetime import datetime + + +def generate_unique_user_data(prefix: str = "user") -> dict: + """Generate unique user data for testing.""" + timestamp = int(datetime.now().timestamp()) + random_suffix = random.randint(1000, 9999) + + return { + "username": f"{prefix}_user_{timestamp}_{random_suffix}", + "email": f"{prefix}_{timestamp}_{random_suffix}@example.com", + "oauth_provider": random.choice(["google", "github"]), + "oauth_id": f"oauth_{timestamp}_{random_suffix}", + "first_name": f"First{random_suffix}", + "last_name": f"Last{random_suffix}", + "is_active": True, + "is_superuser": False, + } + + +def generate_superuser_data(prefix: str = "admin") -> dict: + """Generate superuser data for testing.""" + data = generate_unique_user_data(prefix) + data.update({"is_superuser": True, "username": f"admin_{data['username']}"}) + return data + + +def 
generate_oauth_user_data(provider: str = "google", prefix: str = "oauth") -> dict: + """Generate OAuth user data for specific provider.""" + data = generate_unique_user_data(prefix) + data.update({"oauth_provider": provider, "oauth_id": f"{provider}_{int(datetime.now().timestamp())}"}) + return data + + +def generate_bulk_users(count: int, prefix: str = "bulk") -> list[dict]: + """Generate multiple user test data.""" + return [generate_unique_user_data(f"{prefix}_{i}") for i in range(count)] + + +def generate_test_user_update_data() -> dict: + """Generate user update data for testing.""" + timestamp = int(datetime.now().timestamp()) + + return { + "first_name": f"UpdatedFirst{timestamp}", + "last_name": f"UpdatedLast{timestamp}", + "username": f"updated_user_{timestamp}", + } diff --git a/backend/tests/integration/api/v1/users/test_create.py b/backend/tests/integration/api/v1/users/test_create.py new file mode 100644 index 00000000..93551738 --- /dev/null +++ b/backend/tests/integration/api/v1/users/test_create.py @@ -0,0 +1,100 @@ +import logging +import uuid + +import pytest +from faker import Faker +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from src.modules.user.models import User + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +fake = Faker() +pytestmark = pytest.mark.asyncio + + +def generate_unique_user_data(prefix="user"): + """Generate unique user data for testing.""" + unique_id = uuid.uuid4().hex[:6] + return { + "name": f"Test {prefix.capitalize()} {unique_id}", + "username": f"{prefix}{unique_id}", + "email": f"{prefix}.user.{unique_id}@example.com", + "password": "Password123!", + } + + +async def test_create_user_success(client: AsyncClient, db_session: AsyncSession): + """Test successful user creation.""" + user_data = generate_unique_user_data() + + logger.info(f"Testing user creation with username: {user_data['username']}") + response = await client.post("/api/v1/users/", 
json=user_data) + + assert response.status_code == 201 + data = response.json() + assert data["username"] == user_data["username"] + assert data["email"] == user_data["email"] + assert "id" in data + assert "password" not in data + assert "hashed_password" not in data + + +async def test_create_user_invalid_email(client: AsyncClient, db_session: AsyncSession): + """Test user creation with invalid email format.""" + user_data = generate_unique_user_data() + user_data["email"] = "invalid-email" + + logger.info("Testing user creation with invalid email") + response = await client.post("/api/v1/users/", json=user_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_create_user_duplicate_username(client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test user creation with duplicate username.""" + user_data = generate_unique_user_data() + user_data["username"] = test_user["username"] + + logger.info(f"Testing user creation with duplicate username: {user_data['username']}") + response = await client.post("/api/v1/users/", json=user_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_create_user_duplicate_email(client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test user creation with duplicate email.""" + user_data = generate_unique_user_data() + user_data["email"] = test_user["email"] + + logger.info(f"Testing user creation with duplicate email: {user_data['email']}") + response = await client.post("/api/v1/users/", json=user_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_create_superuser(superuser_auth_client: AsyncClient, db_session: AsyncSession): + """Test superuser creating another superuser via API and database.""" + user_data = generate_unique_user_data("admin") + + logger.info(f"Testing user creation with username: 
{user_data['username']}") + response = await superuser_auth_client.post("/api/v1/users/", json=user_data) + + assert response.status_code == 201 + created_user = response.json() + + user_in_db = await db_session.get(User, created_user["id"]) + assert user_in_db is not None, "User not found in database" + user_in_db.is_superuser = True + await db_session.commit() + await db_session.refresh(user_in_db) + + assert user_in_db.is_superuser is True diff --git a/backend/tests/integration/api/v1/users/test_delete.py b/backend/tests/integration/api/v1/users/test_delete.py new file mode 100644 index 00000000..9d9b9d8c --- /dev/null +++ b/backend/tests/integration/api/v1/users/test_delete.py @@ -0,0 +1,175 @@ +import logging + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from .test_create import generate_unique_user_data + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +pytestmark = pytest.mark.asyncio + + +async def test_soft_delete_success( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test successful soft deletion of user account.""" + username = test_user["username"] + + logger.info(f"Testing soft deletion for user: {username}") + response = await auth_client.delete(f"/api/v1/users/{username}") + + assert response.status_code == 200 + data = response.json() + assert data["message"] == "User account deactivated" + + get_response = await auth_client.get(f"/api/v1/users/{username}") + assert get_response.status_code == 404 + + +async def test_soft_delete_unauthorized( + client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test soft deletion without authentication.""" + username = test_user["username"] + + response = await client.delete(f"/api/v1/users/{username}") + + assert response.status_code == 401 + data = response.json() + assert "not authenticated" in data["detail"].lower() + + +async def test_soft_delete_wrong_user( + 
auth_client: AsyncClient, + client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test that users cannot delete other users' accounts.""" + other_user_data = generate_unique_user_data("other") + create_response = await client.post("/api/v1/users/", json=other_user_data) + assert create_response.status_code == 201 + other_username = other_user_data["username"] + + response = await auth_client.delete(f"/api/v1/users/{other_username}") + + assert response.status_code == 403 + data = response.json() + assert "permission" in data["detail"].lower() + + +async def test_soft_delete_nonexistent_user( + auth_client: AsyncClient, + db_session: AsyncSession, +): + """Test soft deletion of non-existent user.""" + response = await auth_client.delete("/api/v1/users/nonexistentuser") + + assert response.status_code == 404 + data = response.json() + assert "not found" in data["detail"].lower() + + +async def test_permanent_delete_success( + superuser_auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test successful permanent deletion by superuser.""" + username = test_user["username"] + + logger.info(f"Testing permanent deletion for user: {username}") + response = await superuser_auth_client.delete(f"/api/v1/users/db/{username}") + + assert response.status_code == 200 + data = response.json() + assert data["message"] == "User data anonymized in compliance with GDPR" + + get_response = await superuser_auth_client.get(f"/api/v1/users/{username}") + assert get_response.status_code == 404 + + +async def test_permanent_delete_unauthorized( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test that non-admin users cannot permanently delete accounts.""" + username = test_user["username"] + + response = await auth_client.delete(f"/api/v1/users/db/{username}") + + assert response.status_code == 403 + data = response.json() + assert "insufficient privileges" in data["detail"].lower() + + +async def 
test_permanent_delete_inactive_user( + superuser_auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test permanent deletion of soft-deleted accounts.""" + username = test_user["username"] + + logger.info(f"Testing soft deletion for user: {username}") + + response_soft_delete = await superuser_auth_client.delete(f"/api/v1/users/{username}") + + assert response_soft_delete.status_code == 200 + data_soft = response_soft_delete.json() + assert data_soft["message"] == "User account deactivated" + + logger.info(f"Testing permanent deletion for user: {username}") + + response_perma_delete = await superuser_auth_client.delete(f"/api/v1/users/db/{username}") + assert response_perma_delete.status_code == 200 + data_perma = response_perma_delete.json() + assert data_perma["message"] == "User data anonymized in compliance with GDPR" + + get_response = await superuser_auth_client.get(f"/api/v1/users/active-and-inactive/{username}") + assert get_response.status_code == 404 + + +async def test_permanent_delete_nonexistent_user( + superuser_auth_client: AsyncClient, + db_session: AsyncSession, +): + """Test permanent deletion of non-existent user.""" + response = await superuser_auth_client.delete("/api/v1/users/db/nonexistentuser") + + assert response.status_code == 404 + data = response.json() + assert "not found" in data["detail"].lower() + + +async def test_delete_cascade_effects( + auth_client: AsyncClient, + superuser_auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test cascade effects of user deletion.""" + username = test_user["username"] + + tier_response = await auth_client.get(f"/api/v1/users/{username}/tier") + assert tier_response.status_code == 200 + + rate_limits_response = await auth_client.get(f"/api/v1/users/{username}/rate-limits") + assert rate_limits_response.status_code == 200 + 
+ tier_response = await superuser_auth_client.get(f"/api/v1/users/{username}/tier") + assert tier_response.status_code == 404 + + rate_limits_response = await superuser_auth_client.get(f"/api/v1/users/{username}/rate-limits") + assert rate_limits_response.status_code == 404 diff --git a/backend/tests/integration/api/v1/users/test_read.py b/backend/tests/integration/api/v1/users/test_read.py new file mode 100644 index 00000000..f192d573 --- /dev/null +++ b/backend/tests/integration/api/v1/users/test_read.py @@ -0,0 +1,101 @@ +import logging + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +pytestmark = pytest.mark.asyncio + + +async def test_get_user_by_username_success(auth_client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test successful retrieval of a user by username.""" + logger.info("Testing successful user retrieval by username") + username = test_user["username"] + response = await auth_client.get(f"/api/v1/users/{username}") + + assert response.status_code == 200 + data = response.json() + assert data["username"] == username + assert "id" in data + assert "email" in data + assert "name" in data + + +async def test_get_user_by_username_not_found(auth_client: AsyncClient, db_session: AsyncSession): + """Test 404 when user not found.""" + logger.info("Testing 404 when user not found") + response = await auth_client.get("/api/v1/users/nonexistentuser") + + assert response.status_code == 404 + data = response.json() + assert "detail" in data + + +async def test_get_users_unauthorized(client: AsyncClient, db_session: AsyncSession): + """Test that unauthorized users cannot access users list.""" + logger.info("Testing unauthorized access to users list") + response = await client.get("/api/v1/users/") + + assert response.status_code == 401 + data = response.json() + assert "detail" in data + + +async def 
test_get_users_superuser_success(superuser_auth_client: AsyncClient, db_session: AsyncSession): + """Test that superuser can access users list.""" + logger.info("Testing superuser access to users list") + response = await superuser_auth_client.get("/api/v1/users/") + + assert response.status_code == 200 + data = response.json() + assert "data" in data + assert isinstance(data["data"], list) + assert "total_count" in data + assert "page" in data + assert "items_per_page" in data + + +async def test_get_users_pagination(superuser_auth_client: AsyncClient, db_session: AsyncSession): + """Test pagination of users list.""" + logger.info("Testing users list pagination") + + response = await superuser_auth_client.get("/api/v1/users/?page=1&items_per_page=5") + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) <= 5 + assert data["page"] == 1 + assert data["items_per_page"] == 5 + + +async def test_get_current_user_profile(auth_client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test retrieval of current user's profile.""" + logger.info("Testing current user profile retrieval") + response = await auth_client.get("/api/v1/users/me") + + assert response.status_code == 200 + data = response.json() + assert data["username"] == test_user["username"] + assert data["email"] == test_user["email"] + + +async def test_get_user_tier_info(auth_client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test retrieval of user's tier information.""" + logger.info("Testing user tier information retrieval") + response = await auth_client.get(f"/api/v1/users/{test_user['username']}/tier") + + assert response.status_code == 200 + data = response.json() + assert "tier" in data + + +async def test_get_user_rate_limits(auth_client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test retrieval of user's rate limits.""" + logger.info("Testing user rate limits retrieval") + response = await 
auth_client.get(f"/api/v1/users/{test_user['username']}/rate-limits") + + assert response.status_code == 200 + data = response.json() + assert "rate_limits" in data diff --git a/backend/tests/integration/api/v1/users/test_update.py b/backend/tests/integration/api/v1/users/test_update.py new file mode 100644 index 00000000..d6ce6337 --- /dev/null +++ b/backend/tests/integration/api/v1/users/test_update.py @@ -0,0 +1,175 @@ +import logging + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from .test_create import generate_unique_user_data + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +pytestmark = pytest.mark.asyncio + + +async def test_update_user_profile_success( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test successful profile update.""" + username = test_user["username"] + update_data = { + "name": "Updated Name", + "email": "updated.email@example.com", + "profile_image_url": "https://example.com/new-image.jpg", + } + + logger.info(f"Testing successful profile update for user: {username}, user_id: {test_user['id']}") + response = await auth_client.patch(f"/api/v1/users/{username}", json=update_data) + + if response.status_code != 200: + logger.error(f"Response status: {response.status_code}, body: {response.text}") + assert response.status_code == 200 + data = response.json() + assert "message" in data + assert data["message"] == "User updated successfully" + + get_response = await auth_client.get(f"/api/v1/users/{username}") + assert get_response.status_code == 200 + user_data = get_response.json() + assert user_data["name"] == update_data["name"] + assert user_data["email"] == update_data["email"] + + +async def test_update_user_profile_invalid_email( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test update with invalid email format.""" + username = test_user["username"] + update_data = {"email": 
"invalid-email"} + + logger.info(f"Testing invalid email update for user: {username}") + response = await auth_client.patch(f"/api/v1/users/{username}", json=update_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_update_user_profile_unauthorized(client: AsyncClient, db_session: AsyncSession, test_user: dict): + """Test update without authentication.""" + username = test_user["username"] + update_data = {"name": "Unauthorized Update"} + + logger.info("Testing unauthorized profile update") + response = await client.patch(f"/api/v1/users/{username}", json=update_data) + + assert response.status_code == 401 + data = response.json() + assert "not authenticated" in data["detail"].lower() + + +async def test_update_user_profile_wrong_user( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test that users cannot update other users' profiles.""" + other_user_data = generate_unique_user_data("other") + create_response = await auth_client.post("/api/v1/users/", json=other_user_data) + assert create_response.status_code == 201 + other_username = other_user_data["username"] + + update_data = {"name": "Unauthorized Update"} + response = await auth_client.patch(f"/api/v1/users/{other_username}", json=update_data) + + assert response.status_code == 403 + data = response.json() + assert "permission" in data["detail"].lower() + + +async def test_update_user_profile_duplicate_email( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test update with duplicate email fails.""" + other_user_data = generate_unique_user_data("other") + create_response = await auth_client.post("/api/v1/users/", json=other_user_data) + assert create_response.status_code == 201 + + username = test_user["username"] + update_data = {"email": other_user_data["email"]} + + logger.info(f"Testing duplicate email update for user: {username}") + response = await 
auth_client.patch(f"/api/v1/users/{username}", json=update_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_update_user_profile_duplicate_username( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, +): + """Test update with duplicate username fails.""" + other_user_data = generate_unique_user_data("other") + create_response = await auth_client.post("/api/v1/users/", json=other_user_data) + assert create_response.status_code == 201 + + username = test_user["username"] + update_data = {"username": other_user_data["username"]} + + logger.info(f"Testing duplicate username update for user: {username}") + response = await auth_client.patch(f"/api/v1/users/{username}", json=update_data) + + assert response.status_code == 422 + data = response.json() + assert "detail" in data + + +async def test_update_user_tier_superuser( + superuser_auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, + second_test_tier: dict, +): + """Test that superuser can update user's tier.""" + username = test_user["username"] + update_data = {"tier_id": second_test_tier["id"]} + + logger.info(f"Testing tier update by superuser for user: {username}") + response = await superuser_auth_client.patch(f"/api/v1/users/{username}/tier", json=update_data) + + assert response.status_code == 200 + data = response.json() + assert "message" in data + assert data["message"] == "User tier updated successfully" + + get_response = await superuser_auth_client.get(f"/api/v1/users/{username}") + assert get_response.status_code == 200 + user_data = get_response.json() + assert user_data["tier_id"] == second_test_tier["id"] + + +async def test_update_user_tier_regular_user( + auth_client: AsyncClient, + db_session: AsyncSession, + test_user: dict, + second_test_tier: dict, +): + """Test that regular users cannot update their tier.""" + username = test_user["username"] + update_data = {"tier_id": 
second_test_tier["id"]} + + logger.info(f"Testing tier update by regular user: {username}") + response = await auth_client.patch(f"/api/v1/users/{username}/tier", json=update_data) + + assert response.status_code == 403 + data = response.json() + assert any(word in data["detail"].lower() for word in ["permission", "privileges", "authorized"]) diff --git a/backend/tests/integration/auth/helpers.py b/backend/tests/integration/auth/helpers.py new file mode 100644 index 00000000..d702a3c8 --- /dev/null +++ b/backend/tests/integration/auth/helpers.py @@ -0,0 +1,71 @@ +"""Helper functions for auth API tests.""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock, MagicMock + +from src.infrastructure.auth.oauth.provider import AbstractOAuthProvider +from src.infrastructure.auth.oauth.schemas import OAuthState +from src.infrastructure.auth.session.manager import SessionManager +from src.infrastructure.auth.session.storage import AbstractSessionStorage + + +def create_mock_oauth_provider(name: str = "google") -> AsyncMock: + """Create a mock OAuth provider for testing.""" + mock_provider = AsyncMock(spec=AbstractOAuthProvider) + mock_provider.name = name + mock_provider.get_authorization_url = AsyncMock( + return_value={ + "url": f"https://accounts.{name}.com/o/oauth2/v2/auth?dummy=params", + "state": "test-state-value", + "code_verifier": "test-code-verifier", + } + ) + mock_provider.exchange_code = AsyncMock(return_value={}) + return mock_provider + + +def create_mock_oauth_state_storage() -> AsyncMock: + """Create a mock OAuth state storage for testing.""" + mock_storage = AsyncMock(spec=AbstractSessionStorage) + mock_storage.create = AsyncMock(return_value="test-state-value") + mock_storage.delete = AsyncMock(return_value=None) + return mock_storage + + +def create_mock_session_manager() -> AsyncMock: + """Create a mock session manager for testing.""" + mock_manager = AsyncMock(spec=SessionManager) + mock_manager.create_session = 
AsyncMock(return_value=("session-id", "csrf-token")) + mock_manager.set_session_cookies = MagicMock() + return mock_manager + + +def create_oauth_state(provider: str = "google", state: str = "test-state-value") -> OAuthState: + """Create an OAuth state object for testing.""" + return OAuthState( + state=state, + provider=provider, + redirect_to="/", + code_verifier="test-code-verifier", + ) + + +def create_mock_session_data(user_id: int = 1): + """Create mock session data for testing.""" + mock_session = MagicMock() + mock_session.user_id = user_id + mock_session.created_at = datetime(2023, 1, 1, 0, 0, 0, tzinfo=UTC) + mock_session.last_activity = datetime(2023, 1, 1, 1, 0, 0, tzinfo=UTC) + return mock_session + + +def create_mock_user_data( + user_id: int = 1, username: str = "testuser", email: str = "test@example.com", provider: str = "google" +) -> dict: + """Create mock user data for testing.""" + return { + "id": user_id, + "username": username, + "email": email, + "oauth_provider": provider, + } diff --git a/backend/tests/integration/auth/test_endpoints.py b/backend/tests/integration/auth/test_endpoints.py new file mode 100644 index 00000000..d550151e --- /dev/null +++ b/backend/tests/integration/auth/test_endpoints.py @@ -0,0 +1,203 @@ +"""Tests for OAuth authentication endpoints.""" + +from datetime import UTC, datetime +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from httpx import AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession + +from src.infrastructure.auth.oauth.dependencies import ( + get_google_provider, + get_oauth_state, + get_oauth_state_storage, +) +from src.infrastructure.auth.oauth.provider import AbstractOAuthProvider +from src.infrastructure.auth.oauth.schemas import OAuthState +from src.infrastructure.auth.session.dependencies import get_current_session_data, get_session_manager +from src.infrastructure.auth.session.manager import SessionManager +from 
src.infrastructure.auth.session.storage import AbstractSessionStorage +from src.infrastructure.database.session import async_session +from src.interfaces.main import app + + +@pytest.mark.asyncio +async def test_oauth_google_login(client: AsyncClient): + """Test the OAuth Google login initiation endpoint.""" + mock_provider = AsyncMock(spec=AbstractOAuthProvider) + mock_provider.name = "google" + mock_provider.get_authorization_url = AsyncMock( + return_value={ + "url": "https://accounts.google.com/o/oauth2/v2/auth?dummy=params", + "state": "test-state-value", + "code_verifier": "test-code-verifier", + } + ) + + mock_state_storage = AsyncMock(spec=AbstractSessionStorage) + mock_state_storage.create = AsyncMock(return_value="test-state-value") + + original_deps = app.dependency_overrides.copy() + + try: + app.dependency_overrides[get_google_provider] = lambda: mock_provider + app.dependency_overrides[get_oauth_state_storage] = lambda: mock_state_storage + + response = await client.get("/api/v1/auth/oauth/google") + + print(f"Response status: {response.status_code}") + if response.status_code != 200: + print(f"Response body: {response.text}") + + assert response.status_code == 200 + assert "url" in response.json() + assert response.json()["url"] == "https://accounts.google.com/o/oauth2/v2/auth?dummy=params" + + mock_provider.get_authorization_url.assert_called_once() + mock_state_storage.create.assert_called_once() + finally: + app.dependency_overrides = original_deps + + +@pytest.mark.asyncio +async def test_oauth_callback_invalid_state(client: AsyncClient): + """Test the OAuth callback endpoint with an invalid state parameter.""" + original_deps = app.dependency_overrides.copy() + + try: + + async def mock_get_oauth_state_func(state: str, storage: Any) -> None: + return None + + mock_provider = AsyncMock(spec=AbstractOAuthProvider) + mock_provider.name = "google" + mock_provider.exchange_code = AsyncMock(return_value={}) + + mock_state_storage = 
AsyncMock(spec=AbstractSessionStorage) + mock_state_storage.delete = AsyncMock(return_value=None) + + mock_session_manager = AsyncMock(spec=SessionManager) + mock_session_manager.create_session = AsyncMock(return_value=("session-id", "csrf-token")) + mock_session_manager.set_session_cookies = MagicMock() + + app.dependency_overrides[get_oauth_state] = mock_get_oauth_state_func + app.dependency_overrides[get_google_provider] = lambda: mock_provider + app.dependency_overrides[get_oauth_state_storage] = lambda: mock_state_storage + app.dependency_overrides[get_session_manager] = lambda: mock_session_manager + + response = await client.get( + "/api/v1/auth/oauth/callback/google", + params={"code": "test-code", "state": "invalid-state"}, + ) + + assert response.status_code in [302, 500] + + response = await client.get( + "/api/v1/auth/oauth/callback/google", + params={"code": "test-code", "state": "invalid-state", "response_format": "json"}, + ) + + assert response.status_code in [400, 500] + finally: + app.dependency_overrides = original_deps + + +@pytest.mark.asyncio +async def test_oauth_callback_provider_mismatch(client: AsyncClient): + """Test the OAuth callback endpoint with a state parameter for a different provider.""" + original_deps = app.dependency_overrides.copy() + + try: + mock_state = OAuthState( + state="test-state-value", + provider="github", + redirect_to="/", + code_verifier="test-code-verifier", + ) + + async def mock_get_oauth_state_func(state: str, storage: Any) -> OAuthState: + return mock_state + + mock_provider = AsyncMock(spec=AbstractOAuthProvider) + mock_provider.name = "google" + mock_provider.exchange_code = AsyncMock(return_value={}) + + mock_state_storage = AsyncMock(spec=AbstractSessionStorage) + mock_state_storage.delete = AsyncMock(return_value=None) + + mock_session_manager = AsyncMock(spec=SessionManager) + mock_session_manager.create_session = AsyncMock(return_value=("session-id", "csrf-token")) + 
mock_session_manager.set_session_cookies = MagicMock() + + app.dependency_overrides[get_oauth_state] = mock_get_oauth_state_func + app.dependency_overrides[get_google_provider] = lambda: mock_provider + app.dependency_overrides[get_oauth_state_storage] = lambda: mock_state_storage + app.dependency_overrides[get_session_manager] = lambda: mock_session_manager + + response = await client.get( + "/api/v1/auth/oauth/callback/google", + params={"code": "test-code", "state": "test-state-value"}, + ) + + assert response.status_code in [302, 500] + + response = await client.get( + "/api/v1/auth/oauth/callback/google", + params={"code": "test-code", "state": "test-state-value", "response_format": "json"}, + ) + + assert response.status_code in [400, 500] + finally: + app.dependency_overrides = original_deps + + +@pytest.mark.asyncio +async def test_check_auth_authenticated(client: AsyncClient, db_session: AsyncSession): + """Test the check-auth endpoint when the user is authenticated.""" + mock_session = MagicMock() + mock_session.user_id = 1 + mock_session.created_at = datetime(2023, 1, 1, 0, 0, 0, tzinfo=UTC) + mock_session.last_activity = datetime(2023, 1, 1, 1, 0, 0, tzinfo=UTC) + + mock_user = { + "id": 1, + "username": "testuser", + "email": "test@example.com", + "oauth_provider": "google", + } + + original_deps = app.dependency_overrides.copy() + + try: + app.dependency_overrides[get_current_session_data] = lambda: mock_session + app.dependency_overrides[async_session] = lambda: db_session + + with patch("src.modules.user.crud.crud_users.get", return_value=mock_user): + response = await client.get("/api/v1/auth/check-auth") + + assert response.status_code == 200 + assert response.json()["authenticated"] is True + assert response.json()["user"]["id"] == 1 + assert response.json()["user"]["username"] == "testuser" + assert response.json()["user"]["oauth_provider"] == "google" + assert "session" in response.json() + finally: + app.dependency_overrides = original_deps + 
+ +@pytest.mark.asyncio +async def test_check_auth_not_authenticated(client: AsyncClient): + """Test the check-auth endpoint when the user is not authenticated.""" + original_deps = app.dependency_overrides.copy() + + try: + app.dependency_overrides[get_current_session_data] = lambda: None + + response = await client.get("/api/v1/auth/check-auth") + + assert response.status_code == 200 + assert response.json()["authenticated"] is False + assert response.json()["message"] == "Not authenticated" + finally: + app.dependency_overrides = original_deps diff --git a/backend/tests/integration/conftest.py b/backend/tests/integration/conftest.py new file mode 100644 index 00000000..c4ab6697 --- /dev/null +++ b/backend/tests/integration/conftest.py @@ -0,0 +1,28 @@ +"""Configuration for integration tests.""" + +import pytest + + +def pytest_configure(config): + """Configure pytest markers for integration tests.""" + config.addinivalue_line("markers", "slow: marks tests as slow (deselect with '-m \"not slow\"')") + config.addinivalue_line("markers", "integration: marks tests as integration tests") + config.addinivalue_line("markers", "performance: marks tests as performance tests") + config.addinivalue_line("markers", "stress: marks tests as stress tests") + + +def pytest_collection_modifyitems(config, items): + """Automatically mark tests based on their location and name.""" + for item in items: + # Mark all tests in integration folder as integration tests + if "integration" in str(item.fspath): + item.add_marker(pytest.mark.integration) + + # Mark performance tests + if "performance" in str(item.fspath) or "performance" in item.name: + item.add_marker(pytest.mark.performance) + + # Mark stress tests + if "stress" in item.name.lower(): + item.add_marker(pytest.mark.stress) + item.add_marker(pytest.mark.slow) diff --git a/src/app/crud/__init__.py b/backend/tests/unit/__init__.py similarity index 100% rename from src/app/crud/__init__.py rename to backend/tests/unit/__init__.py 
diff --git a/backend/tests/unit/infrastructure/__init__.py b/backend/tests/unit/infrastructure/__init__.py new file mode 100644 index 00000000..20c2aebc --- /dev/null +++ b/backend/tests/unit/infrastructure/__init__.py @@ -0,0 +1 @@ +"""Infrastructure tests package.""" diff --git a/src/app/schemas/__init__.py b/backend/tests/unit/infrastructure/auth/__init__.py similarity index 100% rename from src/app/schemas/__init__.py rename to backend/tests/unit/infrastructure/auth/__init__.py diff --git a/backend/tests/unit/infrastructure/auth/oauth/__init__.py b/backend/tests/unit/infrastructure/auth/oauth/__init__.py new file mode 100644 index 00000000..351f6549 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/oauth/__init__.py @@ -0,0 +1 @@ +"""Tests for OAuth implementation.""" diff --git a/backend/tests/unit/infrastructure/auth/oauth/test_factory.py b/backend/tests/unit/infrastructure/auth/oauth/test_factory.py new file mode 100644 index 00000000..061613ee --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/oauth/test_factory.py @@ -0,0 +1,141 @@ +"""Tests for the OAuthProviderFactory class.""" + +from typing import Any + +import pytest + +from src.infrastructure.auth.oauth.factory import OAuthProviderFactory +from src.infrastructure.auth.oauth.provider import AbstractOAuthProvider +from src.infrastructure.auth.oauth.schemas import OAuthUserInfo + + +class MockOAuthProvider(AbstractOAuthProvider): + """Mock OAuth provider for testing factory patterns.""" + + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """Process user info from the provider.""" + return OAuthUserInfo( + provider="mock", + provider_user_id=str(user_info.get("id", "")), + email=user_info.get("email"), + email_verified=False, + name=user_info.get("name"), + given_name=None, + family_name=None, + username=None, + picture=None, + raw_data=user_info, + ) + + @classmethod + def create(cls, client_id: str, client_secret: str, redirect_uri: str) -> 
"MockOAuthProvider": + """Factory method to create a new instance.""" + return cls( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + scopes=["email", "profile"], + authorize_endpoint="https://example.com/authorize", + token_endpoint="https://example.com/token", + userinfo_endpoint="https://example.com/userinfo", + provider_name="mock", + ) + + +class CreatelessMockProvider(AbstractOAuthProvider): + """Mock provider without a create method to test fallback instantiation.""" + + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """Process user info from the provider.""" + return OAuthUserInfo( + provider="createless", + provider_user_id=str(user_info.get("id", "")), + email=user_info.get("email"), + email_verified=False, + name=user_info.get("name"), + given_name=None, + family_name=None, + username=None, + picture=None, + raw_data=user_info, + ) + + +@pytest.fixture +def setup_factory(): + """Setup the factory with test providers and clean up after tests.""" + original_providers = OAuthProviderFactory._providers.copy() + + OAuthProviderFactory.register_provider("mock", MockOAuthProvider) + OAuthProviderFactory.register_provider("createless", CreatelessMockProvider) + + yield + + OAuthProviderFactory._providers = original_providers + + +def test_register_provider(): + """Test registering a provider class.""" + original_providers = OAuthProviderFactory._providers.copy() + + OAuthProviderFactory.register_provider("test", MockOAuthProvider) + + assert "test" in OAuthProviderFactory._providers + assert OAuthProviderFactory._providers["test"] == MockOAuthProvider + + OAuthProviderFactory._providers = original_providers + + +def test_get_provider_class_registered(setup_factory): + """Test getting a registered provider class.""" + provider_class = OAuthProviderFactory.get_provider_class("mock") + assert provider_class == MockOAuthProvider + + +def test_get_provider_class_not_registered(setup_factory): + 
"""Test getting a non-registered provider class.""" + provider_class = OAuthProviderFactory.get_provider_class("nonexistent") + assert provider_class is None + + +def test_create_provider_with_create_method(setup_factory): + """Test creating a provider that has a create class method.""" + provider = OAuthProviderFactory.create_provider( + provider_name="mock", + client_id="test-id", + client_secret="test-secret", + redirect_uri="https://test.com/callback", + ) + + assert isinstance(provider, MockOAuthProvider) + assert provider.client_id == "test-id" + assert provider.client_secret == "test-secret" + assert provider.redirect_uri == "https://test.com/callback" + assert provider.name == "mock" + + +def test_create_provider_without_create_method(setup_factory): + """Test creating a provider without a create class method.""" + provider = OAuthProviderFactory.create_provider( + provider_name="createless", + client_id="test-id", + client_secret="test-secret", + redirect_uri="https://test.com/callback", + ) + + assert isinstance(provider, CreatelessMockProvider) + assert provider.client_id == "test-id" + assert provider.client_secret == "test-secret" + assert provider.redirect_uri == "https://test.com/callback" + assert provider.name == "createless" + + +def test_create_provider_not_registered(): + """Test creating a provider that is not registered.""" + with pytest.raises(ValueError): + OAuthProviderFactory.create_provider( + provider_name="nonexistent", + client_id="test-id", + client_secret="test-secret", + redirect_uri="https://test.com/callback", + ) diff --git a/backend/tests/unit/infrastructure/auth/oauth/test_provider.py b/backend/tests/unit/infrastructure/auth/oauth/test_provider.py new file mode 100644 index 00000000..bc6f8dac --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/oauth/test_provider.py @@ -0,0 +1,237 @@ +"""Tests for the AbstractOAuthProvider class.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + 
+import pytest + +from src.infrastructure.auth.oauth.provider import AbstractOAuthProvider +from src.infrastructure.auth.oauth.schemas import OAuthUserInfo + + +class ConcreteOAuthProvider(AbstractOAuthProvider): + """Concrete implementation of AbstractOAuthProvider for testing.""" + + async def process_user_info(self, user_info: dict[str, Any]) -> OAuthUserInfo: + """Process user info from the provider.""" + return OAuthUserInfo( + provider="test", + provider_user_id=str(user_info.get("id", "")), + email=user_info.get("email"), + email_verified=user_info.get("email_verified", False), + name=user_info.get("name"), + given_name=user_info.get("given_name"), + family_name=user_info.get("family_name"), + username=user_info.get("username"), + picture=user_info.get("picture"), + raw_data=user_info, + ) + + +@pytest.fixture +def oauth_provider(): + """Create a test provider instance.""" + return ConcreteOAuthProvider( + client_id="test-client-id", + client_secret="test-client-secret", + redirect_uri="https://example.com/callback", + scopes=["openid", "email", "profile"], + authorize_endpoint="https://auth.example.com/authorize", + token_endpoint="https://auth.example.com/token", + userinfo_endpoint="https://auth.example.com/userinfo", + provider_name="test", + ) + + +def test_provider_initialization(oauth_provider): + """Test provider initialization with correct attributes.""" + assert oauth_provider.client_id == "test-client-id" + assert oauth_provider.client_secret == "test-client-secret" + assert oauth_provider.redirect_uri == "https://example.com/callback" + assert oauth_provider.scopes == ["openid", "email", "profile"] + assert oauth_provider.authorize_endpoint == "https://auth.example.com/authorize" + assert oauth_provider.token_endpoint == "https://auth.example.com/token" + assert oauth_provider.userinfo_endpoint == "https://auth.example.com/userinfo" + assert oauth_provider.name == "test" + + +def test_generate_state(oauth_provider): + """Test that generate_state 
returns a random string of expected length.""" + state = oauth_provider.generate_state() + assert isinstance(state, str) + assert len(state) > 32 + + +def test_generate_pkce_codes(oauth_provider): + """Test that PKCE code generation returns both verifier and challenge.""" + pkce_codes = oauth_provider.generate_pkce_codes() + assert "code_verifier" in pkce_codes + assert "code_challenge" in pkce_codes + assert isinstance(pkce_codes["code_verifier"], str) + assert isinstance(pkce_codes["code_challenge"], str) + assert len(pkce_codes["code_verifier"]) >= 43 + assert len(pkce_codes["code_challenge"]) >= 43 + + +@pytest.mark.asyncio +async def test_get_authorization_url_with_pkce(oauth_provider): + """Test authorization URL generation with PKCE.""" + state = "test-state-value" + result = await oauth_provider.get_authorization_url(state=state, pkce=True) + + assert "url" in result + assert "state" in result + assert "code_verifier" in result + assert result["state"] == state + assert "client_id=test-client-id" in result["url"] + assert "redirect_uri=https%3A%2F%2Fexample.com%2Fcallback" in result["url"] + assert "code_challenge" in result["url"] + assert "code_challenge_method=S256" in result["url"] + + +@pytest.mark.asyncio +async def test_get_authorization_url_without_pkce(oauth_provider): + """Test authorization URL generation without PKCE.""" + state = "test-state-value" + result = await oauth_provider.get_authorization_url(state=state, pkce=False) + + assert "url" in result + assert "state" in result + assert "code_verifier" not in result + assert result["state"] == state + assert "client_id=test-client-id" in result["url"] + assert "redirect_uri=https%3A%2F%2Fexample.com%2Fcallback" in result["url"] + assert "code_challenge" not in result["url"] + assert "code_challenge_method=S256" not in result["url"] + + +@pytest.mark.asyncio +async def test_get_authorization_url_with_extra_params(oauth_provider): + """Test authorization URL generation with extra parameters.""" 
+ extra_params = {"prompt": "consent", "access_type": "offline"} + result = await oauth_provider.get_authorization_url(extra_params=extra_params) + + assert "url" in result + assert "prompt=consent" in result["url"] + assert "access_type=offline" in result["url"] + + +@pytest.mark.asyncio +async def test_exchange_code_successful(oauth_provider): + """Test successful code exchange for access token.""" + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "access_token": "test-access-token", + "token_type": "Bearer", + "expires_in": 3600, + "refresh_token": "test-refresh-token", + } + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + + mock_context = AsyncMock() + mock_context.__aenter__.return_value = mock_client + mock_context.__aexit__.return_value = None + + with patch("httpx.AsyncClient", return_value=mock_context): + result = await oauth_provider.exchange_code("test-code", "test-verifier") + + mock_client.post.assert_called_once() + + call_args = mock_client.post.call_args + assert call_args[0][0] == "https://auth.example.com/token" + + post_data = call_args[1]["data"] + assert post_data["client_id"] == "test-client-id" + assert post_data["client_secret"] == "test-client-secret" + assert post_data["code"] == "test-code" + assert post_data["redirect_uri"] == "https://example.com/callback" + assert post_data["code_verifier"] == "test-verifier" + + assert result["access_token"] == "test-access-token" + assert result["refresh_token"] == "test-refresh-token" + + +@pytest.mark.asyncio +async def test_get_user_info_successful(oauth_provider): + """Test successful user info retrieval.""" + test_user_info = {"id": "12345", "email": "test@example.com", "name": "Test User", "picture": "https://example.com/pic.jpg"} + + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = test_user_info + + mock_client = AsyncMock() + 
mock_client.get.return_value = mock_response + + mock_context = AsyncMock() + mock_context.__aenter__.return_value = mock_client + mock_context.__aexit__.return_value = None + + with patch("httpx.AsyncClient", return_value=mock_context): + result = await oauth_provider.get_user_info("test-access-token") + + mock_client.get.assert_called_once() + + call_args = mock_client.get.call_args + assert call_args[0][0] == "https://auth.example.com/userinfo" + + headers = call_args[1]["headers"] + assert headers["Authorization"] == "Bearer test-access-token" + + assert result == test_user_info + + +@pytest.mark.asyncio +async def test_process_user_info(oauth_provider): + """Test processing of user info into standardized format.""" + test_user_info = { + "id": "12345", + "email": "test@example.com", + "email_verified": True, + "name": "Test User", + "given_name": "Test", + "family_name": "User", + "username": "testuser", + "picture": "https://example.com/pic.jpg", + } + + result = await oauth_provider.process_user_info(test_user_info) + + assert isinstance(result, OAuthUserInfo) + assert result.provider == "test" + assert result.provider_user_id == "12345" + assert result.email == "test@example.com" + assert result.email_verified is True + assert result.name == "Test User" + assert result.given_name == "Test" + assert result.family_name == "User" + assert result.username == "testuser" + assert result.picture == "https://example.com/pic.jpg" + assert result.raw_data == test_user_info + + +@pytest.mark.asyncio +async def test_validate_token_valid(oauth_provider): + """Test token validation with valid token.""" + with patch.object(oauth_provider, "get_user_info", new_callable=AsyncMock) as mock_get_user_info: + mock_get_user_info.return_value = {"id": "12345"} + + result = await oauth_provider.validate_token("valid-token") + + assert result is True + mock_get_user_info.assert_called_once_with("valid-token") + + +@pytest.mark.asyncio +async def 
test_validate_token_invalid(oauth_provider): + """Test token validation with invalid token.""" + with patch.object(oauth_provider, "get_user_info", new_callable=AsyncMock) as mock_get_user_info: + mock_get_user_info.side_effect = Exception("Invalid token") + + result = await oauth_provider.validate_token("invalid-token") + + assert result is False + mock_get_user_info.assert_called_once_with("invalid-token") diff --git a/backend/tests/unit/infrastructure/auth/oauth/test_providers.py b/backend/tests/unit/infrastructure/auth/oauth/test_providers.py new file mode 100644 index 00000000..43e4ec04 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/oauth/test_providers.py @@ -0,0 +1,232 @@ +"""Tests for the specific OAuth provider implementations.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from src.infrastructure.auth.oauth.providers.github import GitHubOAuthProvider +from src.infrastructure.auth.oauth.providers.google import GoogleOAuthProvider +from src.infrastructure.auth.oauth.schemas import OAuthUserInfo + + +@pytest.fixture +def google_provider(): + """Create a Google OAuth provider for testing.""" + return GoogleOAuthProvider( + client_id="google-client-id", + client_secret="google-client-secret", + redirect_uri="https://example.com/oauth/callback/google", + ) + + +@pytest.fixture +def github_provider(): + """Create a GitHub OAuth provider for testing.""" + return GitHubOAuthProvider( + client_id="github-client-id", + client_secret="github-client-secret", + redirect_uri="https://example.com/oauth/callback/github", + ) + + +def test_google_provider_initialization(google_provider): + """Test Google provider initialization with default scopes.""" + assert google_provider.client_id == "google-client-id" + assert google_provider.client_secret == "google-client-secret" + assert google_provider.redirect_uri == "https://example.com/oauth/callback/google" + assert "openid" in google_provider.scopes + assert 
"https://www.googleapis.com/auth/userinfo.email" in google_provider.scopes + assert "https://www.googleapis.com/auth/userinfo.profile" in google_provider.scopes + assert google_provider.authorize_endpoint == "https://accounts.google.com/o/oauth2/v2/auth" + assert google_provider.token_endpoint == "https://oauth2.googleapis.com/token" + assert google_provider.userinfo_endpoint == "https://www.googleapis.com/oauth2/v3/userinfo" + assert google_provider.name == "google" + + +def test_github_provider_initialization(github_provider): + """Test GitHub provider initialization with default scopes.""" + assert github_provider.client_id == "github-client-id" + assert github_provider.client_secret == "github-client-secret" + assert github_provider.redirect_uri == "https://example.com/oauth/callback/github" + assert "read:user" in github_provider.scopes + assert "user:email" in github_provider.scopes + assert github_provider.authorize_endpoint == "https://github.com/login/oauth/authorize" + assert github_provider.token_endpoint == "https://github.com/login/oauth/access_token" + assert github_provider.userinfo_endpoint == "https://api.github.com/user" + assert github_provider.name == "github" + + +@pytest.mark.asyncio +async def test_google_authorization_url(google_provider): + """Test Google-specific authorization URL generation.""" + auth_data = await google_provider.get_authorization_url() + + assert "url" in auth_data + assert "state" in auth_data + assert "code_verifier" in auth_data + assert "access_type=offline" in auth_data["url"] + assert "prompt=consent" in auth_data["url"] + + +@pytest.mark.asyncio +async def test_google_process_user_info(google_provider): + """Test Google-specific user info processing.""" + google_user_data = { + "sub": "123456789", + "email": "user@example.com", + "email_verified": True, + "name": "Test User", + "given_name": "Test", + "family_name": "User", + "picture": "https://example.com/photo.jpg", + } + + result = await 
google_provider.process_user_info(google_user_data) + + assert isinstance(result, OAuthUserInfo) + assert result.provider == "google" + assert result.provider_user_id == "123456789" + assert result.email == "user@example.com" + assert result.email_verified is True + assert result.name == "Test User" + assert result.given_name == "Test" + assert result.family_name == "User" + assert result.username is None + assert result.picture == "https://example.com/photo.jpg" + assert result.raw_data == google_user_data + + +@pytest.mark.asyncio +async def test_github_exchange_code(github_provider): + """Test GitHub-specific code exchange with Accept header.""" + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = { + "access_token": "github-token", + "token_type": "bearer", + "scope": "read:user,user:email", + } + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + + mock_context = AsyncMock() + mock_context.__aenter__.return_value = mock_client + mock_context.__aexit__.return_value = None + + with patch("httpx.AsyncClient", return_value=mock_context): + result = await github_provider.exchange_code("github-code") + + headers = mock_client.post.call_args[1]["headers"] + assert headers["Accept"] == "application/json" + + assert result["access_token"] == "github-token" + assert result["token_type"] == "bearer" + + +@pytest.mark.asyncio +async def test_github_get_user_info_with_emails(github_provider): + """Test GitHub user info retrieval with separate emails endpoint call.""" + profile_data = { + "id": 12345, + "login": "testuser", + "name": "Test User", + "email": None, + "avatar_url": "https://github.com/avatars/user.jpg", + } + + emails_data = [ + {"email": "private@example.com", "primary": True, "verified": True}, + {"email": "public@example.com", "primary": False, "verified": True}, + ] + + mock_profile_response = MagicMock() + mock_profile_response.raise_for_status = MagicMock() + 
mock_profile_response.json.return_value = profile_data + + mock_emails_response = MagicMock() + mock_emails_response.status_code = 200 + mock_emails_response.json.return_value = emails_data + + mock_client = AsyncMock() + mock_client.get.side_effect = [mock_profile_response, mock_emails_response] + + mock_context = AsyncMock() + mock_context.__aenter__.return_value = mock_client + mock_context.__aexit__.return_value = None + + with patch("httpx.AsyncClient", return_value=mock_context): + result = await github_provider.get_user_info("github-token") + + assert mock_client.get.call_count == 2 + + assert mock_client.get.call_args_list[0][0][0] == "https://api.github.com/user" + + assert mock_client.get.call_args_list[1][0][0] == "https://api.github.com/user/emails" + + assert result["id"] == 12345 + assert result["login"] == "testuser" + assert "emails" in result + assert result["emails"] == emails_data + + +@pytest.mark.asyncio +async def test_github_process_user_info_with_primary_email(github_provider): + """Test GitHub-specific user info processing with primary email from emails array.""" + github_user_data = { + "id": 12345, + "login": "testuser", + "name": "Test User", + "email": "public@example.com", + "avatar_url": "https://github.com/avatars/user.jpg", + "emails": [ + {"email": "private@example.com", "primary": True, "verified": True}, + {"email": "public@example.com", "primary": False, "verified": False}, + ], + } + + result = await github_provider.process_user_info(github_user_data) + + assert isinstance(result, OAuthUserInfo) + assert result.provider == "github" + assert result.provider_user_id == "12345" + assert result.email == "private@example.com" + assert result.email_verified is True + assert result.name == "Test User" + assert result.username == "testuser" + assert result.picture == "https://github.com/avatars/user.jpg" + assert result.raw_data == github_user_data + + +@pytest.mark.asyncio +async def test_google_create_class_method(): + """Test Google 
provider's create class method.""" + provider = GoogleOAuthProvider.create( + client_id="test-id", + client_secret="test-secret", + redirect_uri="https://example.com/callback", + ) + + assert isinstance(provider, GoogleOAuthProvider) + assert provider.client_id == "test-id" + assert provider.client_secret == "test-secret" + assert provider.redirect_uri == "https://example.com/callback" + assert provider.name == "google" + assert "openid" in provider.scopes + + +@pytest.mark.asyncio +async def test_github_create_class_method(): + """Test GitHub provider's create class method.""" + provider = GitHubOAuthProvider.create( + client_id="test-id", + client_secret="test-secret", + redirect_uri="https://example.com/callback", + ) + + assert isinstance(provider, GitHubOAuthProvider) + assert provider.client_id == "test-id" + assert provider.client_secret == "test-secret" + assert provider.redirect_uri == "https://example.com/callback" + assert provider.name == "github" + assert "read:user" in provider.scopes diff --git a/backend/tests/unit/infrastructure/auth/oauth/test_services.py b/backend/tests/unit/infrastructure/auth/oauth/test_services.py new file mode 100644 index 00000000..70001f82 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/oauth/test_services.py @@ -0,0 +1,302 @@ +"""Tests for the OAuthAccountService class.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from src.infrastructure.auth.oauth.schemas import OAuthUserInfo +from src.infrastructure.auth.oauth.services import OAuthAccountService +from src.modules.user.crud import crud_users +from src.modules.user.schemas import UserCreateInternal + + +@pytest.fixture +def oauth_service(): + """Create an instance of OAuthAccountService for testing.""" + return OAuthAccountService() + + +@pytest.fixture +def mock_db(): + """Create a mock database session.""" + return AsyncMock() + + +@pytest.fixture +def google_user_info(): + """Create a sample Google user info for testing.""" + return 
OAuthUserInfo( + provider="google", + provider_user_id="123456789", + email="user@example.com", + email_verified=True, + name="Test User", + given_name="Test", + family_name="User", + username=None, + picture="https://example.com/photo.jpg", + raw_data={ + "sub": "123456789", + "email": "user@example.com", + "email_verified": True, + "name": "Test User", + "given_name": "Test", + "family_name": "User", + "picture": "https://example.com/photo.jpg", + }, + ) + + +@pytest.fixture +def github_user_info(): + """Create a sample GitHub user info for testing.""" + return OAuthUserInfo( + provider="github", + provider_user_id="987654321", + email="user@example.com", + email_verified=True, + name="Test User", + given_name=None, + family_name=None, + username="testuser", + picture="https://github.com/avatars/user.jpg", + raw_data={ + "id": 987654321, + "login": "testuser", + "name": "Test User", + "email": "user@example.com", + "avatar_url": "https://github.com/avatars/user.jpg", + "emails": [{"email": "user@example.com", "primary": True, "verified": True}], + }, + ) + + +@pytest.mark.asyncio +async def test_get_or_create_user_existing_by_provider_id(oauth_service, mock_db, google_user_info): + """Test getting a user that already exists with the provider ID.""" + existing_user = { + "id": 1, + "username": "existinguser", + "email": "user@example.com", + "google_id": "123456789", + } + + mock_db.execute = AsyncMock() + + with patch.object(crud_users, "get", new_callable=AsyncMock) as mock_get: + mock_get.return_value = existing_user + + user, created = await oauth_service.get_or_create_user(google_user_info, mock_db) + + assert user == existing_user + assert created is False + + mock_get.assert_called_once() + assert mock_get.call_args[1]["filter_by"] == {"google_id": "123456789"} + + +@pytest.mark.asyncio +async def test_get_or_create_user_existing_by_email(oauth_service, mock_db, google_user_info): + """Test getting a user that exists by email but not provider ID.""" + 
existing_user = { + "id": 1, + "username": "existinguser", + "email": "user@example.com", + "google_id": None, + } + + mock_db.execute = AsyncMock() + + with ( + patch.object(crud_users, "get", new_callable=AsyncMock) as mock_get, + patch.object(crud_users, "update", new_callable=AsyncMock) as mock_update, + ): + mock_get.side_effect = [None, existing_user] + mock_update.return_value = {**existing_user, "google_id": "123456789"} + + user, created = await oauth_service.get_or_create_user(google_user_info, mock_db) + + assert user["id"] == 1 + assert user["google_id"] == "123456789" + assert created is False + + assert mock_get.call_count == 2 + assert mock_get.call_args_list[0][1]["filter_by"] == {"google_id": "123456789"} + assert mock_get.call_args_list[1][1]["filter_by"] == {"email": "user@example.com"} + + mock_update.assert_called_once() + assert mock_update.call_args[1]["object_id"] == 1 + assert mock_update.call_args[1]["object"]["google_id"] == "123456789" + assert "oauth_updated_at" in mock_update.call_args[1]["object"] + + +@pytest.mark.asyncio +async def test_get_or_create_user_new_user(oauth_service, mock_db, google_user_info): + """Test creating a new user when none exists.""" + new_user = { + "id": 1, + "username": "testuser", + "email": "user@example.com", + "google_id": "123456789", + "oauth_provider": "google", + } + + with ( + patch.object(crud_users, "get", new_callable=AsyncMock) as mock_get, + patch.object(crud_users, "exists", new_callable=AsyncMock) as mock_exists, + patch.object(crud_users, "create", new_callable=AsyncMock) as mock_create, + patch("secrets.token_urlsafe", return_value="random_password"), + ): + mock_get.return_value = None + mock_exists.return_value = False + mock_create.return_value = new_user + + user, created = await oauth_service.get_or_create_user(google_user_info, mock_db) + + assert user == new_user + assert created is True + + assert mock_get.call_count == 2 + + mock_exists.assert_called_once() + + 
mock_create.assert_called_once() + create_args = mock_create.call_args[1]["object"] + assert create_args.email == "user@example.com" + assert create_args.name == "Test User" + assert create_args.username.startswith("test") + assert create_args.email_verified is True + assert create_args.google_id == "123456789" + assert create_args.oauth_provider == "google" + + +@pytest.mark.asyncio +async def test_get_or_create_user_username_conflict(oauth_service, mock_db, google_user_info): + """Test username generation with conflicts.""" + new_user = { + "id": 1, + "username": "test1", + "email": "user@example.com", + "google_id": "123456789", + } + + with ( + patch.object(crud_users, "get", new_callable=AsyncMock) as mock_get, + patch.object(crud_users, "exists", new_callable=AsyncMock) as mock_exists, + patch.object(crud_users, "create", new_callable=AsyncMock) as mock_create, + patch("secrets.token_urlsafe", return_value="random_password"), + ): + mock_get.return_value = None + + mock_exists.side_effect = [True, False] + + mock_create.return_value = new_user + + user, created = await oauth_service.get_or_create_user(google_user_info, mock_db) + + assert user == new_user + assert created is True + + assert mock_exists.call_count == 2 + + mock_create.assert_called_once() + create_args = mock_create.call_args[1]["object"] + assert create_args.username == "test1" + + +@pytest.mark.asyncio +async def test_create_user_from_oauth_no_email(oauth_service, mock_db, google_user_info): + """Test that creating a user without email raises ValueError.""" + user_info_no_email = OAuthUserInfo( + provider="google", + provider_user_id="123456789", + email=None, + email_verified=False, + name="Test User", + given_name="Test", + family_name="User", + username=None, + picture="https://example.com/photo.jpg", + raw_data={}, + ) + + with pytest.raises(ValueError, match="Email is required for user creation"): + await oauth_service._create_user_from_oauth(user_info_no_email, mock_db) + + 
+@pytest.mark.asyncio +async def test_create_user_from_oauth_with_username(oauth_service, mock_db): + """Test creating a user with a pre-existing username.""" + user_info = OAuthUserInfo( + provider="github", + provider_user_id="987654321", + email="user@example.com", + email_verified=True, + name="Test User", + given_name=None, + family_name=None, + username="testuser", + picture="https://example.com/photo.jpg", + raw_data={}, + ) + + new_user = { + "id": 1, + "username": "testuser", + "email": "user@example.com", + "github_id": "987654321", + } + + with ( + patch.object(crud_users, "exists", new_callable=AsyncMock) as mock_exists, + patch.object(crud_users, "create", new_callable=AsyncMock) as mock_create, + patch("secrets.token_urlsafe", return_value="random_password"), + ): + mock_exists.return_value = False + mock_create.return_value = new_user + + user, created = await oauth_service._create_user_from_oauth(user_info, mock_db) + + assert user == new_user + assert created is True + + mock_exists.assert_called_once() + assert mock_exists.call_args[1]["filter_by"] == {"username": "testuser"} + + mock_create.assert_called_once() + create_args = mock_create.call_args[1]["object"] + assert create_args.username == "testuser" + assert create_args.email == "user@example.com" + assert create_args.github_id == "987654321" + assert create_args.oauth_provider == "github" + + +@pytest.mark.asyncio +async def test_oauth_user_creation_uses_hashed_password(oauth_service, mock_db, google_user_info): + """OAuth user creation must use UserCreateInternal with a bcrypt-hashed password.""" + new_user = {"id": 1, "username": "test", "email": "user@example.com", "google_id": "123456789"} + + with ( + patch.object(crud_users, "get", new_callable=AsyncMock) as mock_get, + patch.object(crud_users, "exists", new_callable=AsyncMock) as mock_exists, + patch.object(crud_users, "create", new_callable=AsyncMock) as mock_create, + ): + mock_get.return_value = None + mock_exists.return_value = 
False + mock_create.return_value = new_user + + await oauth_service.get_or_create_user(google_user_info, mock_db) + + create_args = mock_create.call_args[1]["object"] + + # Must use UserCreateInternal, not UserCreate + assert isinstance(create_args, UserCreateInternal), f"Expected UserCreateInternal, got {type(create_args).__name__}" + + # Must have hashed_password, not password + assert hasattr(create_args, "hashed_password"), "Missing hashed_password field" + assert not hasattr(create_args, "password"), "Should not have plaintext password field" + + # Must be a bcrypt hash (starts with $2b$) + assert create_args.hashed_password.startswith("$2b$"), ( + f"Expected bcrypt hash, got: {create_args.hashed_password[:20]}..." + ) diff --git a/backend/tests/unit/infrastructure/auth/session/__init__.py b/backend/tests/unit/infrastructure/auth/session/__init__.py new file mode 100644 index 00000000..c7b728dd --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/__init__.py @@ -0,0 +1 @@ +"""Session auth tests.""" diff --git a/backend/tests/unit/infrastructure/auth/session/backends/__init__.py b/backend/tests/unit/infrastructure/auth/session/backends/__init__.py new file mode 100644 index 00000000..f7a6aefc --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/backends/__init__.py @@ -0,0 +1 @@ +"""Session backends tests.""" diff --git a/backend/tests/unit/infrastructure/auth/session/backends/test_memcached.py b/backend/tests/unit/infrastructure/auth/session/backends/test_memcached.py new file mode 100644 index 00000000..adf32f73 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/backends/test_memcached.py @@ -0,0 +1,221 @@ +import hashlib +import json +from unittest.mock import AsyncMock, patch + +import pytest +from pydantic import BaseModel + +from src.infrastructure.auth.session.backends.memcached import MemcachedSessionStorage + + +class MemcachedTestSessionData(BaseModel): + """Test data model for session testing.""" + + 
user_id: int + session_id: str + is_active: bool = True + metadata: dict = {} + + +@pytest.fixture +def mock_memcached(): + """Create a mock Memcached client.""" + memcached_mock = AsyncMock() + memcached_mock.get = AsyncMock() + memcached_mock.set = AsyncMock() + memcached_mock.delete = AsyncMock() + return memcached_mock + + +@pytest.fixture +def memcached_storage(mock_memcached): + """Create a Memcached session storage instance with a mock client.""" + with patch("src.infrastructure.auth.session.backends.memcached.aiomcache.Client", return_value=mock_memcached): + storage: MemcachedSessionStorage[MemcachedTestSessionData] = MemcachedSessionStorage( + prefix="test_session:", expiration=1800 + ) + storage.client = mock_memcached + return storage + + +def encode_key(key): + """Helper function to encode a key the same way the storage class does.""" + if len(key) > 240: + key_hash = hashlib.sha256(key.encode()).hexdigest()[:32] + key = f"{key[:200]}:{key_hash}" + return key.encode("utf-8") + + +@pytest.mark.asyncio +async def test_create_session(memcached_storage, mock_memcached): + """Test creating a new session.""" + session_id = "test-session-id" + test_data = MemcachedTestSessionData(user_id=1, session_id=session_id) + + mock_memcached.set.return_value = True + mock_memcached.get.return_value = None + + result = await memcached_storage.create(test_data, session_id=session_id) + + assert result == session_id + assert mock_memcached.set.call_count == 2 + + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + assert mock_memcached.set.call_args_list[0][0][0] == encoded_key + + +@pytest.mark.asyncio +async def test_get_session(memcached_storage, mock_memcached): + """Test retrieving a session by ID.""" + session_id = "test-session-id" + test_data = MemcachedTestSessionData(user_id=1, session_id=session_id) + + encoded_data = test_data.model_dump_json().encode("utf-8") + mock_memcached.get.return_value = encoded_data + + result 
= await memcached_storage.get(session_id, MemcachedTestSessionData) + + assert result is not None + assert result.user_id == test_data.user_id + assert result.session_id == test_data.session_id + + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + mock_memcached.get.assert_called_once_with(encoded_key) + + +@pytest.mark.asyncio +async def test_get_session_not_found(memcached_storage, mock_memcached): + """Test retrieving a non-existent session.""" + session_id = "nonexistent-session-id" + + mock_memcached.get.return_value = None + result = await memcached_storage.get(session_id, MemcachedTestSessionData) + assert result is None + + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + mock_memcached.get.assert_called_once_with(encoded_key) + + +@pytest.mark.asyncio +async def test_update_session(memcached_storage, mock_memcached): + """Test updating an existing session.""" + session_id = "test-session-id" + test_data = MemcachedTestSessionData(user_id=1, session_id=session_id) + + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + + user_sessions_key = memcached_storage.get_user_sessions_key(1) + encoded_user_key = encode_key(user_sessions_key) + + def mock_get_side_effect(key): + if key == encoded_key: + return b"existing_data" + elif key == encoded_user_key: + return json.dumps(["session1", session_id]).encode("utf-8") + return None + + mock_memcached.get.side_effect = mock_get_side_effect + mock_memcached.set.return_value = True + + result = await memcached_storage.update(session_id, test_data) + + assert result is True + assert mock_memcached.get.call_count == 2 + assert encoded_key in [call_args[0][0] for call_args in mock_memcached.get.call_args_list] + assert encoded_user_key in [call_args[0][0] for call_args in mock_memcached.get.call_args_list] + + +@pytest.mark.asyncio +async def test_delete_session(memcached_storage, 
mock_memcached): + """Test deleting a session.""" + session_id = "test-session-id" + test_data = MemcachedTestSessionData(user_id=1, session_id=session_id) + + encoded_data = test_data.model_dump_json().encode("utf-8") + mock_memcached.get.return_value = encoded_data + + user_sessions = [session_id, "other-session"] + encoded_user_sessions = json.dumps(user_sessions).encode("utf-8") + + mock_memcached.get.side_effect = lambda key: ( + encoded_data if encode_key(memcached_storage.get_key(session_id)) == key else encoded_user_sessions + ) + + result = await memcached_storage.delete(session_id) + assert result is True + + assert mock_memcached.delete.call_count == 1 + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + mock_memcached.delete.assert_called_once_with(encoded_key) + + +@pytest.mark.asyncio +async def test_extend_session(memcached_storage, mock_memcached): + """Test extending the expiration of a session.""" + session_id = "test-session-id" + test_data = MemcachedTestSessionData(user_id=1, session_id=session_id) + encoded_data = test_data.model_dump_json().encode("utf-8") + + session_key = memcached_storage.get_key(session_id) + encoded_key = encode_key(session_key) + user_sessions_key = memcached_storage.get_user_sessions_key(1) + encoded_user_key = encode_key(user_sessions_key) + + def mock_get_side_effect(key): + if key == encoded_key: + return encoded_data + elif key == encoded_user_key: + return json.dumps(["session1", session_id]).encode("utf-8") + return None + + mock_memcached.get.side_effect = mock_get_side_effect + mock_memcached.set.return_value = True + + result = await memcached_storage.extend(session_id) + assert result is True + + assert mock_memcached.get.call_count == 2 + assert encoded_key in [call_args[0][0] for call_args in mock_memcached.get.call_args_list] + assert encoded_user_key in [call_args[0][0] for call_args in mock_memcached.get.call_args_list] + + assert mock_memcached.set.call_count 
>= 2 + session_set_calls = [c for c in mock_memcached.set.call_args_list if c[0][0] == encoded_key] + assert len(session_set_calls) == 1 + assert session_set_calls[0][1]["exptime"] == 1800 # Default expiration + + +@pytest.mark.asyncio +async def test_exists_session(memcached_storage, mock_memcached): + """Test checking if a session exists.""" + session_id = "test-session-id" + + mock_memcached.get.return_value = b"some_data" + result = await memcached_storage.exists(session_id) + assert result is True + + mock_memcached.get.return_value = None + result = await memcached_storage.exists(session_id) + assert result is False + + +@pytest.mark.asyncio +async def test_get_user_sessions(memcached_storage, mock_memcached): + """Test retrieving all sessions for a user.""" + user_id = 1 + session_ids = ["session1", "session2", "session3"] + + encoded_data = json.dumps(session_ids).encode("utf-8") + mock_memcached.get.return_value = encoded_data + + result = await memcached_storage.get_user_sessions(user_id) + + assert result == session_ids + + user_sessions_key = memcached_storage.get_user_sessions_key(user_id) + encoded_key = encode_key(user_sessions_key) + mock_memcached.get.assert_called_once_with(encoded_key) diff --git a/backend/tests/unit/infrastructure/auth/session/backends/test_redis.py b/backend/tests/unit/infrastructure/auth/session/backends/test_redis.py new file mode 100644 index 00000000..db88bca1 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/backends/test_redis.py @@ -0,0 +1,262 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from pydantic import BaseModel + +from src.infrastructure.auth.session.backends.redis import RedisSessionStorage + + +class RedisTestSessionData(BaseModel): + """Test data model for session testing.""" + + user_id: int + session_id: str + is_active: bool = True + metadata: dict = {} + + +@pytest.fixture +def mock_pipeline(): + """Create a mock Redis pipeline.""" + pipeline = MagicMock() + 
pipeline.set = MagicMock(return_value=pipeline) + pipeline.delete = MagicMock(return_value=pipeline) + pipeline.sadd = MagicMock(return_value=pipeline) + pipeline.srem = MagicMock(return_value=pipeline) + pipeline.expire = MagicMock(return_value=pipeline) + pipeline.execute = AsyncMock(return_value=[True, True]) + return pipeline + + +@pytest.fixture +def mock_redis(mock_pipeline): + """Create a mock Redis client with a non-coroutine pipeline.""" + redis_mock = AsyncMock() + redis_mock.get = AsyncMock() + redis_mock.set = AsyncMock() + redis_mock.delete = AsyncMock() + redis_mock.sadd = AsyncMock() + redis_mock.srem = AsyncMock() + redis_mock.smembers = AsyncMock() + redis_mock.expire = AsyncMock() + redis_mock.exists = AsyncMock() + redis_mock.ttl = AsyncMock(return_value=1000) + redis_mock.pipeline = MagicMock(return_value=mock_pipeline) + + return redis_mock + + +@pytest.fixture +def redis_storage(mock_redis): + """Create a Redis session storage instance with a mock Redis client.""" + with patch("src.infrastructure.auth.session.backends.redis.AsyncRedis", return_value=mock_redis): + storage: RedisSessionStorage[RedisTestSessionData] = RedisSessionStorage(prefix="test_session:", expiration=1800) + storage.client = mock_redis + return storage + + +@pytest.fixture +def fake_redis(): + """Create a Redis storage with a more complete mock for the delete_pattern test.""" + redis_mock = AsyncMock() + redis_mock.get = AsyncMock(return_value="{}") + redis_mock.set = AsyncMock() + redis_mock.delete = AsyncMock(return_value=1) + redis_mock.exists = AsyncMock(return_value=0) + redis_mock.scan_iter = AsyncMock() + redis_mock.keys = AsyncMock( + return_value=["test_session:1", "test_session:2", "test_session:3", "test_session:4", "test_session:5"] + ) + + # Set up pipeline for create + pipeline_mock = MagicMock() + pipeline_mock.set = MagicMock(return_value=pipeline_mock) + pipeline_mock.sadd = MagicMock(return_value=pipeline_mock) + pipeline_mock.expire = 
MagicMock(return_value=pipeline_mock) + pipeline_mock.delete = MagicMock(return_value=pipeline_mock) + pipeline_mock.execute = AsyncMock(return_value=[True, True, True]) + + redis_mock.pipeline = MagicMock(return_value=pipeline_mock) + + # Configure scan_iter to yield login keys + async def mock_scan_iter(**kwargs): + if kwargs.get("match") == "login:*": + for i in range(3): + yield f"login:user:test{i}" + # In async functions we can't use 'yield from' + + redis_mock.scan_iter.side_effect = mock_scan_iter + + # Create storage with the mock + with patch("src.infrastructure.auth.session.backends.redis.Redis", return_value=redis_mock): + storage: RedisSessionStorage = RedisSessionStorage() + storage.client = redis_mock + return storage + + +@pytest.mark.asyncio +async def test_create_session(redis_storage, mock_redis, mock_pipeline): + """Test creating a new session.""" + session_id = "test-session-id" + test_data = RedisTestSessionData(user_id=1, session_id=session_id) + + mock_pipeline.execute.return_value = [True, True, True] + + result = await redis_storage.create(test_data, session_id=session_id) + + assert result == session_id + + mock_pipeline.set.assert_called() + mock_pipeline.sadd.assert_called() + mock_pipeline.expire.assert_called() + mock_pipeline.execute.assert_called_once() + + +@pytest.mark.asyncio +async def test_get_session(redis_storage, mock_redis): + """Test retrieving a session by ID.""" + session_id = "test-session-id" + test_data = RedisTestSessionData(user_id=1, session_id=session_id) + + mock_redis.get.return_value = test_data.model_dump_json() + result = await redis_storage.get(session_id, RedisTestSessionData) + + assert result is not None + assert result.user_id == test_data.user_id + assert result.session_id == test_data.session_id + + mock_redis.get.assert_called_once_with(f"test_session:{session_id}") + + +@pytest.mark.asyncio +async def test_get_session_not_found(redis_storage, mock_redis): + """Test retrieving a non-existent 
session.""" + session_id = "nonexistent-session-id" + + mock_redis.get.return_value = None + + result = await redis_storage.get(session_id, RedisTestSessionData) + assert result is None + + mock_redis.get.assert_called_once_with(f"test_session:{session_id}") + + +@pytest.mark.asyncio +async def test_update_session(redis_storage, mock_redis, mock_pipeline): + """Test updating an existing session.""" + session_id = "test-session-id" + test_data = RedisTestSessionData(user_id=1, session_id=session_id) + + mock_redis.exists.return_value = True + mock_pipeline.execute.return_value = [True, True] + + result = await redis_storage.update(session_id, test_data) + assert result is True + + mock_redis.exists.assert_called_once() + mock_pipeline.set.assert_called() + mock_pipeline.expire.assert_called() + mock_pipeline.execute.assert_called_once() + + +@pytest.mark.asyncio +async def test_delete_session(redis_storage, mock_redis, mock_pipeline): + """Test deleting a session.""" + session_id = "test-session-id" + test_data = RedisTestSessionData(user_id=1, session_id=session_id) + + mock_redis.get.return_value = test_data.model_dump_json() + mock_pipeline.execute.return_value = [1, 1] + + result = await redis_storage.delete(session_id) + assert result is True + + mock_redis.get.assert_called_once() + mock_pipeline.delete.assert_called() + mock_pipeline.srem.assert_called() + mock_pipeline.execute.assert_called_once() + + +@pytest.mark.asyncio +async def test_extend_session(redis_storage, mock_redis, mock_pipeline): + """Test extending the expiration of a session.""" + session_id = "test-session-id" + test_data = RedisTestSessionData(user_id=1, session_id=session_id) + + mock_redis.get.return_value = test_data.model_dump_json() + mock_pipeline.execute.return_value = [True, True] + + result = await redis_storage.extend(session_id) + assert result is True + + mock_redis.get.assert_called_once() + mock_pipeline.expire.assert_called() + mock_pipeline.execute.assert_called_once() + + 
+@pytest.mark.asyncio +async def test_exists_session(redis_storage, mock_redis): + """Test checking if a session exists.""" + session_id = "test-session-id" + + mock_redis.exists.return_value = True + + result = await redis_storage.exists(session_id) + assert result is True + + mock_redis.exists.assert_called_once_with(f"test_session:{session_id}") + + +@pytest.mark.asyncio +async def test_get_user_sessions(redis_storage, mock_redis): + """Test retrieving all sessions for a user.""" + user_id = 1 + session_ids = ["session1", "session2", "session3"] + + mock_redis.smembers.return_value = session_ids + + result = await redis_storage.get_user_sessions(user_id) + assert result == session_ids + + mock_redis.smembers.assert_called_once_with(f"{redis_storage.user_sessions_prefix}{user_id}") + + +@pytest.mark.asyncio +async def test_delete_pattern(mock_redis): + """Test deleting keys matching a pattern from Redis.""" + storage: RedisSessionStorage = RedisSessionStorage() + storage.client = mock_redis + + login_keys = [f"login:user:test{i}".encode() for i in range(3)] + + class AsyncIterator: + def __init__(self, items): + self.items = items + self.index = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.index >= len(self.items): + raise StopAsyncIteration + item = self.items[self.index] + self.index += 1 + return item + + mock_redis.scan_iter = MagicMock(return_value=AsyncIterator(login_keys)) + + mock_pipeline = MagicMock() + mock_pipeline.delete = MagicMock(return_value=mock_pipeline) + mock_pipeline.execute = AsyncMock(return_value=[1, 1, 1]) + mock_redis.pipeline = MagicMock(return_value=mock_pipeline) + + deleted_count = await storage.delete_pattern("login:*") + + mock_redis.scan_iter.assert_called_once_with(match="login:*") + + mock_redis.pipeline.assert_called_once() + assert mock_pipeline.delete.call_count == 3 + mock_pipeline.execute.assert_called_once() + + assert deleted_count == 3 diff --git 
a/backend/tests/unit/infrastructure/auth/session/test_api.py b/backend/tests/unit/infrastructure/auth/session/test_api.py new file mode 100644 index 00000000..dd77f11f --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/test_api.py @@ -0,0 +1,284 @@ +from datetime import UTC, datetime, timedelta +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Request + +from src.infrastructure.auth.session.dependencies import ( + get_current_session_data, + get_current_user, + get_session_from_cookie, + get_session_manager, + verify_csrf_token, +) +from src.infrastructure.auth.session.manager import SessionManager +from src.infrastructure.auth.session.schemas import SessionData +from src.infrastructure.database.session import async_session +from src.interfaces.main import app + + +@pytest.mark.asyncio +async def test_login_endpoint_success(client, test_user, db_session, monkeypatch): + """Test successful login with session authentication.""" + mock_manager = MagicMock(spec=SessionManager) + mock_manager.create_session = AsyncMock(return_value=("test-session-id", "test-csrf-token")) + mock_manager.set_session_cookies = MagicMock() + mock_manager.track_login_attempt = AsyncMock(return_value=(True, 5)) + + app.dependency_overrides[get_session_manager] = lambda: mock_manager + + try: + response = await client.post( + "/api/v1/auth/login", + data={ + "username": test_user["username"], + "password": "Password123!", + }, + ) + + assert response.status_code == 200 + data = response.json() + assert "csrf_token" in data + assert data["csrf_token"] == "test-csrf-token" + + mock_manager.create_session.assert_called_once() + mock_manager.set_session_cookies.assert_called_once() + + create_args = mock_manager.create_session.call_args + assert create_args[1]["user_id"] == test_user["id"] + finally: + if get_session_manager in app.dependency_overrides: + del app.dependency_overrides[get_session_manager] + + +@pytest.mark.asyncio +async 
def test_login_endpoint_invalid_credentials(client, test_user, db_session, monkeypatch): + """Test login with invalid credentials.""" + response = await client.post( + "/api/v1/auth/login", + data={ + "username": test_user["username"], + "password": "WrongPassword!", + }, + ) + + assert response.status_code == 401 + data = response.json() + assert "detail" in data + assert "Incorrect username or password" in data["detail"] + + +@pytest.mark.asyncio +async def test_logout_endpoint(client, test_user, db_session, monkeypatch): + """Test logout endpoint.""" + session_id = "test-session-id" + session_data = SessionData( + session_id=session_id, + user_id=test_user["id"], + is_active=True, + ip_address="127.0.0.1", + user_agent="test-agent", + device_info={}, + last_activity=datetime.now(UTC), + metadata={}, + ) + + mock_manager = MagicMock(spec=SessionManager) + mock_manager.terminate_session = AsyncMock(return_value=True) + mock_manager.clear_session_cookies = MagicMock() + + async def mock_get_current_session_data(request: Request, session_id=None, session_manager=None): + return session_data + + app.dependency_overrides[get_session_manager] = lambda: mock_manager + app.dependency_overrides[get_current_session_data] = mock_get_current_session_data + + try: + response = await client.post("/api/v1/auth/logout") + + print(f"Status Code: {response.status_code}") + print(f"Response Body: {response.text}") + + assert response.status_code == 200 + data = response.json() + assert "message" in data + assert data["message"] == "Logged out successfully" + + mock_manager.terminate_session.assert_called_once_with(session_id) + mock_manager.clear_session_cookies.assert_called_once() + finally: + if get_session_manager in app.dependency_overrides: + del app.dependency_overrides[get_session_manager] + if get_current_session_data in app.dependency_overrides: + del app.dependency_overrides[get_current_session_data] + + +@pytest.mark.asyncio +async def 
test_refresh_csrf_token_endpoint(client, test_user, db_session, monkeypatch): + """Test refreshing the CSRF token.""" + session_id = "test-session-id" + session_data = SessionData( + session_id=session_id, + user_id=test_user["id"], + is_active=True, + ip_address="127.0.0.1", + user_agent="test-agent", + device_info={}, + last_activity=datetime.now(UTC), + metadata={}, + ) + + mock_manager = MagicMock(spec=SessionManager) + mock_manager.regenerate_csrf_token = AsyncMock(return_value="new-csrf-token") + mock_manager.session_timeout = timedelta(minutes=30) # Add session_timeout attribute + + async def mock_get_current_session_data(request: Request, session_id=None, session_manager=None): + return session_data + + app.dependency_overrides[get_session_manager] = lambda: mock_manager + app.dependency_overrides[get_current_session_data] = mock_get_current_session_data + + try: + response = await client.post("/api/v1/auth/refresh-csrf") + + print(f"Status Code: {response.status_code}") + print(f"Response Body: {response.text}") + + assert response.status_code == 200 + data = response.json() + assert "csrf_token" in data + assert data["csrf_token"] == "new-csrf-token" + + mock_manager.regenerate_csrf_token.assert_called_once_with( + user_id=test_user["id"], + session_id=session_id, + ) + finally: + if get_session_manager in app.dependency_overrides: + del app.dependency_overrides[get_session_manager] + if get_current_session_data in app.dependency_overrides: + del app.dependency_overrides[get_current_session_data] + + +@pytest.mark.asyncio +async def test_unauthorized_access(client, test_user, db_session): + """Test accessing protected endpoint without authentication.""" + response = await client.get("/api/v1/users/me") + + assert response.status_code == 401 + data = response.json() + assert "detail" in data + assert "Not authenticated" in data["detail"] + + +@pytest.mark.asyncio +async def test_protected_endpoint_access(client, test_user, db_session, monkeypatch): + 
"""Test accessing protected endpoint with valid session.""" + test_user_with_image = test_user.copy() + test_user_with_image["profile_image_url"] = "https://example.com/default-avatar.png" + + app.dependency_overrides[get_current_user] = lambda: test_user_with_image + app.dependency_overrides[async_session] = lambda: db_session + app.dependency_overrides[get_session_from_cookie] = lambda request: None + app.dependency_overrides[verify_csrf_token] = lambda request, session_data=None: None + + try: + response = await client.get("/api/v1/users/me") + + print(f"Status Code: {response.status_code}") + print(f"Response Body: {response.text}") + + assert response.status_code == 200 + user_data = response.json() + assert user_data["id"] == test_user["id"] + assert user_data["username"] == test_user["username"] + assert user_data["profile_image_url"] == test_user_with_image["profile_image_url"] + finally: + if get_current_user in app.dependency_overrides: + del app.dependency_overrides[get_current_user] + if async_session in app.dependency_overrides: + del app.dependency_overrides[async_session] + if get_session_from_cookie in app.dependency_overrides: + del app.dependency_overrides[get_session_from_cookie] + if verify_csrf_token in app.dependency_overrides: + del app.dependency_overrides[verify_csrf_token] + + +@pytest.mark.asyncio +async def test_csrf_protection(client, test_user, db_session): + """Test CSRF protection for mutation operations.""" + session_data = SessionData( + session_id="test-session-id", + user_id=test_user["id"], + is_active=True, + ip_address="127.0.0.1", + user_agent="test-agent", + device_info={}, + last_activity=datetime.now(UTC), + metadata={}, + ) + + with ( + patch("src.infrastructure.auth.session.dependencies.get_session_from_cookie", return_value=session_data), + patch( + "src.infrastructure.auth.session.dependencies.verify_csrf_token", side_effect=Exception("CSRF validation failed") + ), + ): + response = await client.patch( + 
f"/api/v1/users/{test_user['username']}", + json={"name": "Updated Name"}, + ) + + assert response.status_code in (401, 403) + + +@pytest.mark.asyncio +async def test_login_endpoint_with_rate_limiting(client, test_user, db_session, monkeypatch): + """Test that the login endpoint correctly applies rate limiting.""" + mock_manager = MagicMock(spec=SessionManager) + + mock_manager.track_login_attempt = AsyncMock(return_value=(True, 4)) + mock_manager.create_session = AsyncMock(return_value=("test-session-id", "test-csrf-token")) + mock_manager.set_session_cookies = MagicMock() + + app.dependency_overrides[get_session_manager] = lambda: mock_manager + + try: + response = await client.post( + "/api/v1/auth/login", + data={ + "username": test_user["username"], + "password": "Password123!", + }, + ) + + assert response.status_code == 200 + data = response.json() + assert "csrf_token" in data + assert data["csrf_token"] == "test-csrf-token" + + mock_manager.track_login_attempt.assert_any_call(ip_address="127.0.0.1", username=test_user["username"], success=False) + mock_manager.track_login_attempt.assert_any_call(ip_address="127.0.0.1", username=test_user["username"], success=True) + + mock_manager.track_login_attempt.reset_mock() + mock_manager.track_login_attempt = AsyncMock(return_value=(False, 0)) + + response = await client.post( + "/api/v1/auth/login", + data={ + "username": test_user["username"], + "password": "Password123!", + }, + ) + + assert response.status_code == 401 + + mock_manager.track_login_attempt.assert_called_once_with( + ip_address="127.0.0.1", username=test_user["username"], success=False + ) + + assert mock_manager.create_session.call_count == 1 + + finally: + if get_session_manager in app.dependency_overrides: + del app.dependency_overrides[get_session_manager] diff --git a/backend/tests/unit/infrastructure/auth/session/test_dependencies.py b/backend/tests/unit/infrastructure/auth/session/test_dependencies.py new file mode 100644 index 
00000000..a22c3193 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/test_dependencies.py @@ -0,0 +1,156 @@ +from datetime import UTC, datetime +from unittest.mock import AsyncMock, MagicMock + +import pytest +from fastapi import HTTPException, Request, status + +from src.infrastructure.auth.http_exceptions import CSRFException +from src.infrastructure.auth.session.manager import SessionManager +from src.infrastructure.auth.session.schemas import SessionData + + +async def mock_get_session_from_cookie(request, session_manager): + session_id = request.cookies.get("session_id") + if not session_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Not authenticated", + ) + + session_data = await session_manager.validate_session(session_id) + if not session_data: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired session", + ) + + return session_data + + +async def mock_verify_csrf_token(request, session_data, session_manager): + if request.method in ("GET", "HEAD", "OPTIONS"): + return + + if not session_data: + return + + token = request.headers.get("X-CSRF-Token") + if not token: + raise CSRFException("CSRF token missing") + + is_valid = await session_manager.validate_csrf_token(session_data.session_id, token) + if not is_valid: + raise CSRFException("Invalid CSRF token") + + +@pytest.fixture +def mock_request(): + request = MagicMock(spec=Request) + request.cookies = {"session_id": "test-session-id"} + request.headers = {"X-CSRF-Token": "test-csrf-token"} + request.method = "POST" + return request + + +@pytest.fixture +def mock_session_data(): + return SessionData( + session_id="test-session-id", + user_id=1, + is_active=True, + ip_address="127.0.0.1", + user_agent="test-agent", + device_info={}, + last_activity=datetime.now(UTC), + metadata={}, + ) + + +@pytest.fixture +def mock_session_manager(): + session_manager = MagicMock(spec=SessionManager) + 
session_manager.validate_session = AsyncMock() + session_manager.validate_csrf_token = AsyncMock() + session_manager.cleanup_expired_sessions = AsyncMock() + return session_manager + + +@pytest.mark.asyncio +async def test_get_session_from_cookie(mock_request, mock_session_data, mock_session_manager): + """Test getting session data from a cookie.""" + mock_session_manager.validate_session.return_value = mock_session_data + + result = await mock_get_session_from_cookie(mock_request, mock_session_manager) + + assert result == mock_session_data + mock_session_manager.validate_session.assert_called_once_with("test-session-id") + + +@pytest.mark.asyncio +async def test_get_session_from_cookie_no_cookie(mock_request, mock_session_manager): + """Test when no session cookie is present.""" + mock_request.cookies = {} + + with pytest.raises(HTTPException) as exc_info: + await mock_get_session_from_cookie(mock_request, mock_session_manager) + + assert exc_info.value.status_code == status.HTTP_401_UNAUTHORIZED + assert "Not authenticated" in exc_info.value.detail + + +@pytest.mark.asyncio +async def test_get_session_from_cookie_invalid_session(mock_request, mock_session_manager): + """Test when session is invalid or expired.""" + mock_session_manager.validate_session.return_value = None + + with pytest.raises(HTTPException) as exc_info: + await mock_get_session_from_cookie(mock_request, mock_session_manager) + + assert exc_info.value.status_code == status.HTTP_401_UNAUTHORIZED + assert "Invalid or expired session" in exc_info.value.detail + mock_session_manager.validate_session.assert_called_once_with("test-session-id") + + +@pytest.mark.asyncio +async def test_verify_csrf_token_valid(mock_request, mock_session_data, mock_session_manager): + """Test verifying a valid CSRF token.""" + mock_session_manager.validate_csrf_token.return_value = True + + await mock_verify_csrf_token( + request=mock_request, + session_data=mock_session_data, + session_manager=mock_session_manager, + ) 
+ + mock_session_manager.validate_csrf_token.assert_called_once_with("test-session-id", "test-csrf-token") + + +@pytest.mark.asyncio +async def test_verify_csrf_token_missing(mock_request, mock_session_data, mock_session_manager): + """Test when CSRF token is missing.""" + mock_request.headers = {} + + with pytest.raises(CSRFException) as exc_info: + await mock_verify_csrf_token( + request=mock_request, + session_data=mock_session_data, + session_manager=mock_session_manager, + ) + + assert "CSRF token missing" in str(exc_info.value) + + +@pytest.mark.asyncio +async def test_verify_csrf_token_invalid(mock_request, mock_session_data, mock_session_manager): + """Test when CSRF token is invalid.""" + mock_session_manager.validate_csrf_token.return_value = False + + with pytest.raises(CSRFException) as exc_info: + await mock_verify_csrf_token( + request=mock_request, + session_data=mock_session_data, + session_manager=mock_session_manager, + ) + + assert "Invalid CSRF token" in str(exc_info.value) + mock_session_manager.validate_csrf_token.assert_called_once_with("test-session-id", "test-csrf-token") diff --git a/backend/tests/unit/infrastructure/auth/session/test_manager.py b/backend/tests/unit/infrastructure/auth/session/test_manager.py new file mode 100644 index 00000000..e8e93c0c --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/test_manager.py @@ -0,0 +1,398 @@ +from datetime import UTC, datetime, timedelta +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Response + +from src.infrastructure.auth.session.manager import SessionManager +from src.infrastructure.auth.session.schemas import CSRFToken, SessionData, UserAgentInfo + + +@pytest.fixture +def mock_storage(): + """Create a mock session storage.""" + storage = AsyncMock() + storage.create = AsyncMock() + storage.get = AsyncMock() + storage.update = AsyncMock() + storage.delete = AsyncMock() + storage.extend = AsyncMock() + storage.exists = AsyncMock() 
+ storage.get_user_sessions = AsyncMock(return_value=[]) + return storage + + +@pytest.fixture +def mock_csrf_storage(): + """Create a mock CSRF token storage.""" + storage = AsyncMock() + storage.create = AsyncMock() + storage.get = AsyncMock() + storage.delete = AsyncMock() + return storage + + +@pytest.fixture +def session_manager(mock_storage, mock_csrf_storage): + """Create a session manager with mock storage.""" + with ( + patch("src.infrastructure.auth.session.manager.get_session_storage", return_value=mock_storage), + patch("src.infrastructure.auth.session.storage.get_session_storage", return_value=mock_csrf_storage), + ): + manager = SessionManager(session_storage=mock_storage) + manager.csrf_storage = mock_csrf_storage + return manager + + +@pytest.fixture +def mock_request(): + """Create a mock request for session creation.""" + request = MagicMock() + request.client.host = "127.0.0.1" + request.headers = { + "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Chrome/91.0.4472.124 Safari/537.36", + "x-forwarded-for": "192.168.1.1", + } + return request + + +@pytest.fixture +def mock_response(): + """Create a mock response for cookie handling.""" + response = MagicMock(spec=Response) + response.set_cookie = MagicMock() + response.delete_cookie = MagicMock() + return response + + +@pytest.mark.asyncio +async def test_create_session(session_manager, mock_storage, mock_csrf_storage, mock_request): + """Test creating a new session with CSRF token.""" + user_id = 1 + session_id = "test-session-id" + csrf_token = "test-csrf-token" + + mock_storage.create.return_value = session_id + + with patch.object(session_manager, "_generate_csrf_token", return_value=csrf_token) as mock_gen: + result_session_id, result_csrf_token = await session_manager.create_session( + request=mock_request, + user_id=user_id, + ) + + assert result_session_id == session_id + assert result_csrf_token == csrf_token + + mock_storage.create.assert_called_once() + 
mock_gen.assert_called_once() + + create_args = mock_storage.create.call_args[0][0] + assert create_args.user_id == user_id + assert create_args.ip_address == "192.168.1.1" + + +@pytest.mark.asyncio +async def test_validate_session_valid(session_manager, mock_storage): + """Test validating a valid, non-expired session.""" + session_id = "test-session-id" + + current_time = datetime.now(UTC) + session_data = SessionData( + session_id=session_id, + user_id=1, + is_active=True, + ip_address="127.0.0.1", + user_agent="test_agent", + device_info={}, + last_activity=current_time - timedelta(minutes=5), + metadata={}, + ) + + mock_storage.get.return_value = session_data + mock_storage.update.return_value = True + + result = await session_manager.validate_session(session_id) + + assert result is not None + assert result.session_id == session_id + assert result.user_id == 1 + + mock_storage.get.assert_called_once_with(session_id, SessionData) + mock_storage.update.assert_called_once() + + +@pytest.mark.asyncio +async def test_validate_session_expired(session_manager, mock_storage): + """Test validating an expired session.""" + session_id = "test-session-id" + + current_time = datetime.now(UTC) + session_data = SessionData( + session_id=session_id, + user_id=1, + is_active=True, + ip_address="127.0.0.1", + user_agent="test_agent", + device_info={}, + last_activity=current_time - timedelta(minutes=45), + metadata={}, + ) + + mock_storage.get.return_value = session_data + + result = await session_manager.validate_session(session_id) + + assert result is None + + assert mock_storage.get.call_count >= 1 + assert mock_storage.get.call_args_list[0][0][0] == session_id + assert mock_storage.get.call_args_list[0][0][1] == SessionData + + assert mock_storage.update.call_count == 1 + update_args = mock_storage.update.call_args[0] + assert update_args[0] == session_id + assert update_args[1].is_active is False + assert "terminated_at" in update_args[1].metadata + + 
+@pytest.mark.asyncio +async def test_validate_session_inactive(session_manager, mock_storage): + """Test validating an inactive session.""" + session_id = "test-session-id" + + current_time = datetime.now(UTC) + session_data = SessionData( + session_id=session_id, + user_id=1, + is_active=False, + ip_address="127.0.0.1", + user_agent="test_agent", + device_info={}, + last_activity=current_time - timedelta(minutes=5), + metadata={}, + ) + + mock_storage.get.return_value = session_data + + result = await session_manager.validate_session(session_id) + + assert result is None + + mock_storage.get.assert_called_once_with(session_id, SessionData) + mock_storage.update.assert_not_called() + + +@pytest.mark.asyncio +async def test_validate_csrf_token_valid(session_manager, mock_csrf_storage): + """Test validating a valid CSRF token.""" + session_id = "test-session-id" + csrf_token = "test-csrf-token" + + current_time = datetime.now(UTC) + token_data = CSRFToken( + token=csrf_token, + user_id=1, + session_id=session_id, + expiry=current_time + timedelta(minutes=30), + ) + + mock_csrf_storage.get.return_value = token_data + + result = await session_manager.validate_csrf_token(session_id, csrf_token) + + assert result is True + + mock_csrf_storage.get.assert_called_once_with(csrf_token, CSRFToken) + + +@pytest.mark.asyncio +async def test_validate_csrf_token_expired(session_manager, mock_csrf_storage): + """Test validating an expired CSRF token.""" + session_id = "test-session-id" + csrf_token = "test-csrf-token" + + current_time = datetime.now(UTC) + token_data = CSRFToken( + token=csrf_token, + user_id=1, + session_id=session_id, + expiry=current_time - timedelta(minutes=5), + ) + + mock_csrf_storage.get.return_value = token_data + + result = await session_manager.validate_csrf_token(session_id, csrf_token) + + assert result is False + + mock_csrf_storage.get.assert_called_once_with(csrf_token, CSRFToken) + mock_csrf_storage.delete.assert_called_once_with(csrf_token) + + 
+@pytest.mark.asyncio +async def test_validate_csrf_token_mismatched_session(session_manager, mock_csrf_storage): + """Test validating a CSRF token with wrong session ID.""" + session_id = "test-session-id" + wrong_session_id = "wrong-session-id" + csrf_token = "test-csrf-token" + + current_time = datetime.now(UTC) + token_data = CSRFToken( + token=csrf_token, + user_id=1, + session_id=wrong_session_id, + expiry=current_time + timedelta(minutes=30), + ) + + mock_csrf_storage.get.return_value = token_data + + result = await session_manager.validate_csrf_token(session_id, csrf_token) + + assert result is False + + mock_csrf_storage.get.assert_called_once_with(csrf_token, CSRFToken) + + +@pytest.mark.asyncio +async def test_regenerate_csrf_token(session_manager, mock_csrf_storage): + """Test regenerating a CSRF token for an existing session.""" + user_id = 1 + session_id = "test-session-id" + new_csrf_token = "new-csrf-token" + + with patch.object(session_manager, "_generate_csrf_token", return_value=new_csrf_token) as mock_generate: + result = await session_manager.regenerate_csrf_token(user_id, session_id) + + assert result == new_csrf_token + + mock_generate.assert_called_once_with(user_id, session_id) + + +@pytest.mark.asyncio +async def test_terminate_session(session_manager, mock_storage): + """Test terminating a session.""" + session_id = "test-session-id" + + session_data = SessionData( + session_id=session_id, + user_id=1, + is_active=True, + ip_address="127.0.0.1", + user_agent="test_agent", + device_info={}, + last_activity=datetime.now(UTC), + metadata={}, + ) + + mock_storage.get.return_value = session_data + mock_storage.update.return_value = True + + result = await session_manager.terminate_session(session_id) + + assert result is True + + mock_storage.get.assert_called_once_with(session_id, SessionData) + mock_storage.update.assert_called_once() + + update_args = mock_storage.update.call_args[0] + assert update_args[0] == session_id + assert not 
update_args[1].is_active + assert "terminated_at" in update_args[1].metadata + assert "termination_reason" in update_args[1].metadata + + +@pytest.mark.asyncio +async def test_set_session_cookies(session_manager, mock_response): + """Test setting session cookies.""" + session_id = "test-session-id" + csrf_token = "test-csrf-token" + + session_manager.set_session_cookies( + response=mock_response, + session_id=session_id, + csrf_token=csrf_token, + ) + + assert mock_response.set_cookie.call_count == 2 + + session_cookie_args = mock_response.set_cookie.call_args_list[0][1] + assert session_cookie_args["key"] == "session_id" + assert session_cookie_args["value"] == session_id + assert session_cookie_args["httponly"] is True + + csrf_cookie_args = mock_response.set_cookie.call_args_list[1][1] + assert csrf_cookie_args["key"] == "csrf_token" + assert csrf_cookie_args["value"] == csrf_token + assert csrf_cookie_args["httponly"] is False + + +@pytest.mark.asyncio +async def test_clear_session_cookies(session_manager, mock_response): + """Test clearing session cookies.""" + session_manager.clear_session_cookies(response=mock_response) + + assert mock_response.delete_cookie.call_count == 2 + + assert mock_response.delete_cookie.call_args_list[0][1]["key"] == "session_id" + assert mock_response.delete_cookie.call_args_list[1][1]["key"] == "csrf_token" + + +@pytest.mark.asyncio +async def test_parse_user_agent(session_manager): + """Test parsing a user agent string.""" + ua_string = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36" + ) + + result = session_manager.parse_user_agent(ua_string) + + assert isinstance(result, UserAgentInfo) + assert result.browser == "Chrome" + assert "91.0" in result.browser_version + assert result.os == "Windows" + assert result.is_pc is True + assert result.is_mobile is False + + ua_string = ( + "Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 " 
+ "(KHTML, like Gecko) Version/14.0 Mobile/15E148 Safari/604.1" + ) + + result = session_manager.parse_user_agent(ua_string) + + assert result.browser == "Mobile Safari" + assert result.os == "iOS" + assert result.device == "iPhone" + assert result.is_mobile is True + assert result.is_pc is False + + +@pytest.mark.asyncio +async def test_enforce_session_limit(session_manager, mock_storage): + """Test enforcing maximum sessions per user.""" + user_id = 1 + active_sessions = [] + + for i in range(6): + session_data = SessionData( + session_id=f"session-{i}", + user_id=user_id, + is_active=True, + ip_address="127.0.0.1", + user_agent="test-agent", + device_info={}, + last_activity=datetime.now(UTC) - timedelta(minutes=i), + metadata={}, + ) + active_sessions.append(session_data) + + mock_storage.get_user_sessions.return_value = [s.session_id for s in active_sessions] + mock_storage.get.side_effect = lambda sid, cls: next((s for s in active_sessions if s.session_id == sid), None) + + await session_manager._enforce_session_limit(user_id) + + assert mock_storage.update.call_count == 2 + + terminated_sessions = [args[0][0] for args in mock_storage.update.call_args_list] + assert "session-5" in terminated_sessions + assert "session-4" in terminated_sessions diff --git a/backend/tests/unit/infrastructure/auth/session/test_rate_limiting.py b/backend/tests/unit/infrastructure/auth/session/test_rate_limiting.py new file mode 100644 index 00000000..cd893c83 --- /dev/null +++ b/backend/tests/unit/infrastructure/auth/session/test_rate_limiting.py @@ -0,0 +1,146 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Request, Response +from fastapi.security import OAuth2PasswordRequestForm +from sqlalchemy.ext.asyncio import AsyncSession + +from src.infrastructure.auth.routes import login +from src.infrastructure.auth.session.manager import SessionManager + + +@pytest.mark.asyncio +async def test_track_login_attempt_with_rate_limiter(): + 
"""Test tracking login attempts when a rate limiter is configured.""" + mock_rate_limiter = MagicMock() + mock_rate_limiter.increment = AsyncMock(side_effect=[1, 1, 2, 2, 3, 3, 4, 4, 5, 5]) + mock_rate_limiter.delete = AsyncMock(return_value=True) + + mock_storage = MagicMock() + + session_manager = SessionManager( + session_storage=mock_storage, rate_limiter=mock_rate_limiter, login_max_attempts=5, login_window_minutes=15 + ) + + is_allowed, remaining = await session_manager.track_login_attempt( + ip_address="192.168.1.1", username="testuser", success=True + ) + + assert is_allowed is True + assert remaining is None + assert mock_rate_limiter.delete.call_count == 2 + mock_rate_limiter.delete.assert_any_call("login:ip:192.168.1.1") + mock_rate_limiter.delete.assert_any_call("login:user:testuser") + + mock_rate_limiter.delete.reset_mock() + mock_rate_limiter.increment.reset_mock() + + for i in range(1, 5): + is_allowed, remaining = await session_manager.track_login_attempt( + ip_address="192.168.1.1", username="testuser", success=False + ) + + assert is_allowed is True + assert remaining == (5 - i) + assert mock_rate_limiter.increment.call_count == i * 2 + + mock_rate_limiter.increment.side_effect = [6, 6] + + is_allowed, remaining = await session_manager.track_login_attempt( + ip_address="192.168.1.1", username="testuser", success=False + ) + + assert is_allowed is False + assert remaining == 0 + + +@pytest.mark.asyncio +async def test_track_login_attempt_without_rate_limiter(): + """Test tracking login attempts when no rate limiter is configured.""" + mock_storage = MagicMock() + + session_manager = SessionManager( + session_storage=mock_storage, rate_limiter=None, login_max_attempts=5, login_window_minutes=15 + ) + + is_allowed, remaining = await session_manager.track_login_attempt( + ip_address="192.168.1.1", username="testuser", success=True + ) + + assert is_allowed is True + assert remaining is None + + is_allowed, remaining = await 
session_manager.track_login_attempt( + ip_address="192.168.1.1", username="testuser", success=False + ) + + assert is_allowed is True + assert remaining is None + + +@pytest.mark.asyncio +async def test_login_endpoint_rate_limiting(): + """Test that the login endpoint applies rate limiting.""" + mock_request = MagicMock(spec=Request) + mock_request.client.host = "192.168.1.1" + + mock_response = MagicMock(spec=Response) + + mock_form_data = MagicMock(spec=OAuth2PasswordRequestForm) + mock_form_data.username = "testuser" + mock_form_data.password = "password123" + + mock_db = MagicMock(spec=AsyncSession) + + mock_session_manager = MagicMock() + + mock_session_manager.track_login_attempt = AsyncMock(return_value=(True, 4)) + mock_auth_user = AsyncMock(return_value={"id": 1, "username": "testuser"}) + mock_session_manager.create_session = AsyncMock(return_value=("session_id", "csrf_token")) + + with patch("src.infrastructure.auth.routes.authenticate_user", mock_auth_user): + result = await login( + request=mock_request, + response=mock_response, + form_data=mock_form_data, + db=mock_db, + session_manager=mock_session_manager, + ) + + assert result == {"csrf_token": "csrf_token"} + assert mock_session_manager.track_login_attempt.call_count == 2 + mock_session_manager.track_login_attempt.assert_any_call(ip_address="192.168.1.1", username="testuser", success=False) + mock_session_manager.track_login_attempt.assert_any_call(ip_address="192.168.1.1", username="testuser", success=True) + + mock_session_manager.reset_mock() + mock_auth_user.reset_mock() + + mock_session_manager.track_login_attempt = AsyncMock(return_value=(False, 0)) + + with patch("src.infrastructure.auth.routes.authenticate_user", mock_auth_user): + with pytest.raises(Exception) as excinfo: + await login( + request=mock_request, + response=mock_response, + form_data=mock_form_data, + db=mock_db, + session_manager=mock_session_manager, + ) + + assert "Too many failed login attempts" in str(excinfo.value) + 
assert mock_auth_user.call_count == 0 + + +@pytest.mark.asyncio +async def test_cleanup_rate_limits(): + """Test cleanup_rate_limits method.""" + mock_rate_limiter = MagicMock() + mock_rate_limiter.delete_pattern = AsyncMock(return_value=5) + + mock_storage = MagicMock() + + session_manager = SessionManager(session_storage=mock_storage, rate_limiter=mock_rate_limiter) + + await session_manager.cleanup_rate_limits() + + mock_rate_limiter.delete_pattern.assert_called_once_with("login:*") diff --git a/src/scripts/__init__.py b/backend/tests/unit/infrastructure/cache/__init__.py similarity index 100% rename from src/scripts/__init__.py rename to backend/tests/unit/infrastructure/cache/__init__.py diff --git a/tests/__init__.py b/backend/tests/unit/infrastructure/cache/backends/__init__.py similarity index 100% rename from tests/__init__.py rename to backend/tests/unit/infrastructure/cache/backends/__init__.py diff --git a/backend/tests/unit/infrastructure/cache/backends/test_memcached.py b/backend/tests/unit/infrastructure/cache/backends/test_memcached.py new file mode 100644 index 00000000..232b1399 --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/backends/test_memcached.py @@ -0,0 +1,75 @@ +import json +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from src.infrastructure.cache.backends.memcached import ( + MemcachedBackend, + MemcachedSettings, + PatternMatchingNotSupportedError, +) + + +@pytest.fixture +def mock_memcached(): + memcached_mock = MagicMock() + memcached_mock.get = AsyncMock() + memcached_mock.set = AsyncMock() + memcached_mock.delete = AsyncMock() + return memcached_mock + + +@pytest.fixture +def memcached_backend(mock_memcached): + with patch( + "src.infrastructure.cache.backends.memcached.aiomcache.Client", + return_value=mock_memcached, + ): + settings = MemcachedSettings(host="localhost", port=11211) + backend = MemcachedBackend(settings=settings) + backend.client = mock_memcached + return backend + + 
+@pytest.mark.asyncio +async def test_get_existing_key(memcached_backend, mock_memcached): + test_data = {"key": "value"} + serialized_data = json.dumps(test_data).encode() + mock_memcached.get.return_value = serialized_data + + result = await memcached_backend.get("test_key") + + assert result == test_data + mock_memcached.get.assert_called_once_with(b"test_key") + + +@pytest.mark.asyncio +async def test_get_nonexistent_key(memcached_backend, mock_memcached): + mock_memcached.get.return_value = None + + result = await memcached_backend.get("test_key") + + assert result is None + mock_memcached.get.assert_called_once_with(b"test_key") + + +@pytest.mark.asyncio +async def test_set_key(memcached_backend, mock_memcached): + test_data = {"key": "value"} + + await memcached_backend.set("test_key", test_data, 3600) + + mock_memcached.set.assert_called_once_with(b"test_key", json.dumps(test_data).encode(), exptime=3600) + + +@pytest.mark.asyncio +async def test_delete_key(memcached_backend, mock_memcached): + await memcached_backend.delete("test_key") + + mock_memcached.delete.assert_called_once_with(b"test_key") + + +@pytest.mark.asyncio +async def test_delete_pattern_raises_error(memcached_backend): + with pytest.raises(PatternMatchingNotSupportedError): + await memcached_backend.delete_pattern("test_*") diff --git a/backend/tests/unit/infrastructure/cache/backends/test_redis.py b/backend/tests/unit/infrastructure/cache/backends/test_redis.py new file mode 100644 index 00000000..e20e539d --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/backends/test_redis.py @@ -0,0 +1,75 @@ +import json +from unittest.mock import AsyncMock, patch + +import pytest + +from src.infrastructure.cache.backends.redis import RedisBackend, RedisSettings + + +@pytest.fixture +def mock_redis(): + redis_mock = AsyncMock() + redis_mock.get = AsyncMock() + redis_mock.set = AsyncMock() + redis_mock.delete = AsyncMock() + redis_mock.keys = AsyncMock() + redis_mock.scan = AsyncMock() + 
return redis_mock + + +@pytest.fixture +def redis_backend(mock_redis): + with patch("src.infrastructure.cache.backends.redis.Redis", return_value=mock_redis): + settings = RedisSettings(host="localhost", port=6379, password=None, db=0) + backend = RedisBackend(settings=settings) + backend.client = mock_redis + return backend + + +@pytest.mark.asyncio +async def test_get_existing_key(redis_backend, mock_redis): + test_data = {"key": "value"} + serialized_data = json.dumps(test_data).encode() + mock_redis.get.return_value = serialized_data + + result = await redis_backend.get("test_key") + + assert result == test_data + mock_redis.get.assert_called_once_with("test_key") + + +@pytest.mark.asyncio +async def test_get_nonexistent_key(redis_backend, mock_redis): + mock_redis.get.return_value = None + + result = await redis_backend.get("test_key") + + assert result is None + mock_redis.get.assert_called_once_with("test_key") + + +@pytest.mark.asyncio +async def test_set_key(redis_backend, mock_redis): + test_data = {"key": "value"} + + await redis_backend.set("test_key", test_data, 3600) + + mock_redis.set.assert_called_once_with("test_key", json.dumps(test_data).encode(), ex=3600) + + +@pytest.mark.asyncio +async def test_delete_key(redis_backend, mock_redis): + await redis_backend.delete("test_key") + + mock_redis.delete.assert_called_once_with("test_key") + + +@pytest.mark.asyncio +async def test_delete_pattern(redis_backend, mock_redis): + mock_redis.scan.return_value = (0, [b"key1", b"key2", b"key3"]) + + await redis_backend.delete_pattern("test_") + + mock_redis.scan.assert_called_once_with(cursor=0, match="test_*", count=100) + + mock_redis.delete.assert_called_once_with(b"key1", b"key2", b"key3") diff --git a/backend/tests/unit/infrastructure/cache/test_decorator.py b/backend/tests/unit/infrastructure/cache/test_decorator.py new file mode 100644 index 00000000..23e9531b --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/test_decorator.py @@ -0,0 +1,148 @@ 
+from unittest.mock import AsyncMock, patch + +import pytest +from fastapi import Request + +from src.infrastructure.cache.decorator import cache +from src.infrastructure.cache.exceptions import InvalidRequestError + + +@pytest.fixture +def mock_backend(): + mock = AsyncMock() + mock.get = AsyncMock() + mock.set = AsyncMock() + mock.delete = AsyncMock() + mock.delete_pattern = AsyncMock() + return mock + + +@pytest.fixture +def mock_cache_provider(): + with patch("src.infrastructure.cache.decorator.cache_provider") as mock_provider: + yield mock_provider + + +@pytest.mark.asyncio +async def test_cache_get_request_hit(mock_backend, mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + cached_response = {"data": "cached_value"} + mock_backend.get.return_value = cached_response + + @cache(key_prefix="test", resource_id_name="item_id") + async def mock_endpoint(request: Request, item_id: int): + pytest.fail("Endpoint was called despite cache hit") + + mock_request = AsyncMock() + mock_request.method = "GET" + + result = await mock_endpoint(mock_request, item_id=123) + + assert result == cached_response + mock_backend.get.assert_called_once_with("test:123") + mock_backend.set.assert_not_called() + + +@pytest.mark.asyncio +async def test_cache_get_request_miss(mock_backend, mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + mock_backend.get.return_value = None + expected_response = {"data": "fresh_value"} + + @cache(key_prefix="test", resource_id_name="item_id") + async def mock_endpoint(request: Request, item_id: int): + return expected_response + + mock_request = AsyncMock() + mock_request.method = "GET" + + result = await mock_endpoint(mock_request, item_id=123) + + assert result == expected_response + mock_backend.get.assert_called_once_with("test:123") + mock_backend.set.assert_called_once() + + +@pytest.mark.asyncio +async def test_cache_non_get_request_invalidation(mock_backend, 
mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + expected_response = {"data": "updated_value"} + + @cache(key_prefix="test", resource_id_name="item_id") + async def mock_endpoint(request: Request, item_id: int): + return expected_response + + mock_request = AsyncMock() + mock_request.method = "PUT" + + result = await mock_endpoint(mock_request, item_id=123) + + assert result == expected_response + mock_backend.get.assert_not_called() + mock_backend.set.assert_not_called() + mock_backend.delete.assert_called_once_with("test:123") + + +@pytest.mark.asyncio +async def test_cache_extra_invalidation(mock_backend, mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + expected_response = {"data": "updated_value"} + + @cache( + key_prefix="test", + resource_id_name="item_id", + to_invalidate_extra={"related": "related_id"}, + ) + async def mock_endpoint(request: Request, item_id: int, related_id: int): + return expected_response + + mock_request = AsyncMock() + mock_request.method = "PUT" + + result = await mock_endpoint(mock_request, item_id=123, related_id=456) + + assert result == expected_response + mock_backend.delete.assert_any_call("test:123") + mock_backend.delete.assert_any_call("related:456") + + +@pytest.mark.asyncio +async def test_cache_pattern_invalidation(mock_backend, mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + expected_response = {"data": "updated_value"} + + @cache( + key_prefix="test", + resource_id_name="item_id", + pattern_to_invalidate_extra=["pattern_*"], + ) + async def mock_endpoint(request: Request, item_id: int): + return expected_response + + mock_request = AsyncMock() + mock_request.method = "PUT" + + result = await mock_endpoint(mock_request, item_id=123) + + assert result == expected_response + mock_backend.delete.assert_called_once_with("test:123") + mock_backend.delete_pattern.assert_called_once_with("pattern_*") + + 
+@pytest.mark.asyncio +async def test_cache_get_with_invalidation_raises_error(mock_backend, mock_cache_provider): + mock_cache_provider.get_backend.return_value = mock_backend + + @cache( + key_prefix="test", + resource_id_name="item_id", + to_invalidate_extra={"related": "related_id"}, + ) + async def mock_endpoint(request: Request, item_id: int, related_id: int): + return {"data": "value"} + + mock_request = AsyncMock() + mock_request.method = "GET" + + with pytest.raises(InvalidRequestError): + await mock_endpoint(mock_request, item_id=123, related_id=456) diff --git a/backend/tests/unit/infrastructure/cache/test_integration.py b/backend/tests/unit/infrastructure/cache/test_integration.py new file mode 100644 index 00000000..bb14c226 --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/test_integration.py @@ -0,0 +1,98 @@ +from unittest.mock import AsyncMock, patch + +import pytest +from fastapi import FastAPI, Request +from fastapi.testclient import TestClient + +from src.infrastructure.cache.decorator import cache +from src.infrastructure.cache.provider import cache_provider + + +@pytest.fixture +def mock_redis_backend(): + redis_mock = AsyncMock() + redis_mock.get = AsyncMock(return_value=None) + redis_mock.set = AsyncMock() + redis_mock.delete = AsyncMock() + redis_mock.delete_pattern = AsyncMock() + return redis_mock + + +@pytest.fixture +def app(mock_redis_backend): + with patch.object(cache_provider, "get_backend", return_value=mock_redis_backend): + app = FastAPI() + + @app.get("/users/{user_id}") + @cache(key_prefix="user", resource_id_name="user_id") + async def get_user(request: Request, user_id: int): + return {"id": user_id, "name": f"User {user_id}"} + + @app.post("/users/{user_id}") + @cache(key_prefix="user", resource_id_name="user_id") + async def update_user(request: Request, user_id: int, name: str | None = None): + return {"id": user_id, "name": name, "updated": True} + + @app.put("/orgs/{org_id}/users/{user_id}") + @cache( + 
key_prefix="org_user", + resource_id_name="user_id", + to_invalidate_extra={"user": "user_id", "org": "org_id"}, + ) + async def update_org_user(request: Request, org_id: int, user_id: int): + return {"org_id": org_id, "user_id": user_id, "updated": True} + + yield app + + +@pytest.fixture +def client(app): + return TestClient(app) + + +def test_get_with_cache_miss(client, mock_redis_backend): + mock_redis_backend.get.return_value = None + response = client.get("/users/123") + + assert response.status_code == 200 + assert response.json() == {"id": 123, "name": "User 123"} + + mock_redis_backend.get.assert_called_once_with("user:123") + mock_redis_backend.set.assert_called_once() + assert mock_redis_backend.delete.call_count == 0 + + +def test_get_with_cache_hit(client, mock_redis_backend): + cached_data = {"id": 123, "name": "Cached User 123"} + mock_redis_backend.get.return_value = cached_data + + response = client.get("/users/123") + + assert response.status_code == 200 + assert response.json() == cached_data + + mock_redis_backend.get.assert_called_once_with("user:123") + mock_redis_backend.set.assert_not_called() + + +def test_post_invalidates_cache(client, mock_redis_backend): + response = client.post("/users/123?name=Updated%20User") + + assert response.status_code == 200 + assert response.json() == {"id": 123, "name": "Updated User", "updated": True} + + mock_redis_backend.get.assert_not_called() + mock_redis_backend.set.assert_not_called() + mock_redis_backend.delete.assert_called_once_with("user:123") + + +def test_put_with_extra_invalidation(client, mock_redis_backend): + response = client.put("/orgs/456/users/123") + + assert response.status_code == 200 + assert response.json() == {"org_id": 456, "user_id": 123, "updated": True} + + mock_redis_backend.delete.assert_any_call("org_user:123") + mock_redis_backend.delete.assert_any_call("user:123") + mock_redis_backend.delete.assert_any_call("org:456") + assert mock_redis_backend.delete.call_count == 3 diff 
--git a/backend/tests/unit/infrastructure/cache/test_provider.py b/backend/tests/unit/infrastructure/cache/test_provider.py new file mode 100644 index 00000000..7ce2e8d9 --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/test_provider.py @@ -0,0 +1,56 @@ +from unittest.mock import MagicMock + +import pytest + +from src.infrastructure.cache.exceptions import BackendNotFoundError +from src.infrastructure.cache.provider import CacheProvider + + +@pytest.fixture +def mock_backends(): + return {"redis": MagicMock(), "memcached": MagicMock()} + + +@pytest.fixture +def cache_provider(mock_backends): + provider = CacheProvider() + provider._backends = mock_backends + provider._default_backend = "redis" + return provider + + +def test_get_backend_default(cache_provider): + backend = cache_provider.get_backend() + assert backend == cache_provider._backends["redis"] + + +def test_get_backend_specific(cache_provider): + backend = cache_provider.get_backend("memcached") + assert backend == cache_provider._backends["memcached"] + + +def test_get_backend_not_found(cache_provider): + with pytest.raises(BackendNotFoundError): + cache_provider.get_backend("nonexistent") + + +def test_register_backend(cache_provider): + new_backend = MagicMock() + cache_provider.register_backend("new_backend", new_backend) + assert cache_provider._backends["new_backend"] == new_backend + + returned_backend = cache_provider.get_backend("new_backend") + assert returned_backend == new_backend + + +def test_set_default_backend(cache_provider): + cache_provider.set_default_backend("memcached") + assert cache_provider._default_backend == "memcached" + + backend = cache_provider.get_backend() + assert backend == cache_provider._backends["memcached"] + + +def test_set_default_backend_not_found(cache_provider): + with pytest.raises(BackendNotFoundError): + cache_provider.set_default_backend("nonexistent") diff --git a/backend/tests/unit/infrastructure/cache/test_utils.py 
b/backend/tests/unit/infrastructure/cache/test_utils.py new file mode 100644 index 00000000..37bfe724 --- /dev/null +++ b/backend/tests/unit/infrastructure/cache/test_utils.py @@ -0,0 +1,81 @@ +import pytest + +from src.infrastructure.cache.exceptions import ( + CacheIdentificationInferenceError, +) +from src.infrastructure.cache.utils import ( + format_extra_data, + format_prefix, + infer_resource_id, +) + + +def test_infer_resource_id_integer(): + kwargs = {"user_id": 123, "other_param": "value"} + resource_id = infer_resource_id(kwargs=kwargs, resource_id_type=int) + assert resource_id == 123 + + +def test_infer_resource_id_string(): + kwargs = {"product_id": "abc123", "other_param": 456} + resource_id = infer_resource_id(kwargs=kwargs, resource_id_type=str) + assert resource_id == "abc123" + + +def test_infer_resource_id_multiple_types(): + kwargs = {"id": "abc123", "other_param": 456} + resource_id = infer_resource_id(kwargs=kwargs, resource_id_type=(str, int)) + assert resource_id == "abc123" + + kwargs = {"id": 789, "other_param": "value"} + resource_id = infer_resource_id(kwargs=kwargs, resource_id_type=(str, int)) + assert resource_id == 789 + + +def test_infer_resource_id_no_match(): + kwargs = {"param1": "value1", "param2": "value2"} + with pytest.raises(CacheIdentificationInferenceError): + infer_resource_id(kwargs=kwargs, resource_id_type=int) + + +def test_format_prefix_simple(): + prefix = "user" + kwargs = {"user_id": 123} + formatted = format_prefix(prefix, kwargs) + assert formatted == "user" + + +def test_format_prefix_with_variables(): + prefix = "user:{user_id}:profile" + kwargs = {"user_id": 123, "other_param": "value"} + formatted = format_prefix(prefix, kwargs) + assert formatted == "user:123:profile" + + +def test_format_prefix_with_multiple_variables(): + prefix = "org:{org_id}:user:{user_id}" + kwargs = {"org_id": 456, "user_id": 123} + formatted = format_prefix(prefix, kwargs) + assert formatted == "org:456:user:123" + + +def 
test_format_extra_data_simple(): + extra_data = {"user": "user_id"} + kwargs = {"user_id": 123} + formatted = format_extra_data(extra_data, kwargs) + assert formatted == {"user": 123} + + +def test_format_extra_data_multiple(): + extra_data = {"user": "user_id", "org": "org_id"} + kwargs = {"user_id": 123, "org_id": 456} + formatted = format_extra_data(extra_data, kwargs) + assert formatted == {"user": 123, "org": 456} + + +def test_format_extra_data_missing_key(): + extra_data = {"user": "user_id", "org": "org_id"} + kwargs = {"user_id": 123} + formatted = format_extra_data(extra_data, kwargs) + assert formatted == {"user": 123} + assert "org" not in formatted diff --git a/src/migrations/versions/README.MD b/backend/tests/unit/infrastructure/config/__init__.py similarity index 100% rename from src/migrations/versions/README.MD rename to backend/tests/unit/infrastructure/config/__init__.py diff --git a/backend/tests/unit/infrastructure/config/test_settings.py b/backend/tests/unit/infrastructure/config/test_settings.py new file mode 100644 index 00000000..53ac724d --- /dev/null +++ b/backend/tests/unit/infrastructure/config/test_settings.py @@ -0,0 +1,229 @@ +"""Tests for configuration settings.""" + +import os +from unittest.mock import patch + +import pytest + +from src.infrastructure.config.settings import Settings, get_settings + + +class TestSettings: + """Test cases for application settings.""" + + def test_settings_creation(self): + """Test creating settings instance.""" + settings = Settings() + assert settings is not None + assert hasattr(settings, "DATABASE_URL") + assert hasattr(settings, "SECRET_KEY") + + def test_get_settings_singleton(self): + """Test that get_settings returns the same instance.""" + settings1 = get_settings() + settings2 = get_settings() + assert settings1 is settings2 + + @patch.dict(os.environ, {"SECRET_KEY": "test_secret_key"}) + def test_settings_from_env(self): + """Test loading settings from environment variables.""" + settings 
= Settings() + assert settings.SECRET_KEY == "test_secret_key" + + def test_database_url_format(self): + """Test database URL format validation.""" + settings = get_settings() + assert settings.DATABASE_URL is not None + # Should be a valid database URL format + assert "://" in settings.DATABASE_URL + + @patch.dict( + os.environ, {"DATABASE_URL": "postgresql+asyncpg://prod_user:prod_pass@prod.example.com:5432/prod_db"}, clear=False + ) + def test_database_url_env_var_override(self): + """Test that DATABASE_URL environment variable takes precedence.""" + settings = Settings() + expected_url = "postgresql+asyncpg://prod_user:prod_pass@prod.example.com:5432/prod_db" + assert settings.DATABASE_URL == expected_url + + @patch.dict(os.environ, {}, clear=False) + def test_database_url_fallback_to_constructed(self): + """Test that DATABASE_URL falls back to constructed URL when env var not set.""" + # Remove DATABASE_URL if it exists + if "DATABASE_URL" in os.environ: + del os.environ["DATABASE_URL"] + + settings = Settings() + # Should construct URL from components + assert "postgresql+asyncpg://" in settings.DATABASE_URL + assert "postgres:postgres@localhost:5432" in settings.DATABASE_URL + + @patch.dict(os.environ, {"DEBUG": "true"}) + def test_debug_mode_setting(self): + """Test debug mode configuration.""" + settings = Settings() + # Assuming DEBUG is a boolean setting + if hasattr(settings, "DEBUG"): + assert isinstance(settings.DEBUG, bool) + + def test_required_settings_exist(self): + """Test that all required settings are present.""" + settings = get_settings() + + # Core required settings + required_attrs = [ + "DATABASE_URL", + "SECRET_KEY", + ] + + for attr in required_attrs: + assert hasattr(settings, attr), f"Missing required setting: {attr}" + assert getattr(settings, attr) is not None, f"Setting {attr} is None" + + @patch.dict(os.environ, {"SQLITE_URI": ":memory:"}) + def test_test_database_override(self): + """Test that test database settings work.""" + 
settings = Settings() + # In test mode, should use in-memory database + if hasattr(settings, "SQLITE_URI"): + assert ":memory:" in settings.SQLITE_URI + + +class TestTaskiqSettings: + """Test cases for Taskiq configuration settings.""" + + def test_taskiq_settings_defaults(self): + """Test Taskiq settings have correct defaults.""" + settings = get_settings() + + # Test default values + assert settings.TASKIQ_ENABLED is True + assert settings.TASKIQ_BROKER_TYPE == "redis" + assert settings.TASKIQ_REDIS_HOST == "localhost" + assert settings.TASKIQ_REDIS_PORT == 6379 + assert settings.TASKIQ_REDIS_DB == 3 + assert settings.TASKIQ_REDIS_PASSWORD is None + assert settings.TASKIQ_WORKER_CONCURRENCY == 2 + assert settings.TASKIQ_MAX_TASKS_PER_WORKER == 1000 + + @patch.dict( + os.environ, + { + "TASKIQ_ENABLED": "false", + "TASKIQ_BROKER_TYPE": "rabbitmq", + "TASKIQ_REDIS_HOST": "redis-server", + "TASKIQ_REDIS_PORT": "6380", + "TASKIQ_REDIS_DB": "5", + "TASKIQ_REDIS_PASSWORD": "test-password", + "TASKIQ_WORKER_CONCURRENCY": "4", + "TASKIQ_MAX_TASKS_PER_WORKER": "500", + }, + ) + def test_taskiq_settings_from_env(self): + """Test loading Taskiq settings from environment variables.""" + settings = Settings() + + assert settings.TASKIQ_ENABLED is False + assert settings.TASKIQ_BROKER_TYPE == "rabbitmq" + assert settings.TASKIQ_REDIS_HOST == "redis-server" + assert settings.TASKIQ_REDIS_PORT == 6380 + assert settings.TASKIQ_REDIS_DB == 5 + assert settings.TASKIQ_REDIS_PASSWORD == "test-password" + assert settings.TASKIQ_WORKER_CONCURRENCY == 4 + assert settings.TASKIQ_MAX_TASKS_PER_WORKER == 500 + + @patch.dict( + os.environ, + { + "TASKIQ_RABBITMQ_HOST": "rabbitmq-server", + "TASKIQ_RABBITMQ_PORT": "5673", + "TASKIQ_RABBITMQ_USER": "test-user", + "TASKIQ_RABBITMQ_PASSWORD": "test-password", + "TASKIQ_RABBITMQ_VHOST": "/test", + }, + ) + def test_taskiq_rabbitmq_settings_from_env(self): + """Test loading Taskiq RabbitMQ settings from environment variables.""" + settings = 
Settings() + + assert settings.TASKIQ_RABBITMQ_HOST == "rabbitmq-server" + assert settings.TASKIQ_RABBITMQ_PORT == 5673 + assert settings.TASKIQ_RABBITMQ_USER == "test-user" + assert settings.TASKIQ_RABBITMQ_PASSWORD == "test-password" + assert settings.TASKIQ_RABBITMQ_VHOST == "/test" + + def test_taskiq_redis_broker_url_generation(self): + """Test Redis broker URL generation.""" + settings = get_settings() + + # Test Redis URL without password + broker_url = settings.TASKIQ_BROKER_URL + expected_url = f"redis://{settings.TASKIQ_REDIS_HOST}:{settings.TASKIQ_REDIS_PORT}/{settings.TASKIQ_REDIS_DB}" + assert broker_url == expected_url + + @patch.dict( + os.environ, + { + "TASKIQ_REDIS_PASSWORD": "test-password", + "TASKIQ_REDIS_HOST": "redis-host", + "TASKIQ_REDIS_PORT": "6380", + "TASKIQ_REDIS_DB": "2", + }, + ) + def test_taskiq_redis_broker_url_with_password(self): + """Test Redis broker URL generation with password.""" + settings = Settings() + + broker_url = settings.TASKIQ_BROKER_URL + expected_url = "redis://:test-password@redis-host:6380/2" + assert broker_url == expected_url + + @patch.dict( + os.environ, + { + "TASKIQ_BROKER_TYPE": "rabbitmq", + "TASKIQ_RABBITMQ_USER": "test-user", + "TASKIQ_RABBITMQ_PASSWORD": "test-password", + "TASKIQ_RABBITMQ_HOST": "rabbitmq-host", + "TASKIQ_RABBITMQ_PORT": "5673", + "TASKIQ_RABBITMQ_VHOST": "/test", + }, + ) + def test_taskiq_rabbitmq_broker_url_generation(self): + """Test RabbitMQ broker URL generation.""" + settings = Settings() + + broker_url = settings.TASKIQ_BROKER_URL + expected_url = "amqp://test-user:test-password@rabbitmq-host:5673/test" + assert broker_url == expected_url + + @patch.dict(os.environ, {"TASKIQ_BROKER_TYPE": "invalid"}) + def test_taskiq_invalid_broker_type_raises_error(self): + """Test that invalid broker type raises ValueError.""" + settings = Settings() + + with pytest.raises(ValueError, match="Unsupported broker type: invalid"): + settings.TASKIQ_BROKER_URL + + def 
test_taskiq_required_settings_exist(self): + """Test that all required Taskiq settings are present.""" + settings = get_settings() + + required_attrs = [ + "TASKIQ_ENABLED", + "TASKIQ_BROKER_TYPE", + "TASKIQ_REDIS_HOST", + "TASKIQ_REDIS_PORT", + "TASKIQ_REDIS_DB", + "TASKIQ_RABBITMQ_HOST", + "TASKIQ_RABBITMQ_PORT", + "TASKIQ_RABBITMQ_USER", + "TASKIQ_RABBITMQ_PASSWORD", + "TASKIQ_RABBITMQ_VHOST", + "TASKIQ_WORKER_CONCURRENCY", + "TASKIQ_MAX_TASKS_PER_WORKER", + "TASKIQ_BROKER_URL", + ] + + for attr in required_attrs: + assert hasattr(settings, attr), f"Missing required Taskiq setting: {attr}" diff --git a/backend/tests/unit/infrastructure/database/__init__.py b/backend/tests/unit/infrastructure/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/infrastructure/database/test_session.py b/backend/tests/unit/infrastructure/database/test_session.py new file mode 100644 index 00000000..8b673599 --- /dev/null +++ b/backend/tests/unit/infrastructure/database/test_session.py @@ -0,0 +1,56 @@ +"""Tests for database session management.""" + +import pytest +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from src.infrastructure.database.session import async_session + +pytestmark = pytest.mark.asyncio + + +class TestDatabaseSession: + """Test cases for database session management.""" + + async def test_async_session(self): + """Test getting an async database session.""" + async for session in async_session(): + assert isinstance(session, AsyncSession) + break # Just test that we can get a session + + async def test_session_transaction(self, db_session: AsyncSession): + """Test database session transaction handling.""" + # Test that we can execute a simple query + result = await db_session.execute(text("SELECT 1 as test_value")) + row = result.fetchone() + assert row is not None + assert row[0] == 1 + + async def test_session_rollback(self, db_session: AsyncSession): + """Test session rollback 
functionality.""" + # Start a transaction + await db_session.begin() + + # Execute some operation + await db_session.execute(text("SELECT 1")) + + # Rollback + await db_session.rollback() + + # Session should still be usable + result = await db_session.execute(text("SELECT 2")) + row = result.fetchone() + assert row is not None + assert row[0] == 2 + + async def test_session_commit(self, db_session: AsyncSession): + """Test session commit functionality.""" + # The session fixture should handle commits automatically + # This test verifies the session is working properly + result = await db_session.execute(text("SELECT 3")) + row = result.fetchone() + assert row is not None + assert row[0] == 3 + + # Commit should work without errors + await db_session.commit() diff --git a/backend/tests/unit/infrastructure/logging/__init__.py b/backend/tests/unit/infrastructure/logging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/infrastructure/logging/test_correlation_id.py b/backend/tests/unit/infrastructure/logging/test_correlation_id.py new file mode 100644 index 00000000..a76f566b --- /dev/null +++ b/backend/tests/unit/infrastructure/logging/test_correlation_id.py @@ -0,0 +1,343 @@ +"""Tests for correlation ID logging functionality.""" + +import contextvars +import logging +import threading +import time +from io import StringIO +from unittest.mock import MagicMock, patch + +import pytest + +from src.infrastructure.logging.config import ( + CorrelationIdFilter, + add_correlation_id_filter, + generate_correlation_id, + get_correlation_id, + set_correlation_id, +) + + +@pytest.fixture +def mock_logger(): + """Create a mock logger for testing.""" + logger = logging.getLogger("test_correlation") + logger.setLevel(logging.DEBUG) + + # Clear existing handlers + logger.handlers.clear() + + # Add string handler for capturing output + stream = StringIO() + handler = logging.StreamHandler(stream) + 
handler.setFormatter(logging.Formatter("%(correlation_id)s - %(message)s")) + logger.addHandler(handler) + + return logger, stream + + +def test_correlation_id_context_management(): + """Test correlation ID context variable management.""" + # Test setting and getting correlation ID + test_id = "test-correlation-123" + set_correlation_id(test_id) + + retrieved_id = get_correlation_id() + assert retrieved_id == test_id + + # Test in different context - create new empty context + def new_context(): + # Should not have correlation ID in new empty context + assert get_correlation_id() is None + + # Set new ID in this context + set_correlation_id("context-456") + assert get_correlation_id() == "context-456" + + # Run in new empty context + ctx = contextvars.Context() # Create empty context instead of copying + ctx.run(new_context) + + # Original context should still have original ID + assert get_correlation_id() == test_id + + +def test_generate_correlation_id(): + """Test correlation ID generation.""" + id1 = generate_correlation_id() + id2 = generate_correlation_id() + + # Should generate different IDs + assert id1 != id2 + + # Should be valid UUIDs (36 characters with hyphens) + assert len(id1) == 36 + assert len(id2) == 36 + assert "-" in id1 + assert "-" in id2 + + +def test_correlation_id_filter_with_context(mock_logger): + """Test correlation ID filter with context variable.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Set correlation ID in context + test_id = "filter-test-789" + set_correlation_id(test_id) + + # Log message + logger.info("Test message with correlation ID") + + # Check output + output = stream.getvalue() + assert test_id in output + assert "Test message with correlation ID" in output + + +def test_correlation_id_filter_no_context(mock_logger): + """Test correlation ID filter without context.""" + logger, stream = mock_logger + + # Add 
correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Test in a fresh context without correlation ID + def run_test(): + # Log message in context without correlation ID + logger.info("Test message without correlation ID") + + # Run in fresh context (no correlation ID set) + ctx = contextvars.Context() + ctx.run(run_test) + + # Check output - should have default value + output = stream.getvalue() + assert "no-correlation" in output + assert "Test message without correlation ID" in output + + +def test_correlation_id_filter_thread_local(mock_logger): + """Test correlation ID filter with thread local fallback.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Set thread local correlation ID + test_id = "thread-local-456" + threading.current_thread().correlation_id = test_id # type: ignore[attr-defined] + + try: + # Test in fresh context to force thread local lookup + def run_test(): + # Log message - should fall back to thread local + logger.info("Test thread local correlation ID") + + # Run in fresh context (no correlation ID context var set) + ctx = contextvars.Context() + ctx.run(run_test) + + # Check output + output = stream.getvalue() + assert test_id in output + + finally: + # Clean up thread local + delattr(threading.current_thread(), "correlation_id") + + +def test_correlation_id_filter_request_headers(mock_logger): + """Test correlation ID filter basic functionality.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Set correlation ID via context (the primary method) + set_correlation_id("header-correlation-123") + + # Log message + logger.info("Test correlation ID functionality") + + # Check output + output = stream.getvalue() + assert "header-correlation-123" in output + + +def 
test_correlation_id_filter_request_id_header(mock_logger): + """Test correlation ID filter with different ID format.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Set correlation ID with request ID format + set_correlation_id("request-id-789") + + # Log message + logger.info("Test request ID header") + + # Check output + output = stream.getvalue() + assert "request-id-789" in output + + +def test_correlation_id_filter_request_state(mock_logger): + """Test correlation ID filter with request state.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Create mock log record + record = logging.LogRecord( + name="test", level=logging.INFO, pathname="", lineno=0, msg="Test message", args=(), exc_info=None + ) + + # Set test correlation ID in context + test_id = "state-correlation-456" + set_correlation_id(test_id) + + # Process record through filter + result = correlation_filter.filter(record) + + assert result is True + assert record.correlation_id == test_id # type: ignore[attr-defined] + assert hasattr(record, "extra") + assert getattr(record, "extra")["correlation_id"] == test_id + + +def test_correlation_id_filter_error_handling(mock_logger): + """Test correlation ID filter error handling.""" + logger, stream = mock_logger + + # Add correlation filter + correlation_filter = CorrelationIdFilter() + logger.addFilter(correlation_filter) + + # Mock _get_correlation_id to raise exception + mock_method = MagicMock(side_effect=Exception("Test error")) + + # Use patch to mock the method properly + with patch.object(correlation_filter, "_get_correlation_id", mock_method): + # Log message - should not crash + logger.info("Test error handling") + + # Check output - should have default value + output = stream.getvalue() + assert "no-correlation" in output + + +def 
test_add_correlation_id_filter(): + """Test adding correlation ID filter to logger.""" + test_logger = logging.getLogger("test_add_filter") + test_logger.handlers.clear() + + # Add stream handler + stream = StringIO() + handler = logging.StreamHandler(stream) + handler.setFormatter(logging.Formatter("%(correlation_id)s - %(message)s")) + test_logger.addHandler(handler) + test_logger.setLevel(logging.DEBUG) + + # Should start with no filters + assert len(test_logger.filters) == 0 + + # Add correlation filter + add_correlation_id_filter() + + # Root logger should have the filter + root_logger = logging.getLogger() + correlation_filters = [f for f in root_logger.filters if isinstance(f, CorrelationIdFilter)] + assert len(correlation_filters) >= 1 + + +def test_correlation_id_inheritance(): + """Test correlation ID functionality in child loggers.""" + # Create child logger with its own filter + child_logger = logging.getLogger("test.child") + child_logger.setLevel(logging.DEBUG) + child_logger.handlers.clear() + + # Add correlation filter to child logger + correlation_filter = CorrelationIdFilter() + child_logger.addFilter(correlation_filter) + + # Add handler to capture output + stream = StringIO() + handler = logging.StreamHandler(stream) + handler.setFormatter(logging.Formatter("%(correlation_id)s - %(message)s")) + child_logger.addHandler(handler) + child_logger.propagate = False # Don't propagate to avoid double filtering + + # Set correlation ID + test_id = "inheritance-test-123" + set_correlation_id(test_id) + + # Log to child logger + child_logger.info("Child logger message") + + # Should have correlation ID from filter + output = stream.getvalue() + assert test_id in output + + +def test_correlation_id_multiple_contexts(): + """Test correlation ID in multiple concurrent contexts.""" + results = {} + + def context_function(context_id): + test_id = f"context-{context_id}" + set_correlation_id(test_id) + retrieved_id = get_correlation_id() + results[context_id] = 
retrieved_id + + # Run multiple contexts + contexts = [] + for i in range(3): + ctx = contextvars.copy_context() + contexts.append(ctx) + ctx.run(context_function, i) + + # Each context should have its own correlation ID + assert results[0] == "context-0" + assert results[1] == "context-1" + assert results[2] == "context-2" + assert len(set(results.values())) == 3 # All unique + + +def test_correlation_id_filter_performance(): + """Test correlation ID filter performance.""" + filter_instance = CorrelationIdFilter() + + # Create log record + record = logging.LogRecord( + name="perf_test", level=logging.INFO, pathname="", lineno=0, msg="Performance test message", args=(), exc_info=None + ) + + # Set correlation ID + set_correlation_id("performance-test-id") + + # Time multiple filter calls + start_time = time.time() + + for _ in range(1000): + filter_instance.filter(record) + + end_time = time.time() + duration = end_time - start_time + + # Should be fast (less than 1 second for 1000 calls) + assert duration < 1.0 + + # Record should have correlation ID + assert record.correlation_id == "performance-test-id" # type: ignore[attr-defined] diff --git a/backend/tests/unit/infrastructure/rate_limit/__init__.py b/backend/tests/unit/infrastructure/rate_limit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/infrastructure/rate_limit/backends/__init__.py b/backend/tests/unit/infrastructure/rate_limit/backends/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/infrastructure/rate_limit/backends/test_fail_open.py b/backend/tests/unit/infrastructure/rate_limit/backends/test_fail_open.py new file mode 100644 index 00000000..04c2e7e6 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/backends/test_fail_open.py @@ -0,0 +1,159 @@ +"""Tests for the fail_open behavior in rate limiter backends.""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest +from redis.exceptions import 
RedisError + +from src.infrastructure.rate_limit.backends.memcached import MemcachedBackend, MemcachedSettings +from src.infrastructure.rate_limit.backends.redis import RedisBackend, RedisSettings + + +@pytest.fixture +def mock_redis_client(): + """Create a mock Redis client.""" + pipeline_mock = MagicMock() + pipeline_mock.incr = MagicMock() + pipeline_mock.expire = MagicMock() + pipeline_mock.execute = AsyncMock(return_value=[1]) + + client_mock = AsyncMock() + client_mock.pipeline = MagicMock(return_value=pipeline_mock) + client_mock.get = AsyncMock(return_value="1") + client_mock.delete = AsyncMock(return_value=1) + client_mock.ping = AsyncMock(return_value=True) + + return client_mock, pipeline_mock + + +@pytest.fixture +def mock_memcached_client(): + """Create a mock Memcached client.""" + client_mock = AsyncMock() + client_mock.get = AsyncMock(return_value=b"1") + client_mock.set = AsyncMock(return_value=True) + client_mock.delete = AsyncMock(return_value=True) + + return client_mock + + +@pytest.fixture +def redis_backend_fail_open(mock_redis_client): + """Create a RedisBackend with fail_open=True.""" + client_mock, pipeline_mock = mock_redis_client + + settings = RedisSettings(host="localhost", port=6379) + backend = RedisBackend(settings=settings, fail_open=True) + backend.client = client_mock + + yield backend, client_mock, pipeline_mock + + +@pytest.fixture +def redis_backend_fail_closed(mock_redis_client): + """Create a RedisBackend with fail_open=False.""" + client_mock, pipeline_mock = mock_redis_client + + settings = RedisSettings(host="localhost", port=6379) + backend = RedisBackend(settings=settings, fail_open=False) + backend.client = client_mock + + yield backend, client_mock, pipeline_mock + + +@pytest.fixture +def memcached_backend_fail_open(mock_memcached_client): + """Create a MemcachedBackend with fail_open=True.""" + settings = MemcachedSettings(host="localhost", port=11211) + backend = MemcachedBackend(settings=settings, fail_open=True) + 
backend.client = mock_memcached_client + + yield backend, mock_memcached_client + + +@pytest.fixture +def memcached_backend_fail_closed(mock_memcached_client): + """Create a MemcachedBackend with fail_open=False.""" + settings = MemcachedSettings(host="localhost", port=11211) + backend = MemcachedBackend(settings=settings, fail_open=False) + backend.client = mock_memcached_client + + yield backend, mock_memcached_client + + +@pytest.mark.asyncio +async def test_redis_error_handling_fail_open(redis_backend_fail_open): + """Test Redis error handling with fail_open=True.""" + backend, _, pipeline_mock = redis_backend_fail_open + + pipeline_mock.execute.side_effect = RedisError("Test Redis error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is False + + +@pytest.mark.asyncio +async def test_redis_error_handling_fail_closed(redis_backend_fail_closed): + """Test Redis error handling with fail_open=False.""" + backend, _, pipeline_mock = redis_backend_fail_closed + + pipeline_mock.execute.side_effect = RedisError("Test Redis error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is True + + +@pytest.mark.asyncio +async def test_redis_general_error_fail_open(redis_backend_fail_open): + """Test general error handling with fail_open=True in Redis backend.""" + backend, _, pipeline_mock = redis_backend_fail_open + + pipeline_mock.execute.side_effect = Exception("General error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is False + + +@pytest.mark.asyncio +async def test_redis_general_error_fail_closed(redis_backend_fail_closed): + """Test general error handling with fail_open=False in Redis backend.""" + backend, _, pipeline_mock = redis_backend_fail_closed + + pipeline_mock.execute.side_effect = 
Exception("General error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is True + + +@pytest.mark.asyncio +async def test_memcached_error_fail_open(memcached_backend_fail_open): + """Test error handling with fail_open=True in Memcached backend.""" + backend, client_mock = memcached_backend_fail_open + + client_mock.get.side_effect = Exception("Memcached error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is False + + +@pytest.mark.asyncio +async def test_memcached_error_fail_closed(memcached_backend_fail_closed): + """Test error handling with fail_open=False in Memcached backend.""" + backend, client_mock = memcached_backend_fail_closed + + client_mock.get.side_effect = Exception("Memcached error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is True diff --git a/backend/tests/unit/infrastructure/rate_limit/backends/test_memcached.py b/backend/tests/unit/infrastructure/rate_limit/backends/test_memcached.py new file mode 100644 index 00000000..74c3bb91 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/backends/test_memcached.py @@ -0,0 +1,136 @@ +"""Tests for the Memcached rate limiter backend.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from src.infrastructure.rate_limit.backends.memcached import ( + MemcachedBackend, + MemcachedSettings, +) +from src.infrastructure.rate_limit.exceptions import RateLimiterBackendException + + +@pytest.fixture +def mock_aiomcache(): + """Create a mock aiomcache client.""" + client_mock = AsyncMock() + client_mock.get = AsyncMock() + client_mock.set = AsyncMock() + client_mock.delete = AsyncMock() + return client_mock + + +@pytest.fixture +def memcached_backend(mock_aiomcache): + """Create a MemcachedBackend with a mock 
client.""" + with patch("aiomcache.Client", return_value=mock_aiomcache): + settings = MemcachedSettings(host="localhost", port=11211) + backend = MemcachedBackend(settings=settings) + backend.client = mock_aiomcache + yield backend + + +@pytest.mark.asyncio +async def test_init_error(): + """Test that initialization errors are properly handled.""" + with patch("aiomcache.Client", side_effect=Exception("Connection error")): + with pytest.raises(RateLimiterBackendException) as excinfo: + MemcachedBackend(settings=MemcachedSettings()) + + assert "Failed to initialize Memcached client" in str(excinfo.value) + + +@pytest.mark.asyncio +async def test_increment_and_check_new_key(memcached_backend, mock_aiomcache): + """Test incrementing a counter for a new key.""" + mock_aiomcache.get.return_value = None + + count, is_limited = await memcached_backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 1 + assert is_limited is False + + assert mock_aiomcache.get.called + assert mock_aiomcache.set.called + + set_args = mock_aiomcache.set.call_args.args + assert set_args[1] == b"1" + assert mock_aiomcache.set.call_args.kwargs["exptime"] == 60 + + +@pytest.mark.asyncio +async def test_increment_and_check_existing_key(memcached_backend, mock_aiomcache): + """Test incrementing a counter for an existing key.""" + mock_aiomcache.get.return_value = b"4" + + count, is_limited = await memcached_backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 5 + assert is_limited is False + + mock_aiomcache.get.assert_called_once() + mock_aiomcache.set.assert_called_once() + + set_args = mock_aiomcache.set.call_args.args + assert set_args[1] == b"5" + + +@pytest.mark.asyncio +async def test_rate_limited(memcached_backend, mock_aiomcache): + """Test that requests are rate limited once limit is exceeded.""" + mock_aiomcache.get.return_value = b"5" + + count, is_limited = await memcached_backend.increment_and_check(key="test:123", limit=5, 
period=60) + + assert count == 6 + assert is_limited is True + + +@pytest.mark.asyncio +async def test_get_count(memcached_backend, mock_aiomcache): + """Test getting the current count for a key.""" + mock_aiomcache.get.return_value = b"3" + count = await memcached_backend.get_count("test:123") + assert count == 3 + + mock_aiomcache.get.return_value = None + count = await memcached_backend.get_count("test:456") + assert count is None + + +@pytest.mark.asyncio +async def test_reset(memcached_backend, mock_aiomcache): + """Test resetting the counter for a key.""" + await memcached_backend.reset("test:123") + mock_aiomcache.delete.assert_called_once_with(b"test:123") + + +@pytest.mark.asyncio +async def test_ping_success(memcached_backend, mock_aiomcache): + """Test ping with successful connection.""" + mock_aiomcache.set.return_value = None + mock_aiomcache.get.return_value = b"1" + + result = await memcached_backend.ping() + assert result is True + + +@pytest.mark.asyncio +async def test_ping_failure(memcached_backend, mock_aiomcache): + """Test ping with failed connection.""" + mock_aiomcache.set.side_effect = Exception("Connection failed") + + result = await memcached_backend.ping() + assert result is False + + +@pytest.mark.asyncio +async def test_increment_error_handling(memcached_backend, mock_aiomcache): + """Test error handling during increment operation.""" + mock_aiomcache.get.side_effect = Exception("Connection error") + + count, is_limited = await memcached_backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is False diff --git a/backend/tests/unit/infrastructure/rate_limit/backends/test_redis.py b/backend/tests/unit/infrastructure/rate_limit/backends/test_redis.py new file mode 100644 index 00000000..b9a0ccd6 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/backends/test_redis.py @@ -0,0 +1,137 @@ +"""Tests for the Redis rate limiter backend.""" + +from unittest.mock import AsyncMock, 
MagicMock + +import pytest +from redis.exceptions import RedisError + +from src.infrastructure.rate_limit.backends.redis import RedisBackend, RedisSettings + + +@pytest.fixture +def mock_redis_client(): + """Create a mock Redis client.""" + pipeline_mock = MagicMock() + pipeline_mock.incr = MagicMock() + pipeline_mock.expire = MagicMock() + pipeline_mock.execute = AsyncMock(return_value=[1]) + + client_mock = AsyncMock() + client_mock.pipeline = MagicMock(return_value=pipeline_mock) + client_mock.get = AsyncMock(return_value="1") + client_mock.delete = AsyncMock(return_value=1) + client_mock.ping = AsyncMock(return_value=True) + + return client_mock, pipeline_mock + + +@pytest.fixture +def redis_backend(mock_redis_client): + """Create a RedisBackend with a mock client.""" + client_mock, pipeline_mock = mock_redis_client + + settings = RedisSettings(host="localhost", port=6379) + backend = RedisBackend(settings=settings, fail_open=True) + backend.client = client_mock + + yield backend, client_mock, pipeline_mock + + +@pytest.mark.asyncio +async def test_increment_and_check_new_key(redis_backend): + """Test incrementing a counter for a new key.""" + backend, client_mock, pipeline_mock = redis_backend + + pipeline_mock.execute.return_value = [1] + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 1 + assert is_limited is False + + client_mock.pipeline.assert_called_once() + pipeline_mock.incr.assert_called_once() + pipeline_mock.expire.assert_called_once() + pipeline_mock.execute.assert_called_once() + + +@pytest.mark.asyncio +async def test_increment_and_check_existing_key(redis_backend): + """Test incrementing a counter for an existing key.""" + backend, client_mock, pipeline_mock = redis_backend + + pipeline_mock.execute.return_value = [5] + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 5 + assert is_limited is False + + 
+@pytest.mark.asyncio +async def test_rate_limited(redis_backend): + """Test that requests are rate limited once limit is exceeded.""" + backend, client_mock, pipeline_mock = redis_backend + + pipeline_mock.execute.return_value = [6] + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 6 + assert is_limited is True + + +@pytest.mark.asyncio +async def test_get_count(redis_backend): + """Test getting the current count for a key.""" + backend, client_mock, _ = redis_backend + + client_mock.get.return_value = "3" + count = await backend.get_count("test:123") + assert count == 3 + + client_mock.get.return_value = None + count = await backend.get_count("test:456") + assert count is None + + +@pytest.mark.asyncio +async def test_reset(redis_backend): + """Test resetting the counter for a key.""" + backend, client_mock, _ = redis_backend + + await backend.reset("test:123") + client_mock.delete.assert_called_once_with("test:123") + + +@pytest.mark.asyncio +async def test_ping_success(redis_backend): + """Test ping with successful connection.""" + backend, client_mock, _ = redis_backend + client_mock.ping.return_value = True + + result = await backend.ping() + assert result is True + + +@pytest.mark.asyncio +async def test_ping_failure(redis_backend): + """Test ping with failed connection.""" + backend, client_mock, _ = redis_backend + client_mock.ping.side_effect = Exception("Connection failed") + + result = await backend.ping() + assert result is False + + +@pytest.mark.asyncio +async def test_redis_error_handling(redis_backend): + """Test Redis-specific error handling.""" + backend, client_mock, pipeline_mock = redis_backend + + pipeline_mock.execute.side_effect = RedisError("Redis error") + + count, is_limited = await backend.increment_and_check(key="test:123", limit=5, period=60) + + assert count == 0 + assert is_limited is False diff --git 
a/backend/tests/unit/infrastructure/rate_limit/test_fail_open_middleware.py b/backend/tests/unit/infrastructure/rate_limit/test_fail_open_middleware.py new file mode 100644 index 00000000..4418bee7 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/test_fail_open_middleware.py @@ -0,0 +1,101 @@ +"""Tests for the fail_open behavior in rate limiter middleware.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Request + +from src.infrastructure.rate_limit.exceptions import RateLimitException +from src.infrastructure.rate_limit.middleware import _check_rate_limit + + +@pytest.fixture +def mock_request(): + """Create a mock Request object.""" + mock = MagicMock(spec=Request) + mock.url = MagicMock() + mock.url.path = "/api/v1/test" + mock.client = MagicMock() + mock.client.host = "127.0.0.1" + mock.state = MagicMock() + mock.app = MagicMock() + mock.app.state = MagicMock() + mock.app.state.initialization_complete = AsyncMock() + mock.app.state.initialization_complete.wait = AsyncMock() + return mock + + +@pytest.fixture +def mock_db(): + """Create a mock database session.""" + return AsyncMock() + + +@pytest.mark.asyncio +async def test_middleware_fail_open_behavior(mock_request, mock_db, mock_rate_limit_settings_fail_open): + """Test middleware with fail_open=True when a backend error occurs.""" + + with ( + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_inc, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.settings", mock_rate_limit_settings_fail_open), + ): + mock_inc.side_effect = Exception("Backend error") + + await _check_rate_limit(mock_request, mock_db) + + mock_inc.assert_called_once() + assert mock_inc.call_args[1]["fail_open"] is True + + +@pytest.mark.asyncio +async def test_middleware_fail_closed_behavior(mock_request, mock_db, mock_rate_limit_settings_fail_closed): + """Test middleware with 
fail_open=False when a backend error occurs.""" + + with ( + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_inc, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.settings", mock_rate_limit_settings_fail_closed), + ): + mock_inc.side_effect = Exception("Backend error") + + with pytest.raises(RateLimitException): + await _check_rate_limit(mock_request, mock_db) + + mock_inc.assert_called_once() + assert mock_inc.call_args[1]["fail_open"] is False + + +@pytest.mark.asyncio +async def test_middleware_respects_rate_limit_exception(mock_request, mock_db, mock_rate_limit_settings_fail_open): + """Test that middleware re-raises RateLimitException even with fail_open=True.""" + + with ( + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_inc, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.settings", mock_rate_limit_settings_fail_open), + ): + mock_inc.side_effect = RateLimitException("Rate limit exceeded") + + with pytest.raises(RateLimitException): + await _check_rate_limit(mock_request, mock_db) + + +@pytest.mark.asyncio +async def test_middleware_sets_correct_headers(mock_request, mock_db, mock_rate_limit_settings_fail_open): + """Test that middleware sets correct rate limit headers on success.""" + + with ( + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_inc, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.settings", mock_rate_limit_settings_fail_open), + ): + mock_inc.return_value = (3, False) + + await _check_rate_limit(mock_request, mock_db) + + assert hasattr(mock_request.state, "rate_limit_headers") + headers = mock_request.state.rate_limit_headers + assert headers["X-RateLimit-Limit"] == "100" + assert headers["X-RateLimit-Remaining"] == "97" + assert 
headers["X-RateLimit-Reset"] == "60" diff --git a/backend/tests/unit/infrastructure/rate_limit/test_fail_open_provider.py b/backend/tests/unit/infrastructure/rate_limit/test_fail_open_provider.py new file mode 100644 index 00000000..17a72b72 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/test_fail_open_provider.py @@ -0,0 +1,137 @@ +"""Tests for the fail_open override functionality in the rate limiter provider.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from src.infrastructure.rate_limit.base import RateLimiterBackend +from src.infrastructure.rate_limit.provider import ( + RateLimiterProvider, + increment_and_check, +) + + +class MockBackend(RateLimiterBackend): + """Mock implementation of RateLimiterBackend with fail_open support.""" + + def __init__(self, fail_open: bool = True): + """Initialize the mock backend with configurable fail_open behavior.""" + super().__init__(fail_open=fail_open) + self.increment_and_check_mock = AsyncMock(return_value=(1, False)) + self.get_count_mock = AsyncMock(return_value=1) + self.reset_mock = AsyncMock() + self.ping_mock = AsyncMock(return_value=True) + self.increment_mock = AsyncMock(return_value=1) + self.delete_mock = AsyncMock(return_value=True) + + async def increment_and_check(self, key, limit, period): + """Mock implementation with side effect based on fail_open value.""" + if hasattr(self.increment_and_check_mock, "side_effect") and self.increment_and_check_mock.side_effect: + if isinstance(self.increment_and_check_mock.side_effect, Exception): + return 0, not self.fail_open + if callable(self.increment_and_check_mock.side_effect): + return self.increment_and_check_mock.side_effect(key, limit, period) + raise self.increment_and_check_mock.side_effect + return await self.increment_and_check_mock(key, limit, period) + + async def get_count(self, key): + return await self.get_count_mock(key) + + async def reset(self, key): + return await self.reset_mock(key) + + async def 
ping(self): + return await self.ping_mock() + + async def increment(self, key, amount=1, expiry=300): + return await self.increment_mock(key, amount, expiry) + + async def delete(self, key): + return await self.delete_mock(key) + + +@pytest.fixture +def provider(): + """Create a fresh RateLimiterProvider for testing.""" + return RateLimiterProvider() + + +@pytest.fixture +def mock_backend_fail_open(): + """Create a mock rate limiter backend with fail_open=True.""" + return MockBackend(fail_open=True) + + +@pytest.fixture +def mock_backend_fail_closed(): + """Create a mock rate limiter backend with fail_open=False.""" + return MockBackend(fail_open=False) + + +@pytest.mark.asyncio +async def test_increment_and_check_with_fail_open_override(provider, mock_backend_fail_closed): + """Test overriding fail_closed with fail_open in increment_and_check.""" + provider.register_backend("test", mock_backend_fail_closed, default=True) + + mock_backend_fail_closed.increment_and_check_mock.side_effect = Exception("Test error") + + with patch("src.infrastructure.rate_limit.provider.rate_limiter_provider", provider): + count, is_limited = await increment_and_check(key="test:key", limit=5, period=60, backend_name="test") + assert is_limited is True + + count, is_limited = await increment_and_check(key="test:key", limit=5, period=60, backend_name="test", fail_open=True) + assert is_limited is False + + assert mock_backend_fail_closed.fail_open is False + + +@pytest.mark.asyncio +async def test_increment_and_check_with_fail_closed_override(provider, mock_backend_fail_open): + """Test overriding fail-open with fail-closed in increment_and_check.""" + provider.register_backend("test", mock_backend_fail_open, default=True) + + mock_backend_fail_open.increment_and_check_mock.side_effect = Exception("Test error") + + with patch("src.infrastructure.rate_limit.provider.rate_limiter_provider", provider): + count, is_limited = await increment_and_check(key="test:key", limit=5, period=60, 
backend_name="test") + assert is_limited is False + + count, is_limited = await increment_and_check(key="test:key", limit=5, period=60, backend_name="test", fail_open=False) + assert is_limited is True + + assert mock_backend_fail_open.fail_open is True + + +@pytest.mark.asyncio +async def test_provider_temp_override_behavior(provider, mock_backend_fail_open): + """Test that temporary override only affects the current call.""" + provider.register_backend("test", mock_backend_fail_open, default=True) + + fail_open_during_call = None + + def side_effect(key, limit, period): + nonlocal fail_open_during_call + fail_open_during_call = mock_backend_fail_open.fail_open + return 1, False + + mock_backend_fail_open.increment_and_check_mock.side_effect = side_effect + + with patch("src.infrastructure.rate_limit.provider.rate_limiter_provider", provider): + await increment_and_check(key="test:key", limit=5, period=60, backend_name="test", fail_open=False) + assert fail_open_during_call is False + + assert mock_backend_fail_open.fail_open is True + + +@pytest.mark.asyncio +async def test_provider_no_override_needed(provider, mock_backend_fail_open): + """Test that no override happens if the value matches.""" + provider.register_backend("test", mock_backend_fail_open, default=True) + + original_fail_open = mock_backend_fail_open.fail_open + assert original_fail_open is True + + with patch("src.infrastructure.rate_limit.provider.rate_limiter_provider", provider): + await increment_and_check(key="test:key", limit=5, period=60, backend_name="test", fail_open=True) + + assert mock_backend_fail_open.fail_open is original_fail_open diff --git a/backend/tests/unit/infrastructure/rate_limit/test_middleware.py b/backend/tests/unit/infrastructure/rate_limit/test_middleware.py new file mode 100644 index 00000000..ef2e0861 --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/test_middleware.py @@ -0,0 +1,210 @@ +"""Tests for the rate limiter middleware module.""" + +from 
unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Request, Response + +from src.infrastructure.rate_limit.exceptions import RateLimitException +from src.infrastructure.rate_limit.middleware import ( + RateLimiterMiddleware, + _check_rate_limit, +) +from src.modules.tier.schemas import TierSelect + + +@pytest.fixture +def mock_request(): + """Create a mock Request object.""" + mock = MagicMock(spec=Request) + mock.url = MagicMock() + mock.url.path = "/api/v1/test" + mock.client = MagicMock() + mock.client.host = "127.0.0.1" + mock.state = MagicMock() + mock.app = MagicMock() + mock.app.state = MagicMock() + mock.app.state.initialization_complete = AsyncMock() + mock.app.state.initialization_complete.wait = AsyncMock() + return mock + + +@pytest.fixture +def mock_response(): + """Create a mock Response object.""" + mock = MagicMock(spec=Response) + mock.headers = {} + return mock + + +@pytest.fixture +def mock_db(): + """Create a mock database session.""" + return AsyncMock() + + +@pytest.fixture +def mock_user(): + """Create a mock user dict.""" + return { + "id": 123, + "username": "testuser", + "email": "test@example.com", + "tier_id": 1, + } + + +@pytest.fixture +def mock_app(): + """Create a mock FastAPI app.""" + return MagicMock() + + +@pytest.mark.asyncio +async def test_check_rate_limit_disabled(mock_request, mock_db): + """Test check_rate_limit when rate limiting is disabled.""" + with patch("src.infrastructure.rate_limit.middleware.settings") as mock_settings: + mock_settings.RATE_LIMITER_ENABLED = False + + await _check_rate_limit(mock_request, mock_db, None) + + +@pytest.mark.asyncio +async def test_check_rate_limit_no_user(mock_request, mock_db): + """Test check_rate_limit with no authenticated user.""" + with ( + patch("src.infrastructure.rate_limit.middleware.settings") as mock_settings, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + 
patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_increment, + ): + mock_settings.RATE_LIMITER_ENABLED = True + mock_settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + mock_settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + + mock_increment.return_value = (1, False) + + await _check_rate_limit(mock_request, mock_db, None) + + mock_increment.assert_called_once() + key_arg = mock_increment.call_args.kwargs["key"] + assert "127.0.0.1" in key_arg + assert mock_increment.call_args.kwargs["limit"] == 100 + assert mock_increment.call_args.kwargs["period"] == 60 + + +@pytest.mark.asyncio +async def test_check_rate_limit_with_user(mock_request, mock_db, mock_user): + """Test check_rate_limit with an authenticated user.""" + with ( + patch("src.infrastructure.rate_limit.middleware.settings") as mock_settings, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_increment, + patch("src.infrastructure.rate_limit.middleware.crud_tiers.get") as mock_get_tier, + patch("src.infrastructure.rate_limit.middleware.crud_rate_limits.get") as mock_get_rate_limit, + ): + mock_settings.RATE_LIMITER_ENABLED = True + mock_settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + mock_settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + + mock_get_tier.return_value = {"id": 1, "name": "pro"} + mock_get_rate_limit.return_value = {"limit": 10, "period": 30} + + mock_increment.return_value = (1, False) + + await _check_rate_limit(mock_request, mock_db, mock_user) + + mock_get_tier.assert_called_once_with(db=mock_db, id=1, schema_to_select=TierSelect) + mock_get_rate_limit.assert_called_once() + + mock_increment.assert_called_once() + key_arg = mock_increment.call_args.kwargs["key"] + assert "123" in key_arg + assert mock_increment.call_args.kwargs["limit"] == 10 + assert mock_increment.call_args.kwargs["period"] == 30 + + +@pytest.mark.asyncio +async def test_check_rate_limit_no_specific_limits(mock_request, 
mock_db, mock_user): + """Test check_rate_limit with user but no specific rate limits.""" + with ( + patch("src.infrastructure.rate_limit.middleware.settings") as mock_settings, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_increment, + patch("src.infrastructure.rate_limit.middleware.crud_tiers.get") as mock_get_tier, + patch("src.infrastructure.rate_limit.middleware.crud_rate_limits.get") as mock_get_rate_limit, + patch("src.infrastructure.rate_limit.middleware.logger") as mock_logger, + ): + mock_settings.RATE_LIMITER_ENABLED = True + mock_settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + mock_settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + + mock_get_tier.return_value = {"id": 1, "name": "pro"} + mock_get_rate_limit.return_value = None + + mock_increment.return_value = (1, False) + + await _check_rate_limit(mock_request, mock_db, mock_user) + + assert mock_logger.warning.called + mock_increment.assert_called_once() + assert mock_increment.call_args.kwargs["limit"] == 100 + assert mock_increment.call_args.kwargs["period"] == 60 + + +@pytest.mark.asyncio +async def test_check_rate_limit_exceeded(mock_request, mock_db): + """Test check_rate_limit when rate limit is exceeded.""" + with ( + patch("src.infrastructure.rate_limit.middleware.settings") as mock_settings, + patch("src.infrastructure.rate_limit.middleware.DEFAULT_LIMIT", 100), + patch("src.infrastructure.rate_limit.middleware.increment_and_check") as mock_increment, + patch("src.infrastructure.rate_limit.middleware.logger") as mock_logger, + ): + mock_settings.RATE_LIMITER_ENABLED = True + mock_settings.DEFAULT_RATE_LIMIT_LIMIT = 100 + mock_settings.DEFAULT_RATE_LIMIT_PERIOD = 60 + + mock_increment.return_value = (101, True) + + with pytest.raises(RateLimitException) as excinfo: + await _check_rate_limit(mock_request, mock_db, None) + + assert "Rate limit exceeded" in str(excinfo.value) + assert 
mock_logger.warning.called + + +@pytest.mark.asyncio +async def test_rate_limiter_middleware(mock_request, mock_response, mock_app): + """Test the RateLimiterMiddleware.""" + middleware = RateLimiterMiddleware(app=mock_app) + + async def next_handler(request): + return mock_response + + mock_request.state.rate_limit_headers = { + "X-RateLimit-Limit": "10", + "X-RateLimit-Remaining": "5", + "X-RateLimit-Reset": "60", + } + + response = await middleware.dispatch(mock_request, next_handler) + + assert response.headers["X-RateLimit-Limit"] == "10" + assert response.headers["X-RateLimit-Remaining"] == "5" + assert response.headers["X-RateLimit-Reset"] == "60" + + +@pytest.mark.asyncio +async def test_rate_limiter_middleware_no_headers(mock_request, mock_response, mock_app): + """Test the RateLimiterMiddleware with no rate limit headers.""" + middleware = RateLimiterMiddleware(app=mock_app) + + async def next_handler(request): + return mock_response + + if hasattr(mock_request.state, "rate_limit_headers"): + delattr(mock_request.state, "rate_limit_headers") + + response = await middleware.dispatch(mock_request, next_handler) + + assert len(response.headers) == 0 diff --git a/backend/tests/unit/infrastructure/rate_limit/test_provider.py b/backend/tests/unit/infrastructure/rate_limit/test_provider.py new file mode 100644 index 00000000..743e5b7b --- /dev/null +++ b/backend/tests/unit/infrastructure/rate_limit/test_provider.py @@ -0,0 +1,177 @@ +"""Tests for the rate limiter provider module.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from src.infrastructure.rate_limit.base import RateLimiterBackend +from src.infrastructure.rate_limit.exceptions import BackendNotFoundError +from src.infrastructure.rate_limit.provider import ( + RateLimiterProvider, + get_count, + increment_and_check, + reset, +) + + +class MockBackend(RateLimiterBackend): + """Mock implementation of RateLimiterBackend for testing.""" + + def __init__(self): + super().__init__() + 
self.increment_and_check_mock = AsyncMock(return_value=(1, False)) + self.get_count_mock = AsyncMock(return_value=1) + self.reset_mock = AsyncMock() + self.ping_mock = AsyncMock(return_value=True) + self.increment_mock = AsyncMock(return_value=1) + self.delete_mock = AsyncMock(return_value=True) + + async def increment_and_check(self, key, limit, period): + return await self.increment_and_check_mock(key, limit, period) + + async def get_count(self, key): + return await self.get_count_mock(key) + + async def reset(self, key): + return await self.reset_mock(key) + + async def ping(self): + return await self.ping_mock() + + async def increment(self, key, amount=1, expiry=300): + return await self.increment_mock(key, amount, expiry) + + async def delete(self, key): + return await self.delete_mock(key) + + +@pytest.fixture +def provider(): + """Create a fresh RateLimiterProvider for testing.""" + return RateLimiterProvider() + + +@pytest.fixture +def mock_backend(): + """Create a mock rate limiter backend.""" + return MockBackend() + + +@pytest.mark.asyncio +async def test_register_backend(provider, mock_backend): + """Test registering a backend.""" + provider.register_backend("test", mock_backend) + + assert provider.get_backend("test") == mock_backend + + assert provider.default_backend_name == "test" + + +@pytest.mark.asyncio +async def test_register_multiple_backends(provider, mock_backend): + """Test registering multiple backends.""" + provider.register_backend("test1", mock_backend) + + mock_backend2 = MockBackend() + provider.register_backend("test2", mock_backend2, default=True) + + assert provider.get_backend("test1") == mock_backend + assert provider.get_backend("test2") == mock_backend2 + + assert provider.default_backend_name == "test2" + assert provider.get_backend() == mock_backend2 + + +@pytest.mark.asyncio +async def test_get_backend_not_found(provider): + """Test getting a non-existent backend.""" + with pytest.raises(BackendNotFoundError): + 
provider.get_backend("nonexistent") + + +@pytest.mark.asyncio +async def test_set_default_backend(provider, mock_backend): + """Test setting the default backend.""" + provider.register_backend("test1", mock_backend) + mock_backend2 = MockBackend() + provider.register_backend("test2", mock_backend2) + + assert provider.default_backend_name == "test1" + + provider.set_default_backend("test2") + assert provider.default_backend_name == "test2" + assert provider.get_backend() == mock_backend2 + + +@pytest.mark.asyncio +async def test_set_default_backend_not_found(provider, mock_backend): + """Test setting a non-existent backend as default.""" + provider.register_backend("test", mock_backend) + + with pytest.raises(BackendNotFoundError): + provider.set_default_backend("nonexistent") + + +@pytest.mark.asyncio +async def test_ping_all(provider, mock_backend): + """Test pinging all backends.""" + provider.register_backend("test1", mock_backend) + + mock_backend2 = MockBackend() + mock_backend2.ping_mock.return_value = False + provider.register_backend("test2", mock_backend2) + + results = await provider.ping_all() + + assert results == {"test1": True, "test2": False} + + +@pytest.mark.asyncio +async def test_list_backends(provider, mock_backend): + """Test listing all registered backends.""" + provider.register_backend("test1", mock_backend) + mock_backend2 = MockBackend() + provider.register_backend("test2", mock_backend2) + + backends = provider.list_backends() + + assert set(backends.keys()) == {"test1", "test2"} + assert all(issubclass(cls, MockBackend) for cls in backends.values()) + + +@pytest.mark.asyncio +async def test_increment_and_check_convenience(mock_backend): + """Test the increment_and_check convenience function.""" + with patch( + "src.infrastructure.rate_limit.provider.rate_limiter_provider.get_backend", + return_value=mock_backend, + ): + result = await increment_and_check("test:key", 5, 60) + + 
mock_backend.increment_and_check_mock.assert_called_once_with("test:key", 5, 60) + assert result == (1, False) + + +@pytest.mark.asyncio +async def test_get_count_convenience(mock_backend): + """Test the get_count convenience function.""" + with patch( + "src.infrastructure.rate_limit.provider.rate_limiter_provider.get_backend", + return_value=mock_backend, + ): + result = await get_count("test:key") + + mock_backend.get_count_mock.assert_called_once_with("test:key") + assert result == 1 + + +@pytest.mark.asyncio +async def test_reset_convenience(mock_backend): + """Test the reset convenience function.""" + with patch( + "src.infrastructure.rate_limit.provider.rate_limiter_provider.get_backend", + return_value=mock_backend, + ): + await reset("test:key") + + mock_backend.reset_mock.assert_called_once_with("test:key") diff --git a/backend/tests/unit/infrastructure/security/__init__.py b/backend/tests/unit/infrastructure/security/__init__.py new file mode 100644 index 00000000..34f4cfe3 --- /dev/null +++ b/backend/tests/unit/infrastructure/security/__init__.py @@ -0,0 +1 @@ +"""Security tests package.""" diff --git a/backend/tests/unit/infrastructure/security/test_production_validator.py b/backend/tests/unit/infrastructure/security/test_production_validator.py new file mode 100644 index 00000000..6b2aac1a --- /dev/null +++ b/backend/tests/unit/infrastructure/security/test_production_validator.py @@ -0,0 +1,305 @@ +"""Tests for production security validator.""" + +from unittest.mock import Mock + +import pytest + +from src.infrastructure.config.settings import EnvironmentOption, Settings +from src.infrastructure.security.production_validator import ( + ProductionSecurityError, + ProductionSecurityValidator, + validate_production_security, +) + + +class TestProductionSecurityValidator: + """Test the production security validator.""" + + def create_mock_settings(self, **overrides): + """Create mock settings with defaults and overrides.""" + defaults = { + "ENVIRONMENT": 
EnvironmentOption.PRODUCTION, + "SECRET_KEY": "xF9mWqP3nL7vBfKsRt8HjZ2CyE5QaM6NuV4DgX1SpY7LwB9KzT3RhI0UoJ5PcA2MvS8", + "POSTGRES_PASSWORD": "secure_db_password", + "REDIS_PASSWORD": "secure_redis_password", + "CACHE_BACKEND": "memcached", + "RATE_LIMITER_BACKEND": "memcached", + "SESSION_BACKEND": "redis", + "CORS_ENABLED": True, + "CORS_ORIGINS": "https://example.com", + "DEBUG": False, + "ENABLE_DOCS_IN_PRODUCTION": False, + "SESSION_SECURE_COOKIES": True, + "SESSION_TIMEOUT_MINUTES": 30, + "CSRF_ENABLED": True, + "ADMIN_ENABLED": True, + "ADMIN_USERNAME": "secure_admin_user", + "ADMIN_PASSWORD": "very_secure_admin_password_123", + "PRODUCTION_SECURITY_VALIDATION_ENABLED": True, + "PRODUCTION_SECURITY_STRICT_MODE": False, + # Redis settings + "CACHE_REDIS_HOST": "localhost", + "CACHE_REDIS_PORT": 6379, + "CACHE_REDIS_DB": 0, + "CACHE_REDIS_PASSWORD": None, + "RATE_LIMITER_REDIS_HOST": "localhost", + "RATE_LIMITER_REDIS_PORT": 6379, + "RATE_LIMITER_REDIS_DB": 1, + "RATE_LIMITER_REDIS_PASSWORD": None, + } + defaults.update(overrides) + + # Create a mock settings object + settings = Mock(spec=Settings) + for key, value in defaults.items(): + setattr(settings, key, value) + + # Mock the property methods + def get_cors_origins_list(): + origins = getattr(settings, "CORS_ORIGINS", "*") + if not origins: + return ["*"] + return [x.strip() for x in origins.split(",") if x.strip()] + + # Add property methods + settings.CORS_ORIGINS_LIST = get_cors_origins_list() + + return settings + + def test_non_production_environment_skips_validation(self): + """Test that non-production environments skip validation.""" + settings = self.create_mock_settings(ENVIRONMENT=EnvironmentOption.DEVELOPMENT) + validator = ProductionSecurityValidator(settings) + + # Should not raise any exceptions + validator.validate_production_security() + + def test_secure_production_config_passes(self): + """Test that a secure production configuration passes all checks.""" + settings = 
self.create_mock_settings() + validator = ProductionSecurityValidator(settings) + + # Should not raise any exceptions + validator.validate_production_security() + + def test_insecure_secret_key_raises_error(self): + """Test that insecure SECRET_KEY raises critical error.""" + test_cases = [ + "insecure-secret-key-change-this", + "change-me", + "secret", + "password", + "123456", + "short", # Too short + "", # Empty + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", # Repeated chars + "abcd1234qwerty", # Predictable patterns + ] + + for insecure_key in test_cases: + settings = self.create_mock_settings(SECRET_KEY=insecure_key) + validator = ProductionSecurityValidator(settings) + + with pytest.raises(ProductionSecurityError) as exc_info: + validator.validate_production_security() + + assert "SECRET_KEY" in str(exc_info.value) + assert "insecure" in str(exc_info.value).lower() + + def test_admin_disabled_does_not_check_credentials(self): + """Test that disabled admin doesn't trigger credential checks.""" + settings = self.create_mock_settings(ADMIN_ENABLED=False, ADMIN_USERNAME="admin", ADMIN_PASSWORD="weak") + validator = ProductionSecurityValidator(settings) + + # Should not raise any exceptions for admin credentials + validator.validate_production_security() + + def test_default_database_password_raises_error(self): + """Test that default database password raises critical error.""" + settings = self.create_mock_settings(POSTGRES_PASSWORD="postgres") + validator = ProductionSecurityValidator(settings) + + with pytest.raises(ProductionSecurityError) as exc_info: + validator.validate_production_security() + + assert "Database" in str(exc_info.value) + assert "default credentials" in str(exc_info.value) + + def test_empty_database_password_raises_error(self): + """Test that empty database password raises critical error.""" + settings = self.create_mock_settings(POSTGRES_PASSWORD="") + validator = ProductionSecurityValidator(settings) + + with 
pytest.raises(ProductionSecurityError) as exc_info: + validator.validate_production_security() + + assert "Database password is empty" in str(exc_info.value) + + def test_multiple_critical_errors_combined(self): + """Test that multiple critical errors are combined in one message.""" + settings = self.create_mock_settings(SECRET_KEY="insecure", POSTGRES_PASSWORD="postgres") + validator = ProductionSecurityValidator(settings) + + with pytest.raises(ProductionSecurityError) as exc_info: + validator.validate_production_security() + + error_msg = str(exc_info.value) + assert "SECRET_KEY" in error_msg + assert "Database" in error_msg + + def test_redis_without_password_logs_warning(self, caplog): + """Test that Redis without password logs warning.""" + settings = self.create_mock_settings(CACHE_BACKEND="redis", CACHE_REDIS_PASSWORD=None) + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check that warnings were logged + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + assert len(warning_logs) > 0 + + # Check that Redis password warnings are present + redis_warnings = [log for log in warning_logs if "Redis instance" in log.message and "no password" in log.message] + assert len(redis_warnings) > 0 + + def test_shared_redis_instance_logs_warning(self, caplog): + """Test that shared Redis instances log warning.""" + settings = self.create_mock_settings( + CACHE_BACKEND="redis", + RATE_LIMITER_BACKEND="redis", + # Both using same Redis instance + CACHE_REDIS_HOST="localhost", + CACHE_REDIS_PORT=6379, + CACHE_REDIS_DB=0, + RATE_LIMITER_REDIS_HOST="localhost", + RATE_LIMITER_REDIS_PORT=6379, + RATE_LIMITER_REDIS_DB=0, # Same DB to test shared instance warning + ) + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for shared instance warning + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + 
shared_warnings = [log for log in warning_logs if "sharing the same Redis instance" in log.message] + assert len(shared_warnings) > 0 + + def test_permissive_cors_logs_warning(self, caplog): + """Test that permissive CORS logs warning.""" + settings = self.create_mock_settings(CORS_ORIGINS="*") + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for CORS warning + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + cors_warnings = [log for log in warning_logs if "CORS_ORIGINS" in log.message and "allow all origins" in log.message] + assert len(cors_warnings) > 0 + + def test_debug_enabled_logs_warning(self, caplog): + """Test that debug mode enabled logs warning.""" + settings = self.create_mock_settings(DEBUG=True) + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for debug warning + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + debug_warnings = [log for log in warning_logs if "DEBUG mode" in log.message] + assert len(debug_warnings) > 0 + + def test_docs_enabled_logs_warning(self, caplog): + """Test that docs enabled in production logs warning.""" + settings = self.create_mock_settings(ENABLE_DOCS_IN_PRODUCTION=True) + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for docs warning + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + docs_warnings = [log for log in warning_logs if "API documentation" in log.message] + assert len(docs_warnings) > 0 + + def test_insecure_session_config_logs_warning(self, caplog): + """Test that insecure session configuration logs warnings.""" + settings = self.create_mock_settings( + SESSION_SECURE_COOKIES=False, + SESSION_TIMEOUT_MINUTES=180, # 3 hours + CSRF_ENABLED=False, + ) + validator = ProductionSecurityValidator(settings) + + 
validator.validate_production_security() + + # Check for session warnings + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + + cookie_warnings = [log for log in warning_logs if "SESSION_SECURE_COOKIES" in log.message] + timeout_warnings = [log for log in warning_logs if "Session timeout" in log.message] + csrf_warnings = [log for log in warning_logs if "CSRF protection" in log.message] + + assert len(cookie_warnings) > 0 + assert len(timeout_warnings) > 0 + assert len(csrf_warnings) > 0 + + def test_weak_admin_credentials_logs_warning(self, caplog): + """Test that weak admin credentials log warnings.""" + settings = self.create_mock_settings(ADMIN_USERNAME="admin", ADMIN_PASSWORD="123456") + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for admin credential warnings + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + + username_warnings = [log for log in warning_logs if "Admin username" in log.message and "predictable" in log.message] + password_warnings = [log for log in warning_logs if "Admin password" in log.message] + + assert len(username_warnings) > 0 + assert len(password_warnings) > 0 + + def test_convenience_function(self): + """Test the convenience function validate_production_security.""" + settings = self.create_mock_settings(SECRET_KEY="insecure") + + with pytest.raises(ProductionSecurityError): + validate_production_security(settings) + + def test_no_admin_credentials_skips_admin_checks(self, caplog): + """Test that missing admin credentials skip admin checks.""" + settings = self.create_mock_settings(ADMIN_USERNAME="", ADMIN_PASSWORD="") + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Should not have admin credential warnings + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + admin_warnings = [log for log in warning_logs 
if "Admin username" in log.message or "Admin password" in log.message] + assert len(admin_warnings) == 0 + + def test_redis_ssl_with_external_host(self, caplog): + """Test that external Redis without SSL logs warning.""" + settings = self.create_mock_settings( + CACHE_BACKEND="redis", + CACHE_REDIS_HOST="redis.example.com", # External host + ) + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Check for SSL warnings + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + ssl_warnings = [log for log in warning_logs if "not using SSL/TLS" in log.message] + assert len(ssl_warnings) > 0 + + def test_localhost_redis_no_ssl_warning(self, caplog): + """Test that localhost Redis without SSL doesn't log SSL warning.""" + settings = self.create_mock_settings(CACHE_BACKEND="redis", CACHE_REDIS_HOST="localhost") + validator = ProductionSecurityValidator(settings) + + validator.validate_production_security() + + # Should not have SSL warnings for localhost + warning_logs = [record for record in caplog.records if record.levelname == "WARNING"] + ssl_warnings = [log for log in warning_logs if "not using SSL/TLS" in log.message] + assert len(ssl_warnings) == 0 diff --git a/backend/tests/unit/infrastructure/test_middleware.py b/backend/tests/unit/infrastructure/test_middleware.py new file mode 100644 index 00000000..37fbc81b --- /dev/null +++ b/backend/tests/unit/infrastructure/test_middleware.py @@ -0,0 +1,92 @@ +"""Tests for middleware components.""" + +import pytest +from fastapi import FastAPI +from httpx import ASGITransport, AsyncClient + +from src.infrastructure.middleware import ClientCacheMiddleware, SecurityHeadersMiddleware + + +def _create_app_with_middleware( + cache: bool = False, + security: bool = False, + environment: str = "development", + max_age: int = 60, +) -> FastAPI: + app = FastAPI() + + if cache: + app.add_middleware(ClientCacheMiddleware, max_age=max_age) + if security: 
+ app.add_middleware(SecurityHeadersMiddleware, environment=environment) + + @app.get("/api/v1/users") + async def api_route(): + return {"users": []} + + @app.get("/static/logo.png") + async def static_route(): + return {"file": "logo"} + + return app + + +# === ClientCacheMiddleware === + + +@pytest.mark.asyncio +async def test_api_paths_get_no_cache(): + app = _create_app_with_middleware(cache=True, max_age=120) + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client: + resp = await client.get("/api/v1/users") + + assert resp.status_code == 200 + assert resp.headers["cache-control"] == "private, no-cache, no-store, must-revalidate" + + +@pytest.mark.asyncio +async def test_static_paths_get_public_cache(): + app = _create_app_with_middleware(cache=True, max_age=120) + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client: + resp = await client.get("/static/logo.png") + + assert resp.status_code == 200 + assert resp.headers["cache-control"] == "public, max-age=120" + + +# === SecurityHeadersMiddleware === + + +@pytest.mark.asyncio +async def test_security_headers_present_in_dev(): + app = _create_app_with_middleware(security=True, environment="development") + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client: + resp = await client.get("/api/v1/users") + + assert resp.headers["x-content-type-options"] == "nosniff" + assert resp.headers["x-frame-options"] == "DENY" + assert resp.headers["referrer-policy"] == "strict-origin-when-cross-origin" + assert resp.headers["x-xss-protection"] == "0" + assert "camera=()" in resp.headers["permissions-policy"] + # HSTS should NOT be set in dev + assert "strict-transport-security" not in resp.headers + + +@pytest.mark.asyncio +async def test_hsts_set_in_production(): + app = _create_app_with_middleware(security=True, environment="production") + async with AsyncClient(transport=ASGITransport(app=app), 
base_url="http://test") as client: + resp = await client.get("/api/v1/users") + + assert "strict-transport-security" in resp.headers + assert "max-age=" in resp.headers["strict-transport-security"] + assert "includeSubDomains" in resp.headers["strict-transport-security"] + + +@pytest.mark.asyncio +async def test_hsts_set_in_staging(): + app = _create_app_with_middleware(security=True, environment="staging") + async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client: + resp = await client.get("/api/v1/users") + + assert "strict-transport-security" in resp.headers diff --git a/backend/tests/unit/interfaces/__init__.py b/backend/tests/unit/interfaces/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/interfaces/admin/__init__.py b/backend/tests/unit/interfaces/admin/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/interfaces/api/__init__.py b/backend/tests/unit/interfaces/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/interfaces/api/v1/__init__.py b/backend/tests/unit/interfaces/api/v1/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/modules/__init__.py b/backend/tests/unit/modules/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/modules/api_keys/__init__.py b/backend/tests/unit/modules/api_keys/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/modules/api_keys/test_service.py b/backend/tests/unit/modules/api_keys/test_service.py new file mode 100644 index 00000000..3371b7a6 --- /dev/null +++ b/backend/tests/unit/modules/api_keys/test_service.py @@ -0,0 +1,444 @@ +"""Tests for API key management service.""" + +from datetime import UTC, datetime, timedelta + +import pytest +import pytest_asyncio +from sqlalchemy.ext.asyncio import AsyncSession + +from src.modules.api_keys.crud import 
crud_api_keys, crud_key_permissions +from src.modules.api_keys.enums import KeyPermissionAction, KeyPermissionResource +from src.modules.api_keys.schemas import ( + APIKeyCreate, + APIKeyCreateInternal, + APIKeyUpdate, + KeyPermissionCreate, + KeyUsageCreate, +) +from src.modules.api_keys.service import APIKeyService +from src.modules.common.exceptions import PermissionDeniedError, ResourceNotFoundError + + +@pytest.fixture +def api_key_service(): + """Create API key service instance.""" + return APIKeyService() + + +@pytest_asyncio.fixture +async def test_api_key(api_key_service, db_session: AsyncSession, test_user: dict): + """Create a test API key.""" + key_data = APIKeyCreate( + name="Test API Key", permissions={"read": True, "write": True}, usage_limits={"requests_per_day": 1000} + ) + + response = await api_key_service.create_api_key(user_id=test_user["id"], key_data=key_data, db=db_session) + + return response + + +@pytest.mark.asyncio +async def test_create_api_key(api_key_service, db_session: AsyncSession, test_user: dict): + """Test creating a new API key.""" + key_data = APIKeyCreate(name="Test Key", permissions={"read": True, "write": True}, usage_limits={"requests_per_day": 1000}) + + response = await api_key_service.create_api_key(user_id=test_user["id"], key_data=key_data, db=db_session) + + assert response["name"] == "Test Key" + assert response["user_id"] == test_user["id"] + assert response["is_active"] is True + assert response["api_key"].startswith("fai_") + assert len(response["api_key"]) > 20 # Should be long enough + assert response["key_prefix"] is not None + assert len(response["key_prefix"]) == 8 + + +@pytest.mark.asyncio +async def test_api_key_generation_unique(api_key_service): + """Test that API key generation produces unique keys.""" + key1, prefix1, hash1 = api_key_service._generate_api_key() + key2, prefix2, hash2 = api_key_service._generate_api_key() + + assert key1 != key2 + assert prefix1 != prefix2 + assert hash1 != hash2 + 
assert key1.startswith("fai_") + assert key2.startswith("fai_") + + +@pytest.mark.asyncio +async def test_get_user_api_keys(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting user's API keys.""" + result = await api_key_service.get_user_api_keys(user_id=test_user["id"], db=db_session) + keys = result.get("data", []) if isinstance(result, dict) else [] + + assert len(keys) >= 1 + key = keys[0] + assert key["name"] == "Test API Key" + assert key["user_id"] == test_user["id"] + assert key["is_active"] is True + # API key should not be included in read response + assert "api_key" not in key + + +@pytest.mark.asyncio +async def test_get_user_api_keys_active_only(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting only active API keys.""" + # Deactivate the test key + await api_key_service.delete_api_key(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + # Should return no active keys + active_result = await api_key_service.get_user_api_keys(user_id=test_user["id"], db=db_session, active_only=True) + active_keys = active_result.get("data", []) if isinstance(active_result, dict) else [] + + # Should return all keys including inactive + all_result = await api_key_service.get_user_api_keys(user_id=test_user["id"], db=db_session, active_only=False) + all_keys = all_result.get("data", []) if isinstance(all_result, dict) else [] + + assert len(active_keys) == 0 + assert len(all_keys) >= 1 + + +@pytest.mark.asyncio +async def test_get_api_key_success(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting a specific API key.""" + key = await api_key_service.get_api_key(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + assert key["id"] == test_api_key["id"] + assert key["name"] == "Test API Key" + assert key["user_id"] == test_user["id"] + + +@pytest.mark.asyncio +async def test_get_api_key_not_found(api_key_service, 
db_session: AsyncSession, test_user: dict): + """Test getting non-existent API key.""" + with pytest.raises(ResourceNotFoundError): + await api_key_service.get_api_key(key_id=99999, user_id=test_user["id"], db=db_session) + + +@pytest.mark.asyncio +async def test_get_api_key_permission_denied( + api_key_service, db_session: AsyncSession, test_user: dict, test_user_2: dict, test_api_key +): + """Test getting API key owned by different user.""" + with pytest.raises(PermissionDeniedError): + await api_key_service.get_api_key(key_id=test_api_key["id"], user_id=test_user_2["id"], db=db_session) + + +@pytest.mark.asyncio +async def test_update_api_key(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test updating an API key.""" + update_data = APIKeyUpdate(name="Updated Key Name") + + updated_key = await api_key_service.update_api_key( + key_id=test_api_key["id"], user_id=test_user["id"], update_data=update_data, db=db_session + ) + + assert updated_key["name"] == "Updated Key Name" + assert updated_key["id"] == test_api_key["id"] + + +@pytest.mark.asyncio +async def test_delete_api_key(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test deleting (deactivating) an API key.""" + await api_key_service.delete_api_key(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + # Key should still exist but be inactive + key = await api_key_service.get_api_key(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + assert key["is_active"] is False + + +@pytest.mark.asyncio +async def test_validate_api_key_success(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test successful API key validation.""" + # Add permission for the key + permission_data = KeyPermissionCreate( + api_key_id=test_api_key["id"], + resource=KeyPermissionResource.CONVERSATIONS, + action=KeyPermissionAction.READ, + is_allowed=True, + ) + await crud_key_permissions.create(db=db_session, 
object=permission_data) + + validation = await api_key_service.validate_api_key( + api_key=test_api_key["api_key"], resource="conversations", action="read", db=db_session + ) + + assert validation.is_valid is True + assert validation.api_key_id == test_api_key["id"] + assert validation.user_id == test_user["id"] + assert validation.error_message is None + + +@pytest.mark.asyncio +async def test_validate_api_key_invalid(api_key_service, db_session: AsyncSession): + """Test validation with invalid API key.""" + validation = await api_key_service.validate_api_key( + api_key="fai_invalid_key_12345", resource="conversations", action="read", db=db_session + ) + + assert validation.is_valid is False + assert "Invalid API key" in validation.error_message + + +@pytest.mark.asyncio +async def test_validate_api_key_inactive(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test validation with inactive API key.""" + # Deactivate the key + await api_key_service.delete_api_key(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + validation = await api_key_service.validate_api_key( + api_key=test_api_key["api_key"], resource="conversations", action="read", db=db_session + ) + + assert validation.is_valid is False + assert "inactive" in validation.error_message + + +@pytest.mark.asyncio +async def test_validate_api_key_expired(api_key_service, db_session: AsyncSession, test_user: dict): + """Test validation with expired API key.""" + # Create key with past expiration + + key_data = APIKeyCreate( + name="Expired Key", + expires_at=datetime.now(UTC) - timedelta(days=1), # Already expired + ) + + expired_key = await api_key_service.create_api_key(user_id=test_user["id"], key_data=key_data, db=db_session) + + validation = await api_key_service.validate_api_key( + api_key=expired_key["api_key"], resource="conversations", action="read", db=db_session + ) + + assert validation.is_valid is False + assert "expired" in 
validation.error_message + + +@pytest.mark.asyncio +async def test_validate_api_key_no_permission(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test validation with no permissions.""" + validation = await api_key_service.validate_api_key( + api_key=test_api_key["api_key"], resource="admin", action="delete", db=db_session + ) + + assert validation.is_valid is False + assert "No permission" in validation.error_message + + +@pytest.mark.asyncio +async def test_wildcard_permissions(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test wildcard permission validation.""" + # Add wildcard permission + permission_data = KeyPermissionCreate( + api_key_id=test_api_key["id"], + resource=KeyPermissionResource.WILDCARD, + action=KeyPermissionAction.WILDCARD, + is_allowed=True, + ) + await crud_key_permissions.create(db=db_session, object=permission_data) + + validation = await api_key_service.validate_api_key( + api_key=test_api_key["api_key"], resource="any_resource", action="any_action", db=db_session + ) + + assert validation.is_valid is True + + +@pytest.mark.asyncio +async def test_record_usage(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test recording API key usage.""" + usage_data = KeyUsageCreate( + api_key_id=test_api_key["id"], + user_id=test_user["id"], + endpoint="/api/v1/conversations", + method="POST", + status_code=201, + response_time_ms=150, + tokens_used=25, + cost_microcents=5000, # $0.05 + user_agent="test-client/1.0", + ) + + usage_record = await api_key_service.record_usage( + api_key_id=test_api_key["id"], user_id=test_user["id"], usage_data=usage_data, db=db_session + ) + + assert usage_record["api_key_id"] == test_api_key["id"] + assert usage_record["user_id"] == test_user["id"] + assert usage_record["endpoint"] == "/api/v1/conversations" + assert usage_record["status_code"] == 201 + assert usage_record["tokens_used"] == 25 + assert 
usage_record["cost_microcents"] == 5000 + + +@pytest.mark.asyncio +async def test_get_key_usage(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting API key usage history.""" + # Record some usage + usage_data = KeyUsageCreate( + api_key_id=test_api_key["id"], + user_id=test_user["id"], + endpoint="/api/v1/test", + method="GET", + status_code=200, + response_time_ms=100, + ) + + await api_key_service.record_usage( + api_key_id=test_api_key["id"], user_id=test_user["id"], usage_data=usage_data, db=db_session + ) + + result = await api_key_service.get_key_usage(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + usage_history = result.get("data", []) if isinstance(result, dict) else [] + + assert len(usage_history) >= 1 + usage = usage_history[0] + assert usage["api_key_id"] == test_api_key["id"] + assert usage["endpoint"] == "/api/v1/test" + + +@pytest.mark.asyncio +async def test_get_usage_analytics(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting usage analytics for API key.""" + # Record multiple usage entries + endpoints = ["/api/v1/test1", "/api/v1/test2", "/api/v1/test1"] + status_codes = [200, 201, 500] + + for i, endpoint in enumerate(endpoints): + usage_data = KeyUsageCreate( + api_key_id=test_api_key["id"], + user_id=test_user["id"], + endpoint=endpoint, + method="GET", + status_code=status_codes[i], + response_time_ms=100 + i * 50, + tokens_used=10 + i * 5, + cost_microcents=1000 * (i + 1), + ) + + await api_key_service.record_usage( + api_key_id=test_api_key["id"], user_id=test_user["id"], usage_data=usage_data, db=db_session + ) + + analytics = await api_key_service.get_usage_analytics(key_id=test_api_key["id"], user_id=test_user["id"], db=db_session) + + assert analytics["api_key_id"] == test_api_key["id"] + assert analytics["total_requests"] == 3 + assert analytics["successful_requests"] == 2 # 200, 201 + assert analytics["failed_requests"] == 1 # 500 
+ assert analytics["total_tokens"] == 10 + 15 + 20 # 45 + assert analytics["total_cost_microcents"] == 1000 + 2000 + 3000 # 6000 + assert analytics["average_response_time_ms"] == (100 + 150 + 200) / 3 + + # Check most used endpoints + assert len(analytics["most_used_endpoints"]) >= 1 + most_used = analytics["most_used_endpoints"][0] + assert most_used["endpoint"] == "/api/v1/test1" + assert most_used["count"] == 2 + + # Check error breakdown + assert "500" in analytics["error_breakdown"] + assert analytics["error_breakdown"]["500"] == 1 + + +@pytest.mark.asyncio +async def test_get_user_summary(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test getting comprehensive user API key summary.""" + # Record some usage + usage_data = KeyUsageCreate( + api_key_id=test_api_key["id"], + user_id=test_user["id"], + endpoint="/api/v1/test", + method="GET", + status_code=200, + cost_microcents=2500, + ) + + await api_key_service.record_usage( + api_key_id=test_api_key["id"], user_id=test_user["id"], usage_data=usage_data, db=db_session + ) + + summary = await api_key_service.get_user_summary(user_id=test_user["id"], db=db_session) + + assert summary["user_id"] == test_user["id"] + assert summary["total_keys"] >= 1 + assert summary["active_keys"] >= 1 + assert summary["total_requests"] >= 1 + assert summary["total_cost_microcents"] >= 2500 + assert len(summary["keys"]) >= 1 + + +@pytest.mark.asyncio +async def test_api_key_hash_roundtrip(api_key_service): + """Hashing produces a fresh salt each call; verifying must still succeed.""" + test_key = "fai_test_key_12345" + + hash1 = api_key_service._hash_api_key(test_key) + hash2 = api_key_service._hash_api_key(test_key) + + assert hash1 != hash2 + assert hash1.startswith("scrypt$") + assert hash2.startswith("scrypt$") + assert api_key_service._verify_api_key(test_key, hash1) + assert api_key_service._verify_api_key(test_key, hash2) + assert not api_key_service._verify_api_key("fai_wrong_key", hash1) 
+ + +@pytest.mark.asyncio +async def test_validate_api_key_with_underscore_in_prefix(api_key_service, db_session: AsyncSession, test_user: dict): + """Regression: secrets.token_urlsafe alphabet includes `_`; prefix extraction must not split on it. + + When the random 8-char prefix happens to contain `_`, a naive `split("_", 2)` returns the wrong + substring and the key_prefix lookup misses, breaking validation for the (rare) keys that draw + underscores. + """ + api_key, prefix, key_hash = api_key_service._generate_api_key() + forced_prefix = "ab_cd_ef" + api_key = f"fai_{forced_prefix}_{api_key.split('_', 2)[2]}" + forced_hash = api_key_service._hash_api_key(api_key) + + key_dict = { + "name": "underscore prefix", + "user_id": test_user["id"], + "key_hash": forced_hash, + "key_prefix": forced_prefix, + "permissions": {}, + "usage_limits": {}, + } + await crud_api_keys.create(db=db_session, object=APIKeyCreateInternal(**key_dict)) + + permission_data = KeyPermissionCreate( + api_key_id=(await crud_api_keys.get(db=db_session, key_prefix=forced_prefix))["id"], + resource=KeyPermissionResource.WILDCARD, + action=KeyPermissionAction.WILDCARD, + is_allowed=True, + ) + await crud_key_permissions.create(db=db_session, object=permission_data) + + validation = await api_key_service.validate_api_key(api_key=api_key, resource="anything", action="anything", db=db_session) + + assert validation.is_valid is True + + +@pytest.mark.asyncio +async def test_usage_pagination(api_key_service, db_session: AsyncSession, test_user: dict, test_api_key): + """Test usage history pagination.""" + # Create multiple usage records + for i in range(5): + usage_data = KeyUsageCreate( + api_key_id=test_api_key["id"], user_id=test_user["id"], endpoint=f"/api/v1/test{i}", method="GET", status_code=200 + ) + + await api_key_service.record_usage( + api_key_id=test_api_key["id"], user_id=test_user["id"], usage_data=usage_data, db=db_session + ) + + # Test pagination + result = await 
api_key_service.get_key_usage( + key_id=test_api_key["id"], user_id=test_user["id"], db=db_session, limit=3, offset=0 + ) + usage_history = result.get("data", []) if isinstance(result, dict) else [] + + assert len(usage_history) == 3 diff --git a/backend/tests/unit/modules/common/__init__.py b/backend/tests/unit/modules/common/__init__.py new file mode 100644 index 00000000..3af971c0 --- /dev/null +++ b/backend/tests/unit/modules/common/__init__.py @@ -0,0 +1 @@ +"""Common module tests package.""" diff --git a/backend/tests/unit/modules/common/test_error_handler.py b/backend/tests/unit/modules/common/test_error_handler.py new file mode 100644 index 00000000..6fcfe9eb --- /dev/null +++ b/backend/tests/unit/modules/common/test_error_handler.py @@ -0,0 +1,122 @@ +"""Tests for the error handler module.""" + +import pytest +from fastapi import FastAPI +from httpx import ASGITransport, AsyncClient + +from src.modules.common.constants import GENERIC_ERROR_MESSAGE +from src.modules.common.exceptions import ( + InsufficientCreditsError, + ResourceNotFoundError, + ValidationError, +) +from src.modules.common.utils.error_handler import ( + _generate_support_id, + handle_exception, + map_exception, + register_exception_handlers, +) + + +def test_generate_support_id_length(): + support_id = _generate_support_id() + assert len(support_id) == 8 + + +def test_generate_support_id_unique(): + ids = {_generate_support_id() for _ in range(100)} + assert len(ids) == 100 + + +def _create_test_app() -> FastAPI: + """Create a minimal FastAPI app with error handlers registered.""" + app = FastAPI() + register_exception_handlers(app) + + @app.get("/not-found") + async def raise_not_found(): + raise ResourceNotFoundError("User 123 not found") + + @app.get("/validation") + async def raise_validation(): + raise ValidationError("name must be at least 2 chars") + + @app.get("/credits") + async def raise_credits(): + raise InsufficientCreditsError("You need 50 more credits") + + 
@app.get("/unhandled") + async def raise_unhandled(): + raise RuntimeError("unexpected internal failure") + + return app + + +@pytest.fixture +def test_app(): + return _create_test_app() + + +@pytest.mark.asyncio +async def test_domain_error_returns_generic_message(test_app): + async with AsyncClient(transport=ASGITransport(app=test_app), base_url="http://test") as client: + response = await client.get("/not-found") + + assert response.status_code == 404 + body = response.json() + # Must NOT contain the raw exception message + assert "User 123" not in body["detail"] + assert body["detail"] == GENERIC_ERROR_MESSAGE + assert "support_id" in body + assert len(body["support_id"]) == 8 + + +@pytest.mark.asyncio +async def test_insufficient_credits_preserves_message(test_app): + async with AsyncClient(transport=ASGITransport(app=test_app), base_url="http://test") as client: + response = await client.get("/credits") + + assert response.status_code == 402 + body = response.json() + # InsufficientCreditsError SHOULD keep its message + assert "50 more credits" in body["detail"] + assert "support_id" in body + + +@pytest.mark.asyncio +async def test_unhandled_error_returns_generic_500(test_app): + async with AsyncClient(transport=ASGITransport(app=test_app), base_url="http://test") as client: + response = await client.get("/unhandled") + + assert response.status_code == 500 + body = response.json() + assert "unexpected internal failure" not in body["detail"] + assert body["detail"] == GENERIC_ERROR_MESSAGE + assert "support_id" in body + + +def test_map_exception_not_found_uses_generic_detail(): + """map_exception must return a generic message, not the raw error string.""" + exc = ResourceNotFoundError("User 42 has secret internal ID xyz") + http_exc = map_exception(exc) + assert http_exc.status_code == 404 + assert "User 42" not in http_exc.detail + assert "secret" not in http_exc.detail + assert "not found" in http_exc.detail.lower() + + +def 
test_map_exception_insufficient_credits_preserves_detail(): + """InsufficientCreditsError must keep its message for frontend upgrade prompts.""" + exc = InsufficientCreditsError("You need 50 more credits") + http_exc = map_exception(exc) + assert http_exc.status_code == 402 + assert "50 more credits" in http_exc.detail + + +def test_handle_exception_returns_generic_for_domain_errors(): + """handle_exception (used by routes) must also return generic messages.""" + exc = ResourceNotFoundError("Payment record #123 not found in DB") + http_exc = handle_exception(exc) + assert http_exc is not None + assert http_exc.status_code == 404 + assert "Payment record #123" not in http_exc.detail diff --git a/backend/tests/unit/modules/rate_limit/__init__.py b/backend/tests/unit/modules/rate_limit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/modules/tier/__init__.py b/backend/tests/unit/modules/tier/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/tests/unit/modules/user/__init__.py b/backend/tests/unit/modules/user/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/backend/uv.lock b/backend/uv.lock new file mode 100644 index 00000000..2fb8f38c --- /dev/null +++ b/backend/uv.lock @@ -0,0 +1,3137 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" +resolution-markers = [ + "python_full_version >= '3.15'", + "python_full_version == '3.14.*'", + "python_full_version == '3.13.*'", + "python_full_version < '3.13'", +] + +[[package]] +name = "aio-pika" +version = "9.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiormq" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/63/56354526f2e6e915c93bee6e4dedb35888fe82d6bc1a19f35f5a77e795ff/aio_pika-9.6.2.tar.gz", hash = "sha256:c49e9246080dc8ffa1bb0e4aca407bf3d8ad78c3ee3a93df88b68fe65d7a49b9", size = 70851, upload-time = "2026-03-22T19:03:20.878Z" } +wheels = [ + { url 
= "https://files.pythonhosted.org/packages/25/05/256fa313f48bed075056d13593b92ce804be05d75f4f312be24edb82860a/aio_pika-9.6.2-py3-none-any.whl", hash = "sha256:2a5478af920d169795071c9c09c7542cd8cdece60438cf7804533dcbcce93b7f", size = 56269, upload-time = "2026-03-22T19:03:19.558Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/f5/a20c4ac64aeaef1679e25c9983573618ff765d7aa829fa2b84ae7573169e/aiohttp-3.13.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ab7229b6f9b5c1ba4910d6c41a9eb11f543eadb3f384df1b4c293f4e73d44d6", size = 757513, upload-time = "2026-03-31T21:57:02.146Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/0a/39fa6c6b179b53fcb3e4b3d2b6d6cad0180854eda17060c7218540102bef/aiohttp-3.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f14c50708bb156b3a3ca7230b3d820199d56a48e3af76fa21c2d6087190fe3d", size = 506748, upload-time = "2026-03-31T21:57:04.275Z" }, + { url = "https://files.pythonhosted.org/packages/87/ec/e38ce072e724fd7add6243613f8d1810da084f54175353d25ccf9f9c7e5a/aiohttp-3.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d2f8616f0ff60bd332022279011776c3ac0faa0f1b463f7bb12326fbc97a1c", size = 501673, upload-time = "2026-03-31T21:57:06.208Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/3bc7525d7e2beaa11b309a70d48b0d3cfc3c2089ec6a7d0820d59c657053/aiohttp-3.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2567b72e1ffc3ab25510db43f355b29eeada56c0a622e58dcdb19530eb0a3cb", size = 1763757, upload-time = "2026-03-31T21:57:07.882Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ab/e87744cf18f1bd78263aba24924d4953b41086bd3a31d22452378e9028a0/aiohttp-3.13.5-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fb0540c854ac9c0c5ad495908fdfd3e332d553ec731698c0e29b1877ba0d2ec6", size = 1720152, upload-time = "2026-03-31T21:57:09.946Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/ed17a6f2d742af17b50bae2d152315ed1b164b07a5fd5cc1754d99e4dfa5/aiohttp-3.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9883051c6972f58bfc4ebb2116345ee2aa151178e99c3f2b2bbe2af712abd13", size = 1818010, upload-time = "2026-03-31T21:57:12.157Z" }, + { url = "https://files.pythonhosted.org/packages/53/06/ecbc63dc937192e2a5cb46df4d3edb21deb8225535818802f210a6ea5816/aiohttp-3.13.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2294172ce08a82fb7c7273485895de1fa1186cc8294cfeb6aef4af42ad261174", size = 
1907251, upload-time = "2026-03-31T21:57:14.023Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a5/0521aa32c1ddf3aa1e71dcc466be0b7db2771907a13f18cddaa45967d97b/aiohttp-3.13.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a807cabd5115fb55af198b98178997a5e0e57dead43eb74a93d9c07d6d4a7dc", size = 1759969, upload-time = "2026-03-31T21:57:16.146Z" }, + { url = "https://files.pythonhosted.org/packages/f6/78/a38f8c9105199dd3b9706745865a8a59d0041b6be0ca0cc4b2ccf1bab374/aiohttp-3.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aa6d0d932e0f39c02b80744273cd5c388a2d9bc07760a03164f229c8e02662f6", size = 1616871, upload-time = "2026-03-31T21:57:17.856Z" }, + { url = "https://files.pythonhosted.org/packages/6f/41/27392a61ead8ab38072105c71aa44ff891e71653fe53d576a7067da2b4e8/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60869c7ac4aaabe7110f26499f3e6e5696eae98144735b12a9c3d9eae2b51a49", size = 1739844, upload-time = "2026-03-31T21:57:19.679Z" }, + { url = "https://files.pythonhosted.org/packages/6e/55/5564e7ae26d94f3214250009a0b1c65a0c6af4bf88924ccb6fdab901de28/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:26d2f8546f1dfa75efa50c3488215a903c0168d253b75fba4210f57ab77a0fb8", size = 1731969, upload-time = "2026-03-31T21:57:22.006Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c5/705a3929149865fc941bcbdd1047b238e4a72bcb215a9b16b9d7a2e8d992/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1162a1492032c82f14271e831c8f4b49f2b6078f4f5fc74de2c912fa225d51d", size = 1795193, upload-time = "2026-03-31T21:57:24.256Z" }, + { url = "https://files.pythonhosted.org/packages/a6/19/edabed62f718d02cff7231ca0db4ef1c72504235bc467f7b67adb1679f48/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8b14eb3262fad0dc2f89c1a43b13727e709504972186ff6a99a3ecaa77102b6c", size = 1606477, upload-time = 
"2026-03-31T21:57:26.364Z" }, + { url = "https://files.pythonhosted.org/packages/de/fc/76f80ef008675637d88d0b21584596dc27410a990b0918cb1e5776545b5b/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ca9ac61ac6db4eb6c2a0cd1d0f7e1357647b638ccc92f7e9d8d133e71ed3c6ac", size = 1813198, upload-time = "2026-03-31T21:57:28.316Z" }, + { url = "https://files.pythonhosted.org/packages/e5/67/5b3ac26b80adb20ea541c487f73730dc8fa107d632c998f25bbbab98fcda/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7996023b2ed59489ae4762256c8516df9820f751cf2c5da8ed2fb20ee50abab3", size = 1752321, upload-time = "2026-03-31T21:57:30.549Z" }, + { url = "https://files.pythonhosted.org/packages/88/06/e4a2e49255ea23fa4feeb5ab092d90240d927c15e47b5b5c48dff5a9ce29/aiohttp-3.13.5-cp311-cp311-win32.whl", hash = "sha256:77dfa48c9f8013271011e51c00f8ada19851f013cde2c48fca1ba5e0caf5bb06", size = 439069, upload-time = "2026-03-31T21:57:32.388Z" }, + { url = "https://files.pythonhosted.org/packages/c0/43/8c7163a596dab4f8be12c190cf467a1e07e4734cf90eebb39f7f5d53fc6a/aiohttp-3.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:d3a4834f221061624b8887090637db9ad4f61752001eae37d56c52fddade2dc8", size = 462859, upload-time = "2026-03-31T21:57:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 
1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = "2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time = "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = "2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = 
"2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = "https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = "https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = "2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time = "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = "2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = "2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = 
"2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = "https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { 
url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = "2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, 
upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time = "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = "2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = "2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = "2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, +] + +[[package]] +name = "aiomcache" +version = "0.8.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/0a/914d8df1002d88ca70679d192f6e16d113e6b5cbcc13c51008db9230025f/aiomcache-0.8.2.tar.gz", hash = "sha256:43b220d7f499a32a71871c4f457116eb23460fa216e69c1d32b81e3209e51359", size = 10640, upload-time = "2024-05-07T15:03:14.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/f8/78455f6377cbe85f335f4dbd40a807dafb72bd5fa05eb946f2ad0cec3d40/aiomcache-0.8.2-py3-none-any.whl", hash = "sha256:9d78d6b6e74e775df18b350b1cddfa96bd2f0a44d49ad27fa87759a3469cef5e", size = 10145, upload-time = "2024-05-07T15:03:12.003Z" }, +] + +[[package]] +name = "aiormq" +version = "6.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pamqp" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/0e/db90154d52d399108903fe603e5110a533c42065180265dd003788264080/aiormq-6.9.4.tar.gz", hash = "sha256:0e7c01b662804e1cc7ace9a17794e8c1192a27fc2afa96162362a6e61ae8e8ef", size = 49232, upload-time = "2026-03-23T09:18:19.493Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/48/1ce3773f392f02ceda37aee168fade9d725483a9592c202d06044cd093ff/aiormq-6.9.4-py3-none-any.whl", hash = "sha256:726a8586695e863fba68cf88842065ab12348c9438dcebdfc9d0bddaf6083277", size = 32166, upload-time = "2026-03-23T09:18:17.523Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "aiosqlite" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, +] + +[[package]] +name = "aiostream" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/65/b9b69695702b76a878c9879f2ee80cefce75bc5cb864fc100460bc1c5380/aiostream-0.7.1.tar.gz", hash = "sha256:272aaa0d8f83beb906f5aa9022bb59046bb7a103fa3770f807c31f918595acf6", size = 44059, upload-time = "2025-10-13T20:02:06.961Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a0/d7c6ca304140f3f49987d710e15bc164248924a35d8cdfac2f6e87fca041/aiostream-0.7.1-py3-none-any.whl", hash = "sha256:ea8739e9158ee6a606b3feedf3762721c3507344e540d09a10984c5e88a13b37", size = 41416, upload-time = "2025-10-13T20:02:05.535Z" }, +] + +[[package]] +name = "alembic" +version = "1.18.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/94/13/8b084e0f2efb0275a1d534838844926f798bd766566b1375174e2448cd31/alembic-1.18.4.tar.gz", hash = "sha256:cb6e1fd84b6174ab8dbb2329f86d631ba9559dd78df550b57804d607672cedbc", size = 2056725, upload-time = "2026-02-10T16:00:47.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/29/6533c317b74f707ea28f8d633734dbda2119bbadfc61b2f3640ba835d0f7/alembic-1.18.4-py3-none-any.whl", hash = "sha256:a5ed4adcf6d8a4cb575f3d759f071b03cd6e5c7618eb796cb52497be25bfe19a", size = 263893, upload-time = "2026-02-10T16:00:49.997Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = 
"2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "ast-serialize" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/9d/912fefab0e30aee6a3af8a62bbea4a81b29afa4ba2c973d31170620a26de/ast_serialize-0.3.0.tar.gz", hash = "sha256:1bc3ca09a63a021376527c4e938deedd11d11d675ce850e6f9c7487f5889992b", size = 60689, upload-time = "2026-04-30T23:24:48.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/57/a54d4de491d6cdd7a4e4b0952cc3ca9f60dcefa7b5fb48d6d492debe1649/ast_serialize-0.3.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:3a867927df59f76a18dc1d874a0b2c079b42c58972dca637905576deb0912e14", size = 1182966, upload-time = "2026-04-30T23:23:57.376Z" }, + { url = "https://files.pythonhosted.org/packages/ee/9e/a5db014bb0f91b209236b57c429389e31290c0093532b8436d577699b2fa/ast_serialize-0.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a6fb063bf040abf8321e7b8113a0554eda445ffc508aa51287f8808886a5ae22", size = 1171316, upload-time = "2026-04-30T23:23:59.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/59/fd55133e478c4326f60a11df02573bf7ccb2ac685810b50f1803d0f68053/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5075cd8482573d743586779e5f9b652a015e37d4e95132d7e5a9bc5c8f483d8f", size = 1232234, upload-time = "2026-04-30T23:24:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/cc/79/0ca1d26357ecb4a697d74d00b73ef3137f24c140424125393a0de820eb09/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41560b27794f4553b0f77811e9fb325b77db4a2b39018d437e09932275306e66", size = 1233437, upload-time = "2026-04-30T23:24:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/7078ec94dd6e124b8e028ac77016a4f13c83fa1c145790f2e68f3816998b/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b967c01ca74909c5d90e0fe4393401e2cc5da5ebd9a6262a19e45ffd3757dec8", size = 1440188, upload-time = "2026-04-30T23:24:04.717Z" }, + { url = "https://files.pythonhosted.org/packages/21/16/cca7195ef55a012f8013c3442afa91d287a0a36dcf88b480b262475135b3/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:424ebb8f46cd993f7cec4009d119312d8433dd90e6b0df0499cd2c91bdcc5af9", size = 1254211, upload-time = "2026-04-30T23:24:06.18Z" }, + { url = "https://files.pythonhosted.org/packages/a0/0f/f3d4dfae67dee6580534361a6343367d34217e7d25cff858bd1d8f03b8ed/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d14b1d566b56e2ee70b11fec1de7e0b94ec7cd83717ec7d189967841a361190e", size = 1255973, upload-time = "2026-04-30T23:24:07.772Z" }, + { url = "https://files.pythonhosted.org/packages/14/41/55fbfe02c42f40fbe3e74eda167d977d555ff720ce1abfa08515236efd88/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ba30b18735f047ec11103d1ab92f4789cf1fea1e0dc89b04a2f5a0632fd79de", size = 1298629, 
upload-time = "2026-04-30T23:24:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/28/36/7d2501cacc7989fb8504aa9da2a2022a174200a59d4e6639de4367a57fdd/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e6ea0754cb7b0f682ebb005ffb0d18f8d17993490d9c289863cd69cacc4ab8df", size = 1408435, upload-time = "2026-04-30T23:24:11.013Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/54e3b469c3fa0bf9cd532fa643d1d33b73303f8d70beac3e366b68dd64b7/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:a0c5aa1073a5ba7b2abaa4b54abe8b8d75c4d1e2d54a2ff70b0ca6222fea5728", size = 1508174, upload-time = "2026-04-30T23:24:12.635Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/9b9621865b02c60539e26d9b114a312b4fa46aa703e33e79317174bfea21/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:4e52650d834c1ea7791969a361de2c54c13b2fb4c519ec79445fa8b9021a147d", size = 1502354, upload-time = "2026-04-30T23:24:14.186Z" }, + { url = "https://files.pythonhosted.org/packages/34/dd/f138bc5c43b0c414fdd12eefe15677839323078b6e75301ad7f96cd26d45/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15bd6af3f136c61dae27805eb6b8f3269e85a545c4c27ffe9e530ead78d2b36d", size = 1450504, upload-time = "2026-04-30T23:24:16.076Z" }, + { url = "https://files.pythonhosted.org/packages/68/cf/97ef9e1c315601db74365955c8edd3292e3055500d6317602815dbdf08ae/ast_serialize-0.3.0-cp314-cp314t-win32.whl", hash = "sha256:d188bfe37b674b49708497683051d4b571366a668799c9b8e8a94513694969d9", size = 1058662, upload-time = "2026-04-30T23:24:17.535Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d6/e2c3483c31580fdb623f92ad38d2f856cde4b9205a3e6bd84760f3de7d82/ast_serialize-0.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:5832c2fdf8f8a6cf682b4cfcf677f5eaf39b4ddbc490f5480cfccdd1e7ce8fa1", size = 1100349, upload-time = "2026-04-30T23:24:18.992Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/89/29abcb1fe18a429cda60c6e0bbd1d6e90499339842a2f548d7567542357e/ast_serialize-0.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:670f177188d128fb7f9f15b5ad0e1b553d22c34e3f584dcb83eb8077600437f0", size = 1072895, upload-time = "2026-04-30T23:24:20.706Z" }, + { url = "https://files.pythonhosted.org/packages/bc/93/72abad83966ed6235647c9f956417dc1e17e997696388521910e3d1fa3f4/ast_serialize-0.3.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2ec2fafa5e4313cc8feed96e436ebe19ac7bc6fa41fbc2827e826c48b9e4c3a9", size = 1190024, upload-time = "2026-04-30T23:24:22.486Z" }, + { url = "https://files.pythonhosted.org/packages/85/4f/eb88584b2f0234e581762011208ca203252bf6c98e59b4769daa571f3576/ast_serialize-0.3.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ef6d3c08b7b4cd29b48410338e134764a00e76d25841eb02c1084e868c888ecc", size = 1178633, upload-time = "2026-04-30T23:24:24.35Z" }, + { url = "https://files.pythonhosted.org/packages/56/51/cf1ec1ff3e616373d0dcbd5fad502e0029dc541f13ab642259762a7d127f/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d841424f41b886e98044abc80769c14a956e6e5ccd5fb5b0d9f5ead72be18a4", size = 1241351, upload-time = "2026-04-30T23:24:25.987Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/68fcf50478cf1093f2d423f034ae06453122c8b415d8e21a44668eca485d/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d21453734ad39367ede5d37efe4f59f830ce1c09f432fc72a90e368f77a4a3e7", size = 1239582, upload-time = "2026-04-30T23:24:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/9d/c1/a6c9fa284eceb5fc6f21347e968445a051d7ca2c4d34e6a04314646dbcee/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f5e110cdce2a347e1dd987529c88ef54d26f67848dce3eba1b3b2cc2cf085c94", size = 1448853, upload-time = "2026-04-30T23:24:29.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/5f/8ad3829a09e4e8c5328a53ce7d4711d660944e3e164c5f6abcc2c8f27167/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6e23a98e57560a055f5c4b68700a0fd5ce483d2814c23140b3638c7f5d1e61", size = 1262204, upload-time = "2026-04-30T23:24:31.482Z" }, + { url = "https://files.pythonhosted.org/packages/25/13/44aa28d97f10e25247e8576b5f6b2795d4fa1a80acc88acc942c508d06f7/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c9e763d70293d65ce1e1ea8c943140c68d0953f0268c7ee0998f2e07f77dd0", size = 1266458, upload-time = "2026-04-30T23:24:33.088Z" }, + { url = "https://files.pythonhosted.org/packages/d8/58/b3a8be3777cd3744324fd5cec0d80d37cd96fc7cbb0fb010e03dff1e870f/ast_serialize-0.3.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4388a1796c228f1ce5c391426f7d21a0003ad3b47f677dbeded9bd1a85c7209f", size = 1308700, upload-time = "2026-04-30T23:24:34.657Z" }, + { url = "https://files.pythonhosted.org/packages/13/03/f8312d6b57f5471a9dc7946f22b8798a1fc296d38c25766223aacadec42c/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5283cdcc0c64c3d8b9b688dc6aaa012d9c0cf1380a7f774a6bae6a1c01b3205a", size = 1416724, upload-time = "2026-04-30T23:24:36.562Z" }, + { url = "https://files.pythonhosted.org/packages/50/5d/13fc3789a7abac00559da2e2e9f386db4612aa1f84fc53d09bf714c37545/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f5ef88cc5842a5d7a6ac09dc0d5fc2c98f5d276c1f076f866d55047ce886785b", size = 1515441, upload-time = "2026-04-30T23:24:38.018Z" }, + { url = "https://files.pythonhosted.org/packages/eb/b9/7ab43fc7a23b1f970281093228f5f79bed6edeed7a3e672bde6d7a832a58/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:cc14bf402bdc0978594ecce783793de2c7470cd4f5cd7eb286ca97ed8ff7cba9", size = 1510522, upload-time = "2026-04-30T23:24:39.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/ec/d75fc2b788d319f1fad77c14156896f31afdfc68af85b505e5bdebcb9592/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:11eae0cf1b7b3e0678133cc2daa974ea972caf02eb4b3aa062af6fa9acd52c57", size = 1460917, upload-time = "2026-04-30T23:24:41.305Z" }, + { url = "https://files.pythonhosted.org/packages/95/74/f99c81193a2725911e1911ae567ed27c2f2419332c7f3537366f9d238cac/ast_serialize-0.3.0-cp39-abi3-win32.whl", hash = "sha256:2db3dd99de5e6a5a11d7dda73de8750eb6e5baaf25245adf7bdcfe64b6108ae2", size = 1067804, upload-time = "2026-04-30T23:24:43.091Z" }, + { url = "https://files.pythonhosted.org/packages/16/81/76af00c47daa151e89f98ae21fbbcb2840aaa9f5766579c4da76a3c57188/ast_serialize-0.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:a2cd125adccf7969470621905d302750cd25951f22ea430d9a25b7be031e5549", size = 1105561, upload-time = "2026-04-30T23:24:44.578Z" }, + { url = "https://files.pythonhosted.org/packages/bd/46/d3ec57ad500f598d1554bd14ce4df615960549ab2844961bc4e1f5fbd174/ast_serialize-0.3.0-cp39-abi3-win_arm64.whl", hash = "sha256:0dd00da29985f15f50dc35728b7e1e7c84507bccfea1d9914738530f1c72238a", size = 1077165, upload-time = "2026-04-30T23:24:46.377Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.31.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { 
url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + +[[package]] +name = "attrs" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, +] + +[[package]] +name = "bcrypt" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" }, + { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" }, + { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" }, + { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" }, + { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" }, + { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" }, + { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" }, + { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" }, + { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" }, + { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" }, + { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" }, + { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" }, + { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" }, + { url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", 
hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash 
= "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, +] + +[[package]] +name = "certifi" +version = "2026.4.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash 
= "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash 
= "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/d7/b5b7020a0565c2e9fa8c09f4b5fa6232feb326b8c20081ccded47ea368fd/charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7", size = 309705, upload-time = "2026-04-02T09:26:02.191Z" }, + { url = "https://files.pythonhosted.org/packages/5a/53/58c29116c340e5456724ecd2fff4196d236b98f3da97b404bc5e51ac3493/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7", size = 206419, upload-time = 
"2026-04-02T09:26:03.583Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/e8146dc6591a37a00e5144c63f29fb7c97a734ea8a111190783c0e60ab63/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e", size = 227901, upload-time = "2026-04-02T09:26:04.738Z" }, + { url = "https://files.pythonhosted.org/packages/fb/73/77486c4cd58f1267bf17db420e930c9afa1b3be3fe8c8b8ebbebc9624359/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c", size = 222742, upload-time = "2026-04-02T09:26:06.36Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/f74eb381a7d94ded44739e9d94de18dc5edc9c17fb8c11f0a6890696c0a9/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df", size = 214061, upload-time = "2026-04-02T09:26:08.347Z" }, + { url = "https://files.pythonhosted.org/packages/dc/92/42bd3cefcf7687253fb86694b45f37b733c97f59af3724f356fa92b8c344/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265", size = 199239, upload-time = "2026-04-02T09:26:09.823Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3d/069e7184e2aa3b3cddc700e3dd267413dc259854adc3380421c805c6a17d/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4", size = 210173, upload-time = "2026-04-02T09:26:10.953Z" }, + { url = "https://files.pythonhosted.org/packages/62/51/9d56feb5f2e7074c46f93e0ebdbe61f0848ee246e2f0d89f8e20b89ebb8f/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e", size = 209841, upload-time = "2026-04-02T09:26:12.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/59/893d8f99cc4c837dda1fe2f1139079703deb9f321aabcb032355de13b6c7/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38", size = 200304, upload-time = "2026-04-02T09:26:13.711Z" }, + { url = "https://files.pythonhosted.org/packages/7d/1d/ee6f3be3464247578d1ed5c46de545ccc3d3ff933695395c402c21fa6b77/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c", size = 229455, upload-time = "2026-04-02T09:26:14.941Z" }, + { url = "https://files.pythonhosted.org/packages/54/bb/8fb0a946296ea96a488928bdce8ef99023998c48e4713af533e9bb98ef07/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b", size = 210036, upload-time = "2026-04-02T09:26:16.478Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/015b2387f913749f82afd4fcba07846d05b6d784dd16123cb66860e0237d/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c", size = 224739, upload-time = "2026-04-02T09:26:17.751Z" }, + { url = "https://files.pythonhosted.org/packages/17/ab/63133691f56baae417493cba6b7c641571a2130eb7bceba6773367ab9ec5/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d", size = 216277, upload-time = "2026-04-02T09:26:18.981Z" }, + { url = "https://files.pythonhosted.org/packages/06/6d/3be70e827977f20db77c12a97e6a9f973631a45b8d186c084527e53e77a4/charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad", 
size = 147819, upload-time = "2026-04-02T09:26:20.295Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/5f67790f06b735d7c7637171bbfd89882ad67201891b7275e51116ed8207/charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00", size = 159281, upload-time = "2026-04-02T09:26:21.74Z" }, + { url = "https://files.pythonhosted.org/packages/ca/83/6413f36c5a34afead88ce6f66684d943d91f233d76dd083798f9602b75ae/charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1", size = 147843, upload-time = "2026-04-02T09:26:22.901Z" }, + { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, 
upload-time = "2026-04-02T09:27:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" }, + { url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" }, + { url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" }, + { url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" }, + { url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" }, + { url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" }, + { url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = 
"sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" }, + { url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" }, +] + +[[package]] +name = "click" +version = "8.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "48.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/a9/db8f313fdcd85d767d4973515e1db101f9c71f95fced83233de224673757/cryptography-48.0.0.tar.gz", hash = "sha256:5c3932f4436d1cccb036cb0eaef46e6e2db91035166f1ad6505c3c9d5a635920", size = 832984, upload-time = "2026-05-04T22:59:38.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/3d/01f6dd9190170a5a241e0e98c2d04be3664a9e6f5b9b872cde63aff1c3dd/cryptography-48.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:0c558d2cdffd8f4bbb30fc7134c74d2ca9a476f830bb053074498fbc86f41ed6", size = 8001587, upload-time = "2026-05-04T22:57:36.803Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6e/e90527eef33f309beb811cf7c982c3aeffcce8e3edb178baa4ca3ae4a6fa/cryptography-48.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f5333311663ea94f75dd408665686aaf426563556bb5283554a3539177e03b8c", size = 4690433, upload-time = "2026-05-04T22:57:40.373Z" }, + { url = "https://files.pythonhosted.org/packages/90/04/673510ed51ddff56575f306cf1617d80411ee76831ccd3097599140efdfe/cryptography-48.0.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:7995ef305d7165c3f11ae07f2517e5a4f1d5c18da1376a0a9ed496336b69e5f3", size = 4710620, upload-time = "2026-05-04T22:57:42.935Z" }, + { url = "https://files.pythonhosted.org/packages/14/d5/e9c4ef932c8d800490c34d8bd589d64a31d5890e27ec9e9ad532be893294/cryptography-48.0.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:40ba1f85eaa6959837b1d51c9767e230e14612eea4ef110ee8854ada22da1bf5", size = 4696283, upload-time = "2026-05-04T22:57:45.294Z" }, + { url = "https://files.pythonhosted.org/packages/0c/29/174b9dfb60b12d59ecfc6cfa04bc88c21b42a54f01b8aae09bb6e51e4c7f/cryptography-48.0.0-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:369a6348999f94bbd53435c894377b20ab95f25a9065c283570e70150d8abc3c", size = 5296573, upload-time = "2026-05-04T22:57:47.933Z" }, + { url = "https://files.pythonhosted.org/packages/95/38/0d29a6fd7d0d1373f0c0c88a04ba20e359b257753ac497564cd660fc1d55/cryptography-48.0.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a0e692c683f4df67815a2d258b324e66f4738bd7a96a218c826dce4f4bd05d8f", size = 4743677, upload-time = "2026-05-04T22:57:50.067Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/eef653013d5c63b6a490529e0316f9ac14a37602965d4903efed1399f32b/cryptography-48.0.0-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:18349bbc56f4743c8b12dc32e2bccb2cf83ee8b69a3bba74ef8ae857e26b3d25", size = 4330808, upload-time = "2026-05-04T22:57:52.301Z" }, + { url = "https://files.pythonhosted.org/packages/84/9e/500463e87abb7a0a0f9f256ec21123ecde0a7b5541a15e840ea54551fd81/cryptography-48.0.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e8eac43dfca5c4cccc6dad9a80504436fca53bb9bc3100a2386d730fbe6b602", size = 4695941, upload-time = "2026-05-04T22:57:54.603Z" }, + { url = "https://files.pythonhosted.org/packages/e3/dc/7303087450c2ec9e7fbb750e17c2abfbc658f23cbd0e54009509b7cc4091/cryptography-48.0.0-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9ccdac7d40688ecb5a3b4a604b8a88c8002e3442d6c60aead1db2a89a041560c", size = 
5252579, upload-time = "2026-05-04T22:57:57.207Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c0/7101d3b7215edcdc90c45da544961fd8ed2d6448f77577460fa75a8443f7/cryptography-48.0.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:bd72e68b06bb1e96913f97dd4901119bc17f39d4586a5adf2d3e47bc2b9d58b5", size = 4743326, upload-time = "2026-05-04T22:57:59.535Z" }, + { url = "https://files.pythonhosted.org/packages/ac/d8/5b833bad13016f562ab9d063d68199a4bd121d18458e439515601d3357ec/cryptography-48.0.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:59baa2cb386c4f0b9905bd6eb4c2a79a69a128408fd31d32ca4d7102d4156321", size = 4826672, upload-time = "2026-05-04T22:58:01.996Z" }, + { url = "https://files.pythonhosted.org/packages/98/e1/7074eb8bf3c135558c73fc2bcf0f5633f912e6fb87e868a55c454080ef09/cryptography-48.0.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9249e3cd978541d665967ac2cb2787fd6a62bddf1e75b3e347a594d7dacf4f74", size = 4972574, upload-time = "2026-05-04T22:58:03.968Z" }, + { url = "https://files.pythonhosted.org/packages/04/70/e5a1b41d325f797f39427aa44ef8baf0be500065ab6d8e10369d850d4a4f/cryptography-48.0.0-cp311-abi3-win32.whl", hash = "sha256:9c459db21422be75e2809370b829a87eb37f74cd785fc4aa9ea1e5f43b47cda4", size = 3294868, upload-time = "2026-05-04T22:58:06.467Z" }, + { url = "https://files.pythonhosted.org/packages/f4/ac/8ac51b4a5fc5932eb7ee5c517ba7dc8cd834f0048962b6b352f00f41ebf9/cryptography-48.0.0-cp311-abi3-win_amd64.whl", hash = "sha256:5b012212e08b8dd5edc78ef54da83dd9892fd9105323b3993eff6bea65dc21d7", size = 3817107, upload-time = "2026-05-04T22:58:08.845Z" }, + { url = "https://files.pythonhosted.org/packages/6b/84/70e3feea9feea87fd7cbe77efb2712ae1e3e6edf10749dc6e95f4e60e455/cryptography-48.0.0-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:3cb07a3ed6431663cd321ea8a000a1314c74211f823e4177fefa2255e057d1ec", size = 7986556, upload-time = "2026-05-04T22:58:11.172Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/6e/18e07a618bb5442ba10cf4df16e99c071365528aa570dfcb8c02e25a303b/cryptography-48.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c7378637d7d88016fa6791c159f698b3d3eed28ebf844ac36b9dc04a14dae18", size = 4684776, upload-time = "2026-05-04T22:58:13.712Z" }, + { url = "https://files.pythonhosted.org/packages/be/6a/4ea3b4c6c6759794d5ee2103c304a5076dc4b19ae1f9fe47dba439e159e9/cryptography-48.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc90c0b39b2e3c65ef52c804b72e3c58f8a04ab2a1871272798e5f9572c17d20", size = 4698121, upload-time = "2026-05-04T22:58:16.448Z" }, + { url = "https://files.pythonhosted.org/packages/2f/59/6ff6ad6cae03bb887da2a5860b2c9805f8dac969ef01ce563336c49bd1d1/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:76341972e1eff8b4bea859f09c0d3e64b96ce931b084f9b9b7db8ef364c30eff", size = 4690042, upload-time = "2026-05-04T22:58:18.544Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b4/fc334ed8cfd705aca282fe4d8f5ae64a8e0f74932e9feecb344610cf6e4d/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:55b7718303bf06a5753dcdccf2f3945cf18ad7bffde41b61226e4db31ab89a9c", size = 5282526, upload-time = "2026-05-04T22:58:20.75Z" }, + { url = "https://files.pythonhosted.org/packages/11/08/9f8c5386cc4cd90d8255c7cdd0f5baf459a08502a09de30dc51f553d38dc/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:a64697c641c7b1b2178e573cbc31c7c6684cd56883a478d75143dbb7118036db", size = 4733116, upload-time = "2026-05-04T22:58:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/b8/77/99307d7574045699f8805aa500fa0fb83422d115b5400a064ddd306d7750/cryptography-48.0.0-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:561215ea3879cb1cbbf272867e2efda62476f240fb58c64de6b393ae19246741", size = 4316030, upload-time = "2026-05-04T22:58:25.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/36/a608b98337af3cb2aff4818e406649d30572b7031918b04c87d979495348/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ad64688338ed4bc1a6618076ba75fd7194a5f1797ac60b47afe926285adb3166", size = 4689640, upload-time = "2026-05-04T22:58:27.747Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a6/825010a291b4438aecc1f568bc428189fc1175515223632477c07dc0a6df/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:906cbf0670286c6e0044156bc7d4af9cbb0ef6db9f73e52c3ec56ba6bdde5336", size = 5237657, upload-time = "2026-05-04T22:58:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/b9/09/4e76a09b4caa29aad535ddc806f5d4c5d01885bd978bd984fbc6ca032cae/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:ea8990436d914540a40ab24b6a77c0969695ed52f4a4874c5137ccf7045a7057", size = 4732362, upload-time = "2026-05-04T22:58:32.009Z" }, + { url = "https://files.pythonhosted.org/packages/18/78/444fa04a77d0cb95f417dda20d450e13c56ba8e5220fc892a1658f44f882/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c18684a7f0cc9a3cb60328f496b8e3372def7c5d2df39ac267878b05565aaaae", size = 4819580, upload-time = "2026-05-04T22:58:34.254Z" }, + { url = "https://files.pythonhosted.org/packages/38/85/ea67067c70a1fd4be2c63d35eeed82658023021affccc7b17705f8527dd2/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9be5aafa5736574f8f15f262adc81b2a9869e2cfe9014d52a44633905b40d52c", size = 4963283, upload-time = "2026-05-04T22:58:36.376Z" }, + { url = "https://files.pythonhosted.org/packages/75/54/cc6d0f3deac3e81c7f847e8a189a12b6cdd65059b43dad25d4316abd849a/cryptography-48.0.0-cp314-cp314t-win32.whl", hash = "sha256:c17dfe85494deaeddc5ce251aebd1d60bbe6afc8b62071bb0b469431a000124f", size = 3270954, upload-time = "2026-05-04T22:58:38.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/67/cc947e288c0758a4e5473d1dcb743037ab7785541265a969240b8885441a/cryptography-48.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27241b1dc9962e056062a8eef1991d02c3a24569c95975bd2322a8a52c6e5e12", size = 3797313, upload-time = "2026-05-04T22:58:40.746Z" }, + { url = "https://files.pythonhosted.org/packages/f2/63/61d4a4e1c6b6bab6ce1e213cd36a24c415d90e76d78c5eb8577c5541d2e8/cryptography-48.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:58d00498e8933e4a194f3076aee1b4a97dfec1a6da444535755822fe5d8b0b86", size = 7983482, upload-time = "2026-05-04T22:58:43.769Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ac/f5b5995b87770c693e2596559ffafe195b4033a57f14a82268a2842953f3/cryptography-48.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:614d0949f4790582d2cc25553abd09dd723025f0c0e7c67376a1d77196743d6e", size = 4683266, upload-time = "2026-05-04T22:58:46.064Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c6/8b14f67e18338fbc4adb76f66c001f5c3610b3e2d1837f268f47a347dbbb/cryptography-48.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ce4bfae76319a532a2dc68f82cc32f5676ee792a983187dac07183690e5c66f", size = 4696228, upload-time = "2026-05-04T22:58:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/ea/73/f808fbae9514bd91b47875b003f13e284c8c6bdfd904b7944e803937eec1/cryptography-48.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2eb992bbd4661238c5a397594c83f5b4dc2bc5b848c365c8f991b6780efcc5c7", size = 4689097, upload-time = "2026-05-04T22:58:50.9Z" }, + { url = "https://files.pythonhosted.org/packages/93/01/d86632d7d28db8ae83221995752eeb6639ffb374c2d22955648cf8d52797/cryptography-48.0.0-cp39-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:22a5cb272895dce158b2cacdfdc3debd299019659f42947dbdac6f32d68fe832", size = 5283582, upload-time = "2026-05-04T22:58:53.017Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/e1/50edc7a50334807cc4791fc4a0ce7468b4a1416d9138eab358bfc9a3d70b/cryptography-48.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2b4d59804e8408e2fea7d1fbaf218e5ec984325221db76e6a241a9abd6cdd95c", size = 4730479, upload-time = "2026-05-04T22:58:55.611Z" }, + { url = "https://files.pythonhosted.org/packages/6f/af/99a582b1b1641ff5911ac559beb45097cf79efd4ead4657f578ef1af2d47/cryptography-48.0.0-cp39-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:984a20b0f62a26f48a3396c72e4bc34c66e356d356bf370053066b3b6d54634a", size = 4326481, upload-time = "2026-05-04T22:58:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/90/ee/89aa26a06ef0a7d7611788ffd571a7c50e368cc6a4d5eef8b4884e866edb/cryptography-48.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5a5ed8fde7a1d09376ca0b40e68cd59c69fe23b1f9768bd5824f54681626032a", size = 4688713, upload-time = "2026-05-04T22:59:00.077Z" }, + { url = "https://files.pythonhosted.org/packages/70/ba/bcb1b0bb7a33d4c7c0c4d4c7874b4a62ae4f56113a5f4baefa362dfb1f0f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:8cd666227ef7af430aa5914a9910e0ddd703e75f039cef0825cd0da71b6b711a", size = 5238165, upload-time = "2026-05-04T22:59:02.317Z" }, + { url = "https://files.pythonhosted.org/packages/c9/70/ca4003b1ce5ca3dc3186ada51908c8a9b9ff7d5cab83cc0d43ee14ec144f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9071196d81abc88b3516ac8cdfad32e2b66dd4a5393a8e68a961e9161ddc6239", size = 4729947, upload-time = "2026-05-04T22:59:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/44/a0/4ec7cf774207905aef1a8d11c3750d5a1db805eb380ee4e16df317870128/cryptography-48.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1e2d54c8be6152856a36f0882ab231e70f8ec7f14e93cf87db8a2ed056bf160c", size = 4822059, upload-time = "2026-05-04T22:59:07.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/75/a2e55f99c16fcac7b5d6c1eb19ad8e00799854d6be5ca845f9259eae1681/cryptography-48.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5da777e32ffed6f85a7b2b3f7c5cbc88c146bfcd0a1d7baf5fcc6c52ee35dd4", size = 4960575, upload-time = "2026-05-04T22:59:09.851Z" }, + { url = "https://files.pythonhosted.org/packages/b8/23/6e6f32143ab5d8b36ca848a502c4bcd477ae75b9e1677e3530d669062578/cryptography-48.0.0-cp39-abi3-win32.whl", hash = "sha256:77a2ccbbe917f6710e05ba9adaa25fb5075620bf3ea6fb751997875aff4ae4bd", size = 3279117, upload-time = "2026-05-04T22:59:12.019Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9a/0fea98a70cf1749d41d738836f6349d97945f7c89433a259a6c2642eefeb/cryptography-48.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:16cd65b9330583e4619939b3a3843eec1e6e789744bb01e7c7e2e62e33c239c8", size = 3792100, upload-time = "2026-05-04T22:59:14.884Z" }, + { url = "https://files.pythonhosted.org/packages/be/d2/024b5e06be9d44cb021fb0e1a03d34d63989cf56a0fe62f3dfbab695b9b4/cryptography-48.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:84cf79f0dc8b36ac5da873481716e87aef31fcfa0444f9e1d8b4b2cece142855", size = 3950391, upload-time = "2026-05-04T22:59:17.415Z" }, + { url = "https://files.pythonhosted.org/packages/bc/17/3861e17c56fa0fd37491a14a8673fdb77c57fc5693cafe745ea8b06dba75/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fdfef35d751d510fcef5252703621574364fec16418c4a1e5e1055248401054b", size = 4637126, upload-time = "2026-05-04T22:59:20.197Z" }, + { url = "https://files.pythonhosted.org/packages/f0/0a/7e226dbff530f21480727eb764973a7bff2b912f8e15cd4f129e71b56d1d/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0890f502ddf7d9c6426129c3f49f5c0a39278ed7cd6322c8755ffca6ee675a13", size = 4667270, upload-time = "2026-05-04T22:59:22.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/f2/5a72274ca9f1b2a8b44a662ee0bf1b435909deb473d6f97bcd035bcdbc71/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:ecde28a596bead48b0cfd2a1b4416c3d43074c2d785e3a398d7ec1fc4d0f7fbb", size = 4636797, upload-time = "2026-05-04T22:59:24.912Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e1/48cedb2fe63626e91ded1edad159e2a4fb8b6906c4425eb7749673077ce7/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:4defde8685ae324a9eb9d818717e93b4638ef67070ac9bc15b8ca85f63048355", size = 4666800, upload-time = "2026-05-04T22:59:27.474Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ca/7e8365deec19afb2b2c7be7c1c0aa8f99633b54e90c570999acda93260fc/cryptography-48.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:db63bf618e5dea46c07de12e900fe1cdd2541e6dc9dbae772a70b7d4d4765f6a", size = 3739536, upload-time = "2026-05-04T22:59:29.61Z" }, +] + +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/ca/8de7744cb3bc966c85430ca2d0fcaeea872507c6a4cf6e007f7fe269ed9d/ecdsa-0.19.2.tar.gz", hash = "sha256:62635b0ac1ca2e027f82122b5b81cb706edc38cd91c63dda28e4f3455a2bf930", size = 202432, upload-time = "2026-03-26T09:58:17.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/79/119091c98e2bf49e24ed9f3ae69f816d715d2904aefa6a2baa039a2ba0b0/ecdsa-0.19.2-py2.py3-none-any.whl", hash = "sha256:840f5dc5e375c68f36c1a7a5b9caad28f95daa65185c9253c0c08dd952bb7399", size = 150818, upload-time = "2026-03-26T09:58:15.808Z" }, +] + +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = 
"sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "faker" +version = "40.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/13/6741787bd91c4109c7bed047d68273965cd52ce8a5f773c471b949334b6d/faker-40.15.0.tar.gz", hash = "sha256:20f3a6ec8c266b74d4c554e34118b21c3c2056c0b4a519d15c8decb3a4e6e795", size = 1967447, upload-time = "2026-04-17T20:05:27.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/a7/a600f8f30d4505e89166de51dd121bd540ab8e560e8cf0901de00a81de8c/faker-40.15.0-py3-none-any.whl", hash = "sha256:71ab3c3370da9d2205ab74ffb0fd51273063ad562b3a3bb69d0026a20923e318", size = 2004447, upload-time = "2026-04-17T20:05:25.437Z" }, +] + +[[package]] +name = "fastapi" +version = "0.136.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/5d/45/c130091c2dfa061bbfe3150f2a5091ef1adf149f2a8d2ae769ecaf6e99a2/fastapi-0.136.1.tar.gz", hash = "sha256:7af665ad7acfa0a3baf8983d393b6b471b9da10ede59c60045f49fbc89a0fa7f", size = 397448, upload-time = "2026-04-23T16:49:44.046Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/ff/2e4eca3ade2c22fe1dea7043b8ee9dabe47753349eb1b56a202de8af6349/fastapi-0.136.1-py3-none-any.whl", hash = "sha256:a6e9d7eeada96c93a4d69cb03836b44fa34e2854accb7244a1ece36cd4781c3f", size = 117683, upload-time = "2026-04-23T16:49:42.437Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "fastar" }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "pydantic-extra-types" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-boilerplate" +version = "0.18.0" +source = { editable = "." 
} +dependencies = [ + { name = "aiomcache" }, + { name = "aiosqlite" }, + { name = "alembic" }, + { name = "asyncpg" }, + { name = "faker" }, + { name = "fastapi", extra = ["standard"] }, + { name = "fastcrud" }, + { name = "fastsecure" }, + { name = "greenlet" }, + { name = "httpx" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "redis" }, + { name = "sqladmin" }, + { name = "sqlalchemy" }, + { name = "taskiq" }, + { name = "taskiq-aio-pika" }, + { name = "taskiq-redis" }, + { name = "typer" }, + { name = "user-agents" }, +] + +[package.optional-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-mock" }, + { name = "pytest-xdist", extra = ["psutil"] }, + { name = "ruff" }, + { name = "testcontainers" }, + { name = "testcontainers-postgres" }, + { name = "types-python-jose" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiomcache", specifier = ">=0.8.2" }, + { name = "aiosqlite", specifier = ">=0.21.0" }, + { name = "alembic", specifier = ">=1.16.4" }, + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "faker", specifier = ">=37.1.0" }, + { name = "fastapi", extras = ["standard"], specifier = ">=0.115.8" }, + { name = "fastcrud", specifier = ">=0.21.0" }, + { name = "fastsecure", specifier = ">=0.3.0" }, + { name = "greenlet", specifier = ">=3.1.1" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "itsdangerous", specifier = ">=2.2.0" }, + { name = "jinja2", specifier = ">=3.1.6" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.14.1" }, + { name = "pydantic", specifier = ">=2.10.6" }, + { name = "pydantic-settings", specifier = ">=2.7.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.25.3" }, + { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.14.0" }, + { name 
= "pytest-xdist", extras = ["psutil"], marker = "extra == 'dev'", specifier = ">=3.8.0" }, + { name = "redis", specifier = ">=6.1.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.4" }, + { name = "sqladmin", specifier = ">=0.22.0" }, + { name = "sqlalchemy", specifier = ">=2.0.37" }, + { name = "taskiq", specifier = ">=0.11.20" }, + { name = "taskiq-aio-pika", specifier = ">=0.4.3" }, + { name = "taskiq-redis", specifier = ">=1.1.2" }, + { name = "testcontainers", marker = "extra == 'dev'", specifier = ">=4.10.0" }, + { name = "testcontainers-postgres", marker = "extra == 'dev'", specifier = ">=0.0.1rc1" }, + { name = "typer", specifier = ">=0.12" }, + { name = "types-python-jose", marker = "extra == 'dev'", specifier = ">=3.4.0.20250224" }, + { name = "user-agents", specifier = ">=2.2.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "fastapi-cli" +version = "0.0.24" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6e/58/74797ae9e4610cfa0c6b34c8309096d3b20bb29be3b8b5fbf1004d10fa5f/fastapi_cli-0.0.24.tar.gz", hash = "sha256:1afc9c9e21d7ebc8a3ca5e31790cd8d837742be7e4f8b9236e99cb3451f0de00", size = 19043, upload-time = "2026-02-24T10:45:10.476Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/4b/68f9fe268e535d79c76910519530026a4f994ce07189ac0dded45c6af825/fastapi_cli-0.0.24-py3-none-any.whl", hash = "sha256:4a1f78ed798f106b4fee85ca93b85d8fe33c0a3570f775964d37edb80b8f0edc", size = 12304, upload-time = "2026-02-24T10:45:09.552Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "fastapi-cloud-cli" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastar" }, + { name = "httpx" }, + { 
name = "pydantic", extra = ["email"] }, + { name = "rich-toolkit" }, + { name = "rignore" }, + { name = "sentry-sdk" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/57/cee8e91b83f39e75ae5562a2237261442a8179dcb3b631c7398113157398/fastapi_cloud_cli-0.17.1.tar.gz", hash = "sha256:0baece208fa88063bec46dccb5fb512f3199162092165e57654b44e64adbc44d", size = 47409, upload-time = "2026-04-27T13:38:07.094Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/a0/e252b68cf155409afabea037ab2971f41509481838847f6503fe890884ea/fastapi_cloud_cli-0.17.1-py3-none-any.whl", hash = "sha256:325e0199bdac7cb86f5df4f4a1d2070054095588088ef7b923a60cec458dcd63", size = 34046, upload-time = "2026-04-27T13:38:08.319Z" }, +] + +[[package]] +name = "fastar" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/0f/0aeb3fc50046617702acc0078b277b58367fd62eb727b9ec733ae0e8bbcc/fastar-0.11.0.tar.gz", hash = "sha256:aa7f100f7313c03fdb20f1385927ba95671071ba308ad0c1763fef295e1895ce", size = 70238, upload-time = "2026-04-13T17:11:17.143Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/7a/fb367bdaf4efa2c7952a45aeab2e87a564293ecffe150af673ec8edfda46/fastar-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b82fd6f996e65a86f67a6bd64dd22ef3e8ae2dcaed0ae3b550e71f7e1bbb1df5", size = 709869, upload-time = "2026-04-13T17:09:55.62Z" }, + { url = "https://files.pythonhosted.org/packages/80/ff/b87efb0dcfd081c62c7c7601d7681dabe63103cd51fc16f8d57a1ab45961/fastar-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27eed386fd0558e6daa29211111bbd7b740f7c7e881197f8a00ac7c0f3cdb1d7", size = 631668, upload-time = "2026-04-13T17:09:40.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/7c/0ed6dd38b9adc04b3a8ec3b7045908e7c2170ba0ff6e6d2c51bc9fc770f3/fastar-0.11.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a6931bebc1d8e95ddeef55732c195449e6b44ef33aa31b325505097ed3b4d6aa", size = 869663, upload-time = "2026-04-13T17:09:09.78Z" }, + { url = "https://files.pythonhosted.org/packages/58/ce/8b7fb3f23855accebaaf2d2637eac7f261a7a5d936f861a172079f1ef511/fastar-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f72ce42a5e28a74fbd4d5fbf1a3ac1a1163d13cbc200cbd005fb0fabc54bd", size = 762938, upload-time = "2026-04-13T17:07:54.51Z" }, + { url = "https://files.pythonhosted.org/packages/07/cc/5491e2b677bb841f768e3aba052d0344338a5c78aa5d4c18b443831a8e8d/fastar-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5b83c1f61f7017d6e1498568038f8745440cfc16ca2f697ec81bac83050108f6", size = 759232, upload-time = "2026-04-13T17:08:08.864Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b7/643630bdbd179e41e9fae31c03b4cf6061dbf4d6fbbae8425d16eb12545d/fastar-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db73a9b765a516e73983b25341e7b5e0189733878279e278b2295131b0e3a21e", size = 926271, upload-time = "2026-04-13T17:08:23.68Z" }, + { url = "https://files.pythonhosted.org/packages/09/5d/37ade50003b4540e0a53ef100f6692d7ab2ac1122d5acf39920cc09a3e8b/fastar-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:625827d52eb4e8fec942e0233f125ff8010fcf6a67c0a974a8e5f4666b771e3c", size = 818634, upload-time = "2026-04-13T17:08:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ff/135d177de32cc1e837c99019e4643e6e79352bde49544d4ece5b5eebf56b/fastar-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7f5fd8fa21ec0a88296a38dc5d7fc35efd3b26d46a17b8b7c73c5563925ca15", size = 822755, upload-time = "2026-04-13T17:09:25.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/cb/b835dbe76ceac7fa6105851468c259ffd06830eb9c029402e499d0ec153b/fastar-0.11.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:8c15af91b8cd87ddf23ea55355ae513c1de3ab67178f26dad017c9e9c0af6096", size = 887101, upload-time = "2026-04-13T17:08:39.248Z" }, + { url = "https://files.pythonhosted.org/packages/9e/54/aa8289eb57fc550535470397cb051f5a58a7c89ca4de31d5502b916dd894/fastar-0.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03a112395a8b0bff251423bd1564c012f0cc058ad8b6bd8fba96f3d7fc117e44", size = 973606, upload-time = "2026-04-13T17:10:10.98Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/776d50a0897c01dc6bfd0926772ee913436fdae91b9affaf0a0cbd09f0a1/fastar-0.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f2994bb8f5f8c11eb12beae1e6e77a907173c9819236b8a4c8f0573652ceccce", size = 1036696, upload-time = "2026-04-13T17:10:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/cf0f9b499fb37ac065c8a01ec642f96a3c5eb849c38ae983b59f3b3245e0/fastar-0.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dcf99e4b5973d842c7f19c776c3a83cdc0977d505edce6206438505c0456b517", size = 1078182, upload-time = "2026-04-13T17:10:45.318Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9e/21e4701aec4a1123d4dc4d31578dc18875582b5710e4725f7ceb752a248b/fastar-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29c9c386dc0d5dda78845a8e6b1480d26ab861c1e0b68f42ae5735cb70ca07f1", size = 1032336, upload-time = "2026-04-13T17:11:02.364Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e2/5872b28c72c27ec1a00760eace6ff35f714f41ebbd5208cf016b12e29250/fastar-0.11.0-cp311-cp311-win32.whl", hash = "sha256:030b2580fc394f2c9b7890b6735810404e9b9ed5e0344db150b945965b5482b7", size = 457368, upload-time = "2026-04-13T17:11:43.528Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/6e/ce6832a16193eb4466f4108be8809c249b51cb1f89dd7894545700d079d5/fastar-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:83ab57ae067969cd0b483ac3b6dccc4b595fc77f5c820760998648d4c42822b5", size = 488605, upload-time = "2026-04-13T17:11:29.161Z" }, + { url = "https://files.pythonhosted.org/packages/15/5a/9cfb80661cf38fd7b0889224beb7d2746784d4ade2a931ed9775a18d8602/fastar-0.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:27b1a4cee2298b704de8151d310462ee7335ed036011ca9aa6e784b30b6c73a9", size = 464580, upload-time = "2026-04-13T17:11:18.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/06/a5773706afc8bd496769786590bbc56d2d0ee419a299cc12ea3f5717fcf3/fastar-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3c51f1c2cdddbd1420d2897ace7738e36c65e17f6ae84e0bfe763f8d1068bb97", size = 708394, upload-time = "2026-04-13T17:09:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/d5e2a4e48495616440a21eed07558219ca90243ad00b0502586f95bd4833/fastar-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0d9d6b052baf5380baea866675dab6ccd04ec2460d12b1c46f10ce3f4ee6a820", size = 628417, upload-time = "2026-04-13T17:09:42.145Z" }, + { url = "https://files.pythonhosted.org/packages/ab/69/9816d69ac8265c9e50456637a487ccfb7a9c566efd9dbcd673df9c2558c2/fastar-0.11.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd2f05666d4df7e14885b5c38fefd92a785917387513d33d837ff42ec143a22f", size = 863950, upload-time = "2026-04-13T17:09:11.506Z" }, + { url = "https://files.pythonhosted.org/packages/5b/0d/f88daad53aff2e754b6b5ff2a7113f72447a34f6ef17cc23ca99988117b7/fastar-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e6e74aba1ae77ca4aedcaf1697cd413319f4c88a5ccbe5b42c709517c5097e", size = 760737, upload-time = "2026-04-13T17:07:55.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/a6/82ef4ecd969d50d92ed3ed9dbd8fe77faa24be5e5736f716edc9f4ce8d62/fastar-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38ef77fe940bbc9b37a98bd838727f844b11731cd39358a2640ff864fb385086", size = 757603, upload-time = "2026-04-13T17:08:10.623Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/50249f0d827251f8ac511495e2eacccebda80a00a0ad73e9615b8113b84f/fastar-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8955e61b32d6aff82c983217abf80933fd823b0e727586fc72f08043d996fd59", size = 923952, upload-time = "2026-04-13T17:08:25.526Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/faee41659e9c379d906d24eaee6d6833ac8cfef0a5df480e5c2a8d3efb33/fastar-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:483532442cdb08fbff0169510224eae0836f2f672cea6aacb52847d90fefdc46", size = 816574, upload-time = "2026-04-13T17:08:56.076Z" }, + { url = "https://files.pythonhosted.org/packages/22/47/0448ea7992b997dad2bf004bfd98eca74b5858630eae080b50c7b17d9ddc/fastar-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef5a6071121e05d8287fc75bccb054bcbac8bb0501200a0c0a8feeace5303ea4", size = 819382, upload-time = "2026-04-13T17:09:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/33/ef/0d63eb43586831b7a6f8b22c4d77125a7c594423af1f4f090fa9541b9b40/fastar-0.11.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:e45e598af5afe8412197d4786efd6cf29be02e7d3d4f6a3461149eae5d7e94f1", size = 885254, upload-time = "2026-04-13T17:08:40.9Z" }, + { url = "https://files.pythonhosted.org/packages/01/25/edd584675d69e49a165052c3ee886df1c5d574f3e7d813c990306387c623/fastar-0.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e160919b1c47ddb8538e7e8eb4cd527281b40f0bf75110a75993838ef61f286", size = 971239, upload-time = "2026-04-13T17:10:12.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/37/e8bb24f506ba2b08fbaf36c5800e843bd4d542954e9331f00418e2d23349/fastar-0.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:4bb4dc0fc8f7a6807febcebce8a2f3626ba4955a9263d81ecc630aad83be84c0", size = 1035185, upload-time = "2026-04-13T17:10:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bf/be753736296338149ee4cb3e92e2b5423d6ba17c7b951d15218fd7e99bbf/fastar-0.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4ec95af56aa173f6e320e1183001bf108ba59beaf13edd1fc8200648db203588", size = 1072191, upload-time = "2026-04-13T17:10:47.072Z" }, + { url = "https://files.pythonhosted.org/packages/d2/cd/a81c1aaafb5a22ce57c98ae22f39c89413ed53e4ee6e1b1444b0bd666a6c/fastar-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:136cf342735464091c39dc3708168f9fdeb9ebea40b1ead937c61afaf46143d9", size = 1028054, upload-time = "2026-04-13T17:11:04.293Z" }, + { url = "https://files.pythonhosted.org/packages/ec/88/1ce4eed3d70627c95f49ca017f6bbbf2ddcc4b0c601d293259de7689bc20/fastar-0.11.0-cp312-cp312-win32.whl", hash = "sha256:35f23c11b556cc4d3704587faacbc0037f7bdf6c4525cd1d09c70bda4b1c6809", size = 454198, upload-time = "2026-04-13T17:11:45.168Z" }, + { url = "https://files.pythonhosted.org/packages/8f/1d/26ce92f4331cd61a69840db9ca6115829805eec24f285481a854f578e917/fastar-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:920bc56c3c0b8a8ca492904941d1883c1c947c858cd93343356c29122a38f44c", size = 486697, upload-time = "2026-04-13T17:11:31.084Z" }, + { url = "https://files.pythonhosted.org/packages/ed/96/e6eda4480559c69b05d466e7b5ea9170e81fef3795a73e059959a3258319/fastar-0.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:395248faf89e8a6bd5dc1fd544c8465113b627cb6d7c8b296796b60ebea33593", size = 462591, upload-time = "2026-04-13T17:11:20.577Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d6/3be260037e86fb694e88d47f583bac3a0188c99cee1a6b257ac26cb6b53c/fastar-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", 
hash = "sha256:33f544b08b4541b678e53749b4552a44720d96761fb79c172b005b1089c443ed", size = 707975, upload-time = "2026-04-13T17:09:58.866Z" }, + { url = "https://files.pythonhosted.org/packages/e1/cd/7867aefb1784662554a335f2952c75a50f0c70585ed0d2210d6cc15e5627/fastar-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:91c1c792447e4a642745f347ff9847c52af39633071c57ee67ed53c157fc3506", size = 628460, upload-time = "2026-04-13T17:09:43.776Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2b/d11d84bdd5e0e377771b955755771e3460b290da5809cb78c1b735ee2228/fastar-0.11.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:881247e6b6eaea59fc6569f9b61447aa6b9fc2ee864e048b4643d69c52745805", size = 863054, upload-time = "2026-04-13T17:09:13.048Z" }, + { url = "https://files.pythonhosted.org/packages/25/39/d3f428b318fa940b1b6e785b8d54fc895dfb5d5b945ef8d5442ffa904fb2/fastar-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:863b7929845c9fec92ef6c8d59579cf46af5136655e5342f8df5cebe46cab06c", size = 760247, upload-time = "2026-04-13T17:07:57.396Z" }, + { url = "https://files.pythonhosted.org/packages/9e/04/03949aee82aabb8ede06ac5a4a5579ffaf98a8fe59ce958494508ff15513/fastar-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96b4a57df12bf3211662627a3ea29d62ecb314a2434a0d0843f9fc23e47536e5", size = 756512, upload-time = "2026-04-13T17:08:12.415Z" }, + { url = "https://files.pythonhosted.org/packages/3f/0c/2ca1ae0a3828ca51047962d932b80daca2522db73e8cb9d040cb6ebe28d5/fastar-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceef1c2c4df7b7b8ebd3f5d718bbf457b9bbdf25ce0bd07870211ec4fbd9aff4", size = 922183, upload-time = "2026-04-13T17:08:27.187Z" }, + { url = "https://files.pythonhosted.org/packages/65/68/7fe808b1f73a68e686f25434f538c6dc10ef4dfb3db0ace22cd861744bf8/fastar-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b8e545918441910a779659d4759ad0eef349e935fbdb4668a666d3681567eb05", size = 816394, upload-time = "2026-04-13T17:08:57.657Z" }, + { url = "https://files.pythonhosted.org/packages/1f/17/07d086080f8a83b8d7966955e29bcdbd6a060f5bd949dc9d5abd3658cead/fastar-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28095bb8f821e85fc2764e1a55f03e5e2876dee2abe7cd0ee9420d929905d643", size = 818983, upload-time = "2026-04-13T17:09:28.46Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e2/2c4edf0910af2e814ff6d65b77a91196d472ca8a9fb2033bd983f6856caa/fastar-0.11.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0fafb95ecbe70f666a5e9b35dd63974ccdc9bb3d99ccdbd4014a823ec3e659b5", size = 884689, upload-time = "2026-04-13T17:08:42.763Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/04fdcbd6558e60de4ced3b55230fac47675d181252582b2fcec3c74608e5/fastar-0.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af48fed039b94016629dcdad1c95c90c486326dd068de2b0a4df419ee09b6821", size = 970677, upload-time = "2026-04-13T17:10:15.124Z" }, + { url = "https://files.pythonhosted.org/packages/df/b3/2b860a9658550167dbd5824c85e88d0b4b912bf493e42a6322544d6e483d/fastar-0.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:74cd96163f39b8638ab4e8d49708ca887959672a22871d8170d01f067319533b", size = 1034026, upload-time = "2026-04-13T17:10:32.318Z" }, + { url = "https://files.pythonhosted.org/packages/b7/9b/fa42ea1188b144bac4b1b60753dfd449974a4d5eda132029ee7711569f94/fastar-0.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e8b993cb5613bab495ed482810bedc0986633fcb9a3b55c37ec88e0d6714f6a", size = 1071147, upload-time = "2026-04-13T17:10:48.833Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/d2e501556dca9f1fbc9246111a31792fb49ad908fa4927f34938a97a3604/fastar-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dfe39d91fc28e37e06162d94afe01050220edb7df554acb5b702b5503e564816", size = 1028377, upload-time 
= "2026-04-13T17:11:06.374Z" }, + { url = "https://files.pythonhosted.org/packages/db/33/5f11f23eca0a569cd052507bc45dda2e5468697f8665728d25be44120f7d/fastar-0.11.0-cp313-cp313-win32.whl", hash = "sha256:c5f63d4d99ff4bfb37c659982ec413358bdee747005348756cc50a04d412d989", size = 454089, upload-time = "2026-04-13T17:11:46.821Z" }, + { url = "https://files.pythonhosted.org/packages/da/2f/35ff03c939cba7a255a9132367873fec6c355fd06a7f84fedcbaf4c8129f/fastar-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:8690ed1928d31ded3ada308e1086525fb3871f5fa81e1b69601a3f7774004583", size = 486312, upload-time = "2026-04-13T17:11:32.86Z" }, + { url = "https://files.pythonhosted.org/packages/ef/71/ee9246cbfcbfd4144558f35e7e9a306ffe0a7564730a5188c45f21d2dab8/fastar-0.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:d977ded9d98a0719a305e0a4d5ee811f1d3e856d853a50acb8ae833c3cd6d5d2", size = 461975, upload-time = "2026-04-13T17:11:22.589Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cd/3644c48ecac456f928c12d47ec3bed36c36555b17c3859856f1ff860265d/fastar-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:71375bd6f03c2a43eb47bd949ea38ff45434917f9cdac79675c5b9f60de4fa73", size = 707860, upload-time = "2026-04-13T17:10:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/69/ca/dee04476ae3626b2b040a60ad84628f77e1ffd8444232f2426b0ca1e0d7e/fastar-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:eddfd9cab16e19ae247fe44bf992cb403ccfe27d3931d6de29a4695d95ad386c", size = 628216, upload-time = "2026-04-13T17:09:45.355Z" }, + { url = "https://files.pythonhosted.org/packages/dc/5e/9395c7353d079cb4f5be0f7982ce0dc9f2e7dec5fd175eef466729d6023a/fastar-0.11.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c371f1d4386c699018bb64eb2fa785feacf32785559049d2bb72fe4af023f53", size = 864378, upload-time = "2026-04-13T17:09:14.611Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/ba/1e4f67148223ff219612b6281a6000357abbcc2417964fa5c83f11d68fce/fastar-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cad7fa41e3e66554387481c1a09365e4638becd322904932674159d5f4046728", size = 760921, upload-time = "2026-04-13T17:07:59.138Z" }, + { url = "https://files.pythonhosted.org/packages/0f/82/09d11fb6d12f17993ffaf32ffd30c3c121a11e2966e84f19fb6f66430118/fastar-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf36652fa71b83761717c9899b98732498f8a2cb6327ff16bbf07f6be85c3437", size = 757012, upload-time = "2026-04-13T17:08:14.186Z" }, + { url = "https://files.pythonhosted.org/packages/52/1f/5aeeacc4cb65615e2c9292cd9c5b0cd6fb6d2e6ee472ca6adc6c1b1b22ef/fastar-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f68ff8c17833053da4841720e95edde80ce45bb994b6b7d51418dddaac70ee47", size = 924510, upload-time = "2026-04-13T17:08:28.741Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1a/1e5bdabbeaf2e856928956292609f2ff6a650f94480fb8afaca30229e483/fastar-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4563ed37a12ea1cdc398af8571258d24b988bf342b7b3bf5451bd5891243280c", size = 816602, upload-time = "2026-04-13T17:08:59.461Z" }, + { url = "https://files.pythonhosted.org/packages/87/24/f960147910da3bed41a3adfcb026e17d5f50f4cf467a3324237a7088f61a/fastar-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cee63c9875cba3b70dc44338c560facc5d6e763047dcc4a30501f9a68cf5f890", size = 819452, upload-time = "2026-04-13T17:09:29.926Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f4/3e77d7901d5707fd7f8a352e153c8ae09ea974e6fabad0b7c4eb9944b8d4/fastar-0.11.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:bd76bfffae6d0a91f4ac4a612f721e7aec108db97dccdd120ae063cd66959f27", size = 885254, upload-time = "2026-04-13T17:08:44.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/01/1585edd5ec47782ae93cd94edf05828e0ab02ef00aec00aea4194a600464/fastar-0.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f5b707501ec01c1bc0518f741f01d322e50c9adc19a451aa24f67a2316e9397", size = 971496, upload-time = "2026-04-13T17:10:17.024Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e9/6874c9d1236ded565a0bed54b320ac9f165f287b1d89490fb70f9f323c81/fastar-0.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:37c0b5a88a657839aad98b0a6c9e4ac4c2c15d6b49c44ee3935c6b08e9d3e479", size = 1034685, upload-time = "2026-04-13T17:10:34.063Z" }, + { url = "https://files.pythonhosted.org/packages/14/d8/4ab20613ce2983427aee958e39be878dba874aa227c530a845e32429c4f6/fastar-0.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6c55f536c62a6efb180c1af0d5182948bff576bbfe6276e8e1359c9c7d2215d8", size = 1072675, upload-time = "2026-04-13T17:10:50.53Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ae/5ac3b7c20ce4b08f011dd2b979f96caabe64f9b10b157f211ea91bdfadca/fastar-0.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3082eeca59e189b9039335862f4c2780c0c8871d656bfdf559db4414a105b251", size = 1029330, upload-time = "2026-04-13T17:11:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e7/37cd6a1d4e288292170b64e19d79ecce2a7de8bb76790323399a2abc4619/fastar-0.11.0-cp314-cp314-win32.whl", hash = "sha256:b201a0a4e29f9fec2a177e13154b8725ec65ab9f83bd6415483efaa2aa18344b", size = 453940, upload-time = "2026-04-13T17:11:48.713Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1c/795c878b1ee29d79021cf8ed81f18f2b25ccde58453b0d34b9bdc7e025ea/fastar-0.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:868fddb26072a43e870a8819134b9f80ee602931be5a76e6fb873e04da343637", size = 486334, upload-time = "2026-04-13T17:11:34.882Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a4/113f104301df8bddcc0b3775b611a30cb7610baa3add933c7ccac9386467/fastar-0.11.0-cp314-cp314-win_arm64.whl", 
hash = "sha256:3db39c9cc42abb0c780a26b299f24dfbc8be455985e969e15336d70d7b2f833b", size = 461534, upload-time = "2026-04-13T17:11:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a6/5c5f2c2c8e0c63e56a5636ebc7721589c889e94c0092cec7eb28ae7207e6/fastar-0.11.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:49c3299dec5e125e7ebaa27545714da9c7391777366015427e0ae62d548b442b", size = 707156, upload-time = "2026-04-13T17:10:02.176Z" }, + { url = "https://files.pythonhosted.org/packages/df/f7/982c01b61f0fc135ad2b16d01e6d0ee53cf8791e68827f5f7c5a65b2e5b1/fastar-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3328ed1ed56d31f5198350b17dd60449b8d6b9d47abb4688bab6aef4450a165b", size = 627032, upload-time = "2026-04-13T17:09:46.978Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c3/38f1dac77ae0c71c37b176277c96d830796b8ce2fe69705f917829b53829/fastar-0.11.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd3eca3bbfec84a614bcb4143b4ad4f784d0895babc26cfc88436af88ca23c7a", size = 864403, upload-time = "2026-04-13T17:09:16.58Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f0/e69c363bdb3e5a5848e937b662b5469581ee6682c51bc1c0556494773929/fastar-0.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff86a967acb0d621dd24063dda090daa67bf4993b9570e97fe156de88a9006ca", size = 759480, upload-time = "2026-04-13T17:08:00.599Z" }, + { url = "https://files.pythonhosted.org/packages/3b/29/4d8737590c2a6357d614d7cc7288e8f68e7e449680b8922997cc4349e65e/fastar-0.11.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86eaf7c0e985d93a7734168be2fb232b2a8cca53e41431c2782d7c12b12c03b1", size = 756219, upload-time = "2026-04-13T17:08:15.699Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ec/400de7b3b7d48801908f19cf5462177104395799472671b3e8152b2b04ca/fastar-0.11.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91f07b0b8eb67e2f177733a1f884edad7dfb9f8977ffef15927b20cb9604027d", size = 923669, upload-time = "2026-04-13T17:08:30.574Z" }, + { url = "https://files.pythonhosted.org/packages/5d/01/8926c53da923fed7ab4b96e7fbf7f73b663beb4f02095b654d6fab46f9ad/fastar-0.11.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f85c896885eb4abf1a635d54dea22cac6ae48d04fc2ea26ae652fcf1febe1220", size = 815729, upload-time = "2026-04-13T17:09:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/89/f0/5fef4c7946e352651b504b1a4235dac3505e7cfd24020788ab50552e84bf/fastar-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:075c07095c8de4b774ba8f28b9c0a02b1a2cd254da50cbe464dd3bb2432e9158", size = 819812, upload-time = "2026-04-13T17:09:31.907Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c8/0ebc3298b4a45e7bddc50b169ae6a6f5b80c939394d4befe6e60de535ee7/fastar-0.11.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:07f028933820c65750baf3383b807ecce1cd9385cf00ce192b79d263ad6b856c", size = 884074, upload-time = "2026-04-13T17:08:45.802Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9f/7baa4cdff8d6fbca41fa5c764b48a941fed8a9ec6c4cc92de65895a28299/fastar-0.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:039f875efa0f01fa43c20bf4e2fc7305489c61d0ac76eda991acfba7820a0e63", size = 969450, upload-time = "2026-04-13T17:10:18.667Z" }, + { url = "https://files.pythonhosted.org/packages/d4/dc/1ebbfb58a47056ba866494f19efbcdd2ba2897096b94f36e796594b4d05b/fastar-0.11.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:fff12452a9a5c6814a012445f26365541cc3d99dcca61f09762e6a389f7a32ea", size = 1033775, upload-time = "2026-04-13T17:10:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/c2/5f/ce4e3914066f08c99eb8c32952cc07c1a013e81b1db1b0f598130bf6b974/fastar-0.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:2bf733e09f942b6fa876efe30a90508d1f4caef5630c00fb2a84fba355873712", 
size = 1072158, upload-time = "2026-04-13T17:10:52.497Z" }, + { url = "https://files.pythonhosted.org/packages/03/2a/6bca72992c84151c387cc6558f3867f5ebe5fb3684ee6fa9b76280ba4b8e/fastar-0.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d1531fa848fdd3677d2dce0a4b436ea64d9ae38fb8babe2ddbc180dd153cb7a3", size = 1028577, upload-time = "2026-04-13T17:11:09.934Z" }, + { url = "https://files.pythonhosted.org/packages/83/18/7a7c15657a3da5569b26fc51cde6a80f8d84cb54b3b1aea6d74a103db4ad/fastar-0.11.0-cp314-cp314t-win32.whl", hash = "sha256:5744551bc67c6fc6581cbd0e34a0fd6e2cd0bd30b43e94b1c3119cf35064b162", size = 453601, upload-time = "2026-04-13T17:11:53.726Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d8/331b59a6de279f3ad75c10c02c40a12f21d64a437d9c3d6f1af2dcbd7a76/fastar-0.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f4ce44e3b56c47cf38244b98d29f269b259740a580c47a2552efa5b96a5458fb", size = 486436, upload-time = "2026-04-13T17:11:40.089Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fd/5390ec4f49100f3ecb9968a392f9e6d039f1e3fe0ecd28443716ff01e589/fastar-0.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:76c1359314355eafbc6989f20fb1ad565a3d10200117923b9da765a17e2f6f11", size = 461049, upload-time = "2026-04-13T17:11:25.918Z" }, + { url = "https://files.pythonhosted.org/packages/cc/5c/9bbeffbf1905391446dd98aa520422ce7affde5c9a7c22d757cc5d7c1397/fastar-0.11.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1266d6a004f427b0d61bd6c7b544d84cc964691b2232c2f4d635a1b75f2f6d5e", size = 711644, upload-time = "2026-04-13T17:10:07.663Z" }, + { url = "https://files.pythonhosted.org/packages/7e/af/ae5cf39d4fb82d0c592705f5ec6db1b065be5265c151b108f86126ee8773/fastar-0.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:298a827ec04ade43733f6ca960d0faec38706aa1494175869ea7ea17f5bad5d3", size = 634371, upload-time = "2026-04-13T17:09:52.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/36/8d4569e26473c72ccb02d1c5df3ed710073f1c06eca09c26d52ea79fd815/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8800e2387e463a0e5799416a1cbe72dd0fde7270a20e4bde684145e7878f6516", size = 870850, upload-time = "2026-04-13T17:09:21.439Z" }, + { url = "https://files.pythonhosted.org/packages/bf/46/724dc796e1756d3977970f820d30d59bb8cab8e3671b285f1d82ab513aec/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7496def0a2befd82d429cb004ef7ca831585cc887947bd6b9abb68a5ef852b0b", size = 764469, upload-time = "2026-04-13T17:08:05.638Z" }, + { url = "https://files.pythonhosted.org/packages/99/e3/74d6859e632e8fb9339a14f652fb9f800c2bd6aa53071e311c0be3fbab8b/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:878eaf15463eb572e3538af7ca3a8534e5e279cf8196db902d24e5725c4af86e", size = 761375, upload-time = "2026-04-13T17:08:20.669Z" }, + { url = "https://files.pythonhosted.org/packages/a3/e7/cc70e2be5ef8731a7525552b1c35c1448cf9eae6a62cb3a56f12c1bf27ea/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0324ed1d1ef0186e1bbd843b17807d6d837d0906899d4c99378b02c5d86bdd9c", size = 928189, upload-time = "2026-04-13T17:08:35.663Z" }, + { url = "https://files.pythonhosted.org/packages/3c/33/c9a969e78dca323547276a6fee5f4f9588f7cd5ab45acec3778c67399589/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdf9bd863205590beaf8ef6e66f315310196632180dceaf674985d01a876cac3", size = 820864, upload-time = "2026-04-13T17:09:06.366Z" }, + { url = "https://files.pythonhosted.org/packages/84/bd/6b9434b541fe55c125b5f2e017a565596a2d215aa09207e4555e4585064f/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59af8dbb683b24b90fb5b506de080faeab0a17a908e6c2a5d93a97260ed75d7b", size = 824060, 
upload-time = "2026-04-13T17:09:37.377Z" }, + { url = "https://files.pythonhosted.org/packages/24/8d/871d5f8cf4c6f13987119fb0a9ae8be131e34f2756c2524e9974adf33824/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:9f3df73a3c4292cfe15696cdf59cdb6c309ab59d30b34c733be13c6e32d9a264", size = 889217, upload-time = "2026-04-13T17:08:50.884Z" }, + { url = "https://files.pythonhosted.org/packages/d0/26/cca0fd2704f3ed20165e5613ed911549aef3aaf3b0b5b02fee0e8e23e6cc/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa3762cbb16e41a76b61f4a6914937a71aab3a7b6c2d82ca233bc686ebaf756b", size = 975418, upload-time = "2026-04-13T17:10:24.307Z" }, + { url = "https://files.pythonhosted.org/packages/99/94/8bbb0b13f5b6cbe2492f0b7cbba5103e6163976a3331466d010e781fa189/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:a8c7bc8ac74cb359bb546b199288c83236372d094b402e557c197e85527495cd", size = 1038492, upload-time = "2026-04-13T17:10:41.939Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d3/5b7df222a30eac2822ffd00f82fd4c2ce84fba4b369d1e1a03732fd177fc/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:587cbd060a2699c5f66281081395bb4657b2b1e0eef5c206b1aabf740019d670", size = 1080210, upload-time = "2026-04-13T17:10:58.462Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/56ef943ea524784598c035ccbd42e564e937da0438ae3f55f0e76cb95571/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a1c56957ac82408be37a3f63594bc83e0919e8760492a4475e542f9f1828778", size = 1034886, upload-time = "2026-04-13T17:11:15.617Z" }, +] + +[[package]] +name = "fastcrud" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastapi" }, + { name = "pydantic" }, + { name = "sqlalchemy" }, + { name = "sqlalchemy-utils" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/91/5c/2ee44d6cc63cc1f039cb218ea2681d6978c3a27b094e2a4bdf02f503f599/fastcrud-0.21.0.tar.gz", hash = "sha256:38990a96d4639e65e6f1c9b8f4cebfe5bf1a84ba731eb5c224fff593a7316e02", size = 79300, upload-time = "2026-01-23T19:30:07.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/1b/85fb4c1cb771845e5ede668ef28c090e021020d117e891fa1e599c9e32d1/fastcrud-0.21.0-py3-none-any.whl", hash = "sha256:94daa5ca7815a268b700ee31b36455a8248a1a0547f3fcef5969df0047aab338", size = 107150, upload-time = "2026-01-23T19:30:05.895Z" }, +] + +[[package]] +name = "fastsecure" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt" }, + { name = "greenlet" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "python-multipart" }, + { name = "redis" }, + { name = "sqlalchemy" }, + { name = "types-python-jose" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/12/c9abbd8e08bbc26898e9db7084cc09619f289f3aefdb3216d6641d59baba/fastsecure-0.3.0.tar.gz", hash = "sha256:bfaf9830965fce198c0ff2c4b0cfea2c5d153b79dd92fa64c3c0555a009b64e9", size = 47028, upload-time = "2025-02-11T22:30:32.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/03/59dd272c39997f1fbfe11fdac7cb2185608aad2166312a86c3437e219d0b/fastsecure-0.3.0-py3-none-any.whl", hash = "sha256:33050a45af64771b49967a63de8d892e54c0c4f5e2ca391b0e7f98221dbbfa89", size = 42045, upload-time = "2025-02-11T22:30:30.567Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" 
}, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = 
"2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = 
"2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = 
"2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = 
"2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "greenlet" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/3f/dbf99fb14bfeb88c28f16729215478c0e265cacd6dc22270c8f31bb6892f/greenlet-3.5.0.tar.gz", hash = "sha256:d419647372241bc68e957bf38d5c1f98852155e4146bd1e4121adea81f4f01e4", size = 196995, upload-time = "2026-04-27T13:37:15.544Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0f/a91f143f356523ff682309732b175765a9bc2836fd7c081c2c67fedc1ad4/greenlet-3.5.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8f1cc966c126639cd152fdaa52624d2655f492faa79e013fea161de3e6dda082", size = 284726, upload-time = "2026-04-27T12:20:51.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/82/800646c7ffc5dbabd75ddd2f6b519bb898c0c9c969e5d0473bfe5d20bcce/greenlet-3.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:362624e6a8e5bca3b8233e45eef33903a100e9539a2b995c364d595dbc4018b3", size = 604264, upload-time = "2026-04-27T12:52:39.494Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ac/354867c0bba812fc33b15bc55aedafedd0aee3c7dd91dfca22444157dc0c/greenlet-3.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5ecd83806b0f4c2f53b1018e0005cd82269ea01d42befc0368730028d850ed1c", size = 616099, upload-time = "2026-04-27T12:59:39.623Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/192090c4a5b30df148c22bf4b8895457d739a7c7c5a7b9c41e5dd7f537f2/greenlet-3.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fa94cb2288681e3a11645958f1871d48ee9211bd2f66628fdace505927d6e564", size = 623976, upload-time = 
"2026-04-27T13:02:37.363Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/815bece7399e01cadb69014219eebd0042339875c59a59b0820a46ece356/greenlet-3.5.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ff251e9a0279522e62f6176412869395a64ddf2b5c5f782ff609a8216a4e662", size = 615198, upload-time = "2026-04-27T12:25:25.928Z" }, + { url = "https://files.pythonhosted.org/packages/24/11/05eb2b9b188c6df7d68a89c99134d644a7af616a40b9808e8e6ced315d5d/greenlet-3.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:64d6ac45f7271f48e45f67c95b54ef73534c52ec041fcda8edf520c6d811f4bc", size = 418379, upload-time = "2026-04-27T13:05:12.755Z" }, + { url = "https://files.pythonhosted.org/packages/10/80/3b2c0a895d6698f6ddb31b07942ebfa982f3e30888bc5546a5b5990de8b2/greenlet-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d874e79afd41a96e11ff4c5d0bc90a80973e476fda1c2c64985667397df432b", size = 1574927, upload-time = "2026-04-27T12:53:25.81Z" }, + { url = "https://files.pythonhosted.org/packages/44/0e/f354af514a4c61454dbc68e44d47544a5a4d6317e30b77ddfa3a09f4c5f3/greenlet-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0ed006e4b86c59de7467eb2601cd1b77b5a7d657d1ee55e30fe30d76451edba4", size = 1642683, upload-time = "2026-04-27T12:25:23.9Z" }, + { url = "https://files.pythonhosted.org/packages/fa/6a/87f38255201e993a1915265ebb80cd7c2c78b04a45744995abbf6b259fd8/greenlet-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:703cb211b820dbffbbc55a16bfc6e4583a6e6e990f33a119d2cc8b83211119c8", size = 238115, upload-time = "2026-04-27T12:21:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f8/450fe3c5938fa737ea4d22699772e6e34e8e24431a47bf4e8a1ceed4a98e/greenlet-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:6c18dfb59c70f5a94acd271c72e90128c3c776e41e5f07767908c8c1b74ad339", size = 235017, upload-time = "2026-04-27T12:22:26.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/32/f2ce6d4cac3e55bc6173f92dbe627e782e1850f89d986c3606feb63aafa7/greenlet-3.5.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:db2910d3c809444e0a20147361f343fe2798e106af8d9d8506f5305302655a9f", size = 286228, upload-time = "2026-04-27T12:20:34.421Z" }, + { url = "https://files.pythonhosted.org/packages/b7/aa/caed9e5adf742315fc7be2a84196373aab4816e540e38ba0d76cb7584d68/greenlet-3.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ec9ea74e7268ace7f9aab1b1a4e730193fc661b39a993cd91c606c32d4a3628", size = 601775, upload-time = "2026-04-27T12:52:41.045Z" }, + { url = "https://files.pythonhosted.org/packages/c7/af/90ae08497400a941595d12774447f752d3dfe0fbb012e35b76bc5c0ff37e/greenlet-3.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54d243512da35485fc7a6bf3c178fdda6327a9d6506fcdd62b1abd1e41b2927b", size = 614436, upload-time = "2026-04-27T12:59:41.595Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e9/4eeadf8cb3403ac274245ba75f07844abc7fa5f6787583fc9156ba741e0f/greenlet-3.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:41353ec2ecedf7aa8f682753a41919f8718031a6edac46b8d3dc7ed9e1ceb136", size = 620610, upload-time = "2026-04-27T13:02:39.194Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e0/2e13df68f367e2f9960616927d60857dd7e56aaadd59a47c644216b2f920/greenlet-3.5.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d280a7f5c331622c69f97eb167f33577ff2d1df282c41cd15907fc0a3ca198c", size = 611388, upload-time = "2026-04-27T12:25:28.008Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ef/f913b3c0eb7d26d86a2401c5e1546c9d46b657efee724b06f6f4ac5d8824/greenlet-3.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:58c1c374fe2b3d852f9b6b11a7dff4c85404e51b9a596fd9e89cf904eb09866d", size = 422775, upload-time = "2026-04-27T13:05:14.261Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/f7/393c64055132ac0d488ef6be549253b7e6274194863967ddc0bc8f5b87b8/greenlet-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1eb67d5adefb5bd2e182d42678a328979a209e4e82eb93575708185d31d1f588", size = 1570768, upload-time = "2026-04-27T12:53:28.099Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4b/eaf7735253522cf56d1b74d672a58f54fc114702ceaf05def59aae72f6e1/greenlet-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2628d6c86f6cb0cb45e0c3c54058bbec559f57eaae699447748cb3928150577e", size = 1635983, upload-time = "2026-04-27T12:25:26.903Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/4fb3a0805bd5165da5ebf858da7cc01cce8061674106d2cf5bdab32cbfde/greenlet-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4d9f0624c775f2dfc56ba54d515a8c771044346852a918b405914f6b19d7fd8", size = 238840, upload-time = "2026-04-27T12:23:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cb/baa584cb00532126ffe12d9787db0a60c5a4f55c27bfe2666df5d4c30a32/greenlet-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:83ed9f27f1680b50e89f40f6df348a290ea234b249a4003d366663a12eab94f2", size = 235615, upload-time = "2026-04-27T12:21:38.57Z" }, + { url = "https://files.pythonhosted.org/packages/0c/58/fc576f99037ce19c5aa16628e4c3226b6d1419f72a62c79f5f40576e6eb3/greenlet-3.5.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5a5ed18de6a0f6cc7087f1563f6bd93fc7df1c19165ca01e9bde5a5dc281d106", size = 285066, upload-time = "2026-04-27T12:23:05.033Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ba/b28ddbe6bfad6a8ac196ef0e8cff37bc65b79735995b9e410923fffeeb70/greenlet-3.5.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a717fbc46d8a354fa675f7c1e813485b6ba3885f9bef0cd56e5ba27d758ff5b", size = 604414, upload-time = "2026-04-27T12:52:42.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/06/4b69f8f0b67603a8be2790e55107a190b376f2627fe0eaf5695d85ffb3cd/greenlet-3.5.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ddc090c5c1792b10246a78e8c2163ebbe04cf877f9d785c230a7b27b39ad038e", size = 617349, upload-time = "2026-04-27T12:59:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/15/a643b4ecd09969e30b8a150d5919960caae0abe4f5af75ab040b1ab85e78/greenlet-3.5.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4964101b8585c144cbda5532b1aa644255126c08a265dae90c16e7a0e63aaa9d", size = 623234, upload-time = "2026-04-27T13:02:40.611Z" }, + { url = "https://files.pythonhosted.org/packages/8a/17/a3918541fd0ddefe024a69de6d16aa7b46d36ac19562adaa63c7fa180eff/greenlet-3.5.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2094acd54b272cb6eae8c03dd87b3fa1820a4cef18d6889c378d503500a1dc13", size = 613927, upload-time = "2026-04-27T12:25:30.28Z" }, + { url = "https://files.pythonhosted.org/packages/77/18/3b13d5ef1275b0ffaf933b05efa21408ac4ca95823c7411d79682e4fdcff/greenlet-3.5.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:7022615368890680e67b9965d33f5773aade330d5343bbe25560135aaa849eae", size = 425243, upload-time = "2026-04-27T13:05:15.689Z" }, + { url = "https://files.pythonhosted.org/packages/ee/e1/bd0af6213c7dd33175d8a462d4c1fe1175124ebed4855bc1475a5b5242c2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5e05ba267789ea87b5a155cf0e810b1ab88bf18e9e8740813945ceb8ee4350ba", size = 1570893, upload-time = "2026-04-27T12:53:29.483Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2a/0789702f864f5382cb476b93d7a9c823c10472658102ccd65f415747d2e2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0ecec963079cd58cbd14723582384f11f166fd58883c15dcbfb342e0bc9b5846", size = 1636060, upload-time = "2026-04-27T12:25:28.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/8f/22bf9df92bbff0eb07842b60f7e63bf7675a9742df628437a9f02d09137f/greenlet-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:728d9667d8f2f586644b748dbd9bb67e50d6a9381767d1357714ea6825bb3bf5", size = 238740, upload-time = "2026-04-27T12:24:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b7/9c5c3d653bd4ff614277c049ac676422e2c557db47b4fe43e6313fc005dc/greenlet-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:47422135b1d308c14b2c6e758beedb1acd33bb91679f5670edf77bf46244722b", size = 235525, upload-time = "2026-04-27T12:23:12.308Z" }, + { url = "https://files.pythonhosted.org/packages/94/5e/a70f31e3e8d961c4ce589c15b28e4225d63704e431a23932a3808cbcc867/greenlet-3.5.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:f35807464c4c58c55f0d31dfa83c541a5615d825c2fe3d2b95360cf7c4e3c0a8", size = 285564, upload-time = "2026-04-27T12:23:08.555Z" }, + { url = "https://files.pythonhosted.org/packages/af/a6/046c0a28e21833e4086918218cfb3d8bed51c075a1b700f20b9d7861c0f4/greenlet-3.5.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55fa7ea52771be44af0de27d8b80c02cd18c2c3cddde6c847ecebdf72418b6a1", size = 651166, upload-time = "2026-04-27T12:52:43.644Z" }, + { url = "https://files.pythonhosted.org/packages/47/f8/4af27f71c5ff32a7fbc516adb46370d9c4ae2bc7bd3dc7d066ac542b4b15/greenlet-3.5.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a97e4821aa710603f94de0da25f25096454d78ffdace5dc77f3a006bc01abba3", size = 663792, upload-time = "2026-04-27T12:59:44.93Z" }, + { url = "https://files.pythonhosted.org/packages/fb/89/2dadb89793c37ee8b4c237857188293e9060dc085f19845c292e00f8e091/greenlet-3.5.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bf2d8a80bec89ab46221ae45c5373d5ba0bd36c19aa8508e85c6cd7e5106cd37", size = 668086, upload-time = "2026-04-27T13:02:42.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/59/1bd6d7428d6ed9106efbb8c52310c60fd04f6672490f452aeaa3829aa436/greenlet-3.5.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f52a464e4ed91780bdfbbdd2b97197f3accaa629b98c200f4dffada759f3ae7", size = 660933, upload-time = "2026-04-27T12:25:33.276Z" }, + { url = "https://files.pythonhosted.org/packages/82/35/75722be7e26a2af4cbd2dc35b0ed382dacf9394b7e75551f76ed1abe87f2/greenlet-3.5.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:1bae92a1dd94c5f9d9493c3a212dd874c202442047cf96446412c862feca83a2", size = 470799, upload-time = "2026-04-27T13:05:17.094Z" }, + { url = "https://files.pythonhosted.org/packages/83/e4/b903e5a5fae1e8a28cdd32a0cfbfd560b668c25b692f67768822ddc5f40f/greenlet-3.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:762612baf1161ccb8437c0161c668a688223cba28e1bf038f4eb47b13e39ccdf", size = 1618401, upload-time = "2026-04-27T12:53:31.062Z" }, + { url = "https://files.pythonhosted.org/packages/0e/e3/5ec408a329acb854fb607a122e1ee5fb3ff649f9a97952948a90803c0d8e/greenlet-3.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:57a43c6079a89713522bc4bcb9f75070ecf5d3dbad7792bfe42239362cbf2a16", size = 1682038, upload-time = "2026-04-27T12:25:31.838Z" }, + { url = "https://files.pythonhosted.org/packages/91/20/6b165108058767ee643c55c5c4904d591a830ee2b3c7dbd359828fbc829f/greenlet-3.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:3bc59be3945ae9750b9e7d45067d01ae3fe90ea5f9ade99239dabdd6e28a5033", size = 239835, upload-time = "2026-04-27T12:24:54.136Z" }, + { url = "https://files.pythonhosted.org/packages/4e/62/1c498375cee177b55d980c1db319f26470e5309e54698c8f8fc06c0fd539/greenlet-3.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:a96fcee45e03fe30a62669fd16ab5c9d3c172660d3085605cb1e2d1280d3c988", size = 236862, upload-time = "2026-04-27T12:23:24.957Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/a8/4522939255bb5409af4e87132f915446bf3622c2c292d14d3c38d128ae82/greenlet-3.5.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:a10a732421ab4fec934783ce3e54763470d0181db6e3468f9103a275c3ed1853", size = 293614, upload-time = "2026-04-27T12:24:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/15/5e/8744c52e2c027b5a8772a01561934c8835f869733e101f62075c60430340/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fc391b1566f2907d17aaebe78f8855dc45675159a775fcf9e61f8ee0078e87f", size = 650723, upload-time = "2026-04-27T12:52:45.412Z" }, + { url = "https://files.pythonhosted.org/packages/00/ef/7b4c39c03cf46ceca512c5d3f914afd85aa30b2cc9a93015b0dd73e4be6c/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:680bd0e7ad5e8daa8a4aa89f68fd6adc834b8a8036dc256533f7e08f4a4b01f7", size = 656529, upload-time = "2026-04-27T12:59:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5c/0602239503b124b70e39355cbdb39361ecfe65b87a5f2f63752c32f5286f/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1aa4ce8debcd4ea7fb2e150f3036588c41493d1d52c43538924ae1819003f4ce", size = 657015, upload-time = "2026-04-27T13:02:43.973Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b5/c7768f352f5c010f92064d0063f987e7dc0cd290a6d92a34109015ce4aa1/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddb36c7d6c9c0a65f18c7258634e0c416c6ab59caac8c987b96f80c2ebda0112", size = 654364, upload-time = "2026-04-27T12:25:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/38/51/8699f865f125dc952384cb432b0f7138aa4d8f2969a7d12d0df5b94d054d/greenlet-3.5.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:728a73687e39ae9ca34e4694cbf2f049d3fbc7174639468d0f67200a97d8f9e2", size = 488275, upload-time = "2026-04-27T13:05:18.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/d0/079ebe12e4b1fc758857ce5be1a5e73f06870f2101e52611d1e71925ce54/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e5ddf316ced87539144621453c3aef229575825fe60c604e62bedc4003f372b2", size = 1614204, upload-time = "2026-04-27T12:53:32.618Z" }, + { url = "https://files.pythonhosted.org/packages/6d/89/6c2fb63df3596552d20e58fb4d96669243388cf680cff222758812c7bfaa/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4a448128607be0de65342dc9b31be7f948ef4cc0bc8832069350abefd310a8f2", size = 1675480, upload-time = "2026-04-27T12:25:34.168Z" }, + { url = "https://files.pythonhosted.org/packages/15/32/77ee8a6c1564fc345a491a4e85b3bf360e4cf26eac98c4532d2fdb96e01f/greenlet-3.5.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d60097128cb0a1cab9ea541186ea13cd7b847b8449a7787c2e2350da0cb82d86", size = 245324, upload-time = "2026-04-27T12:24:40.295Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = 
"sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = 
"2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "librt" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/39/cb/c1945e506893b5b8577fb45a60c80e3ffe4a82092a04a6f29b0b951d9a24/librt-0.10.0.tar.gz", hash = "sha256:1aba1e8aa4e3307a7be68a74149545fde7451964dc0235a8bec5704a17bdda42", size = 191799, upload-time = "2026-05-05T16:31:23.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/a3/1472717d2325adacc8d335ba2e4078015c09d75b599f3cf48e967b3d306e/librt-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01b4500ca3a625450c032a9142a8e843923ce263fa8a92ad1b38927cabe2fe72", size = 76045, upload-time = "2026-05-05T16:29:18.731Z" }, + { url = "https://files.pythonhosted.org/packages/a6/31/bfe32355d4b369aef3d7aa442df663bb5558c2ffa2de286cb2956346bc24/librt-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b7e42d1b3e300d20bfc87e72ffd62f0a92a2cb3c35f7bf90df90c9d2a49f74c", size = 79466, upload-time = "2026-05-05T16:29:20.052Z" }, + { url = "https://files.pythonhosted.org/packages/e9/f1/83f8a2c715ba2cac9b7387a5a5cea25f717f7184320cfe48b36bed9c58e9/librt-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8ef7b8c61ce3a1b597cd3e15348ff1574325165c2e7ce09a718154cde2a7950", size = 242283, upload-time = "2026-05-05T16:29:21.596Z" }, + { url = "https://files.pythonhosted.org/packages/cc/94/c3a4ce94857f0004a542f86662806383611858f522722db58efaec0a1472/librt-0.10.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:e73c84f72d1fa0d6eaa7a1930b436ba8d2c90c58d77bfabb09995a69ad35f6c0", size = 230735, upload-time = "2026-05-05T16:29:23.335Z" }, + { url = "https://files.pythonhosted.org/packages/d1/41/e962bb26c7728eb7b3a69e490d0c800fd9968a6970e390c1f18ddb56093d/librt-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9728cb98713bd862fb8f4fd6a642d1896c86058a41d77c70f3d5cee75e725275", size = 256606, upload-time = "2026-05-05T16:29:24.91Z" }, + { url = "https://files.pythonhosted.org/packages/66/3a/4e46a707b1ecc993fd691071623b9beab89703a63bd21cc7807e06c28209/librt-0.10.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:648b7e941d20acd72f9652115e0e53facd98156d61f9ebf7a812bdef8bdccea9", size = 249739, upload-time = "2026-05-05T16:29:26.648Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/dc5b7eb294656ad23d4ff4cf8514208d54fe1026b909d726a0dc026689c9/librt-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c3e33747c068e86a9007c20fdb777eb5ba8d3d19136d7812f88e69a713041b6f", size = 261414, upload-time = "2026-05-05T16:29:28.702Z" }, + { url = "https://files.pythonhosted.org/packages/58/e4/990ed8d12c7f114ac8f8ccd47f7d9bd9704ef61acfcb1df4a05047da7710/librt-0.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d509c745bf7e77d1107cf05e6abb249dc03fad13eb39f2286a49deedaeb2bcd7", size = 256614, upload-time = "2026-05-05T16:29:30.357Z" }, + { url = "https://files.pythonhosted.org/packages/60/eb/52d2726c7fb22818507dc3cc166c8f36dd4a4b68a7be67f12006ac8777c1/librt-0.10.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:786ad5a15e99d0e0e74f3adbeecc198a5ac58f340be07e984723d1e0074838de", size = 255144, upload-time = "2026-05-05T16:29:32.106Z" }, + { url = "https://files.pythonhosted.org/packages/bc/df/bd5591a78f7531fce4b6eb9962aadc6adc9560a01570442a884b6e554abe/librt-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:075582d877a97ee3d8e77bda3689dbe617b14f6469224a2d80b4b6c38e3951aa", size = 279121, upload-time = "2026-05-05T16:29:33.688Z" }, + { url = "https://files.pythonhosted.org/packages/fd/df/7c2b838dfc89a1762dd156d8b0c39848a7a2845d725a50be5a6e021fb8ba/librt-0.10.0-cp311-cp311-win32.whl", hash = "sha256:75ecdc3f5a90065aa2af2e574706c5495adc392520762dcf10b1aa716f0b8090", size = 62593, upload-time = 
"2026-05-05T16:29:35.152Z" }, + { url = "https://files.pythonhosted.org/packages/91/19/22ff572981049a9d436a083dbea1572d0f5dc068b7353637d2dd9977c8f1/librt-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:b6f6084884131d8a52cb9d7095ff2aa52c1e786d9fdaefab1fb4515415e9e083", size = 70914, upload-time = "2026-05-05T16:29:36.407Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/1697cc64f4a5c7e9bce55e99c6d234a346beaedaefcd1e2ca90dd285f98c/librt-0.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:0140bd62151160047e89b2730cb6f8506cdac5127baa1afb9231e4dd3fe7f681", size = 61176, upload-time = "2026-05-05T16:29:37.62Z" }, + { url = "https://files.pythonhosted.org/packages/12/8e/cbb5b6f6e45e65c10a42449a69eaccc44d73e6a081ea752fbc5221c6dc1c/librt-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b4b58a44b407e91f633dafee008de9ddea6aa2a555ed94929c099260910bd0ba", size = 77327, upload-time = "2026-05-05T16:29:38.919Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3d/8233cbee8e99e6a8992f02bfc2dec8d787509566a511d1fde2574ee7473f/librt-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:950b79b11762531bdf45a9df909d2f9a2a8445c70c88665c01d14c8511a27dc5", size = 79971, upload-time = "2026-05-05T16:29:40.96Z" }, + { url = "https://files.pythonhosted.org/packages/87/6f/5264b298cef2b72fc97d2dde56c66181eda35204bf5dcd1ed0c3d0a0a782/librt-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4538453f51be197633b425912c150e25b0667252d3741c53e8368176d98d9d37", size = 246559, upload-time = "2026-05-05T16:29:42.701Z" }, + { url = "https://files.pythonhosted.org/packages/07/7b/19b1b859cc60d5f99276cc2b3144d91556c6d1b1e4ebb50359696bebf7a8/librt-0.10.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:70b955f091beac93e994a0b7ec616934f63b3ea5c3d6d7af847562f935aceca7", size = 235216, upload-time = "2026-05-05T16:29:44.193Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/56/a2f40717142a8af46289f57874ef914353d8faccd5e4f8e594ab1e16e8c7/librt-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:483e685e06b6163728ba6c85d74315176be7190f432ec2a41226e5e14355d5f0", size = 263108, upload-time = "2026-05-05T16:29:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/67/ca/15c625c3bdc0167c01e04ef8878317e9713f3bfa788438342f7a94c7b22c/librt-0.10.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ac53d946a009d1a38c44a60812708c9458fb2a239a5f630d8e625571386650f", size = 255280, upload-time = "2026-05-05T16:29:48.087Z" }, + { url = "https://files.pythonhosted.org/packages/ed/c5/ba301d571d9e05844e2435b73aba30bee77bb75ce155c9affcfd2173dd03/librt-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bc8771c9fcf0ea894ca41fdc2abd83572c2fbda221f232d86e718614e57ff513", size = 268829, upload-time = "2026-05-05T16:29:49.628Z" }, + { url = "https://files.pythonhosted.org/packages/8b/60/af70e135bc1f1fe15dd3894b1e4bbefc7ecdf911749a925a39eb86ceb2a1/librt-0.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:70805dbc5257892ac572f86290a61e3c8d90224ecce1a8b2d1f7ed51965417f4", size = 262051, upload-time = "2026-05-05T16:29:51.244Z" }, + { url = "https://files.pythonhosted.org/packages/83/c2/c8236eb8b421bac5a172ba208f965abaa89805da2a3fa112bdf1764caf8f/librt-0.10.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d3b4f300f7bcba6e2ff73fb8bef1898479e9772bfa2682998c636391633ec826", size = 264347, upload-time = "2026-05-05T16:29:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/15b6d32bc25dacd4a60886a683d8128d6219910c122202b995a40dd4f8d2/librt-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:943bc943f92f4fb3408fae62485c6a3ad68ce4f2ee205643a39641525c19a276", size = 286482, upload-time = "2026-05-05T16:29:54.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/8e/b1b959bacd323eb4360579db992513e1406d1c6ef7edb57b5511fd0666fd/librt-0.10.0-cp312-cp312-win32.whl", hash = "sha256:6065c1a758fba1010b41401013903d3d5d2750eab425ddedd584abac31d0630e", size = 62955, upload-time = "2026-05-05T16:29:56.39Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4c/d4cd6e4b9fc24098e63cc85537d1b6689682aee96809c38f08072067cc2b/librt-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:d788ecbe208ab352dab0e105cc06057bf9a2fc7e58cabb0d751ad9e30062b9e2", size = 71191, upload-time = "2026-05-05T16:29:57.682Z" }, + { url = "https://files.pythonhosted.org/packages/2b/19/8641da1f63d24b92354a492f893c022d6b3a0df44e70c8eff49364613983/librt-0.10.0-cp312-cp312-win_arm64.whl", hash = "sha256:6003d1f295bdba02656dc81308208fc060d0a51d8c0d0a6db70f7f3c57b9ba0a", size = 61432, upload-time = "2026-05-05T16:29:58.971Z" }, + { url = "https://files.pythonhosted.org/packages/e5/29/681a75c82f4cc90d29e4b257a3299b79fe13fe927a04c57b8109d70b6957/librt-0.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f0ede79d682e73f91c1b599a76d78b7464b9b5d213754cedb13372d9df36e596", size = 77299, upload-time = "2026-05-05T16:30:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/62/24/0c7ca445a55d04be79cac19819437fd094782347fa116f6681844fa6143e/librt-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e0ba0b131fdb336c8b9c948e397f4a7e649d0f783b529f07b647bf4961df392e", size = 79930, upload-time = "2026-05-05T16:30:01.555Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1f/1e2b8f6443ef9e9a81e89486ca70e22f3684f93db003ce6eaefc3d0839b9/librt-0.10.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2728117da2afb96fb957768725ee43dc9a2d73b031e02da424b818a3cdd3a275", size = 246195, upload-time = "2026-05-05T16:30:03.261Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/61/9dc9e03de0439ad84c1c240aac8b747f12c90cb797ea6042f7bdb8d3410f/librt-0.10.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:723ba80594c49cdf0584196fc430752262605dc9449902fc9bd3d9b79976cb77", size = 234951, upload-time = "2026-05-05T16:30:04.881Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/635223117d7590875bca441275065a3bf491203ad4208bd1cc3ffd90c5a1/librt-0.10.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7292edaaca294a61a978c53a3c7d6130d099b0dfbc8f0a65916cdc6b891b9852", size = 262768, upload-time = "2026-05-05T16:30:06.638Z" }, + { url = "https://files.pythonhosted.org/packages/e5/66/b04152d0cd8b6ca2b428a8bd3230343230c35ed304a932f35b5375f2f828/librt-0.10.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89fe9d539f2c10a1666633eeeac507ce95dd06d9ecc58de3c6390dba156a3d3a", size = 255075, upload-time = "2026-05-05T16:30:08.216Z" }, + { url = "https://files.pythonhosted.org/packages/35/1e/25bac4c7f2ca36f0e612cade186970683cf79153d96beccc3a11a9e19b97/librt-0.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4efa7b9587503fa5b67f40593302b9c8836d211d222ff9f7cafe67be5f8f0b10", size = 268559, upload-time = "2026-05-05T16:30:10.1Z" }, + { url = "https://files.pythonhosted.org/packages/18/54/4601faab35b6632a13200faa146ca62bfd111ffbe2568be430d65c89493a/librt-0.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:22dc982ef59df0136df36092ccbdbb570ced8aafb33e49585739b2f1de1c13b6", size = 261753, upload-time = "2026-05-05T16:30:11.912Z" }, + { url = "https://files.pythonhosted.org/packages/1b/cf/39f4023509e94fade8b074666fa3292db9cb6b34ea5dcbe7af53df9fca1d/librt-0.10.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6f2e5f3606253a84cea719c94a3bb1c54487b5d617d0254d46e0920d8a06be3f", size = 264055, upload-time = "2026-05-05T16:30:13.465Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/00/40247209fc46a8e308a91412d5206aedf8efb667ee89eb625820106a5c2f/librt-0.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40884bfaa1e29f6b6a9be255007d8f359bfc9e61d68bdef8ed3158bfcbc95df9", size = 286190, upload-time = "2026-05-05T16:30:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/d8/6e/5566beb94431a985abe1787af5ef86e087750172ff9d0bbf20f93e88132d/librt-0.10.0-cp313-cp313-win32.whl", hash = "sha256:3cd34cd8254eba756660bff6c2da91278248184301054fe3e4feb073bdd49b14", size = 62949, upload-time = "2026-05-05T16:30:16.503Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c2/3ea3301d6c8dff51d39dbe8ed75db3dc92896947d4afb5eeadf821c1e67f/librt-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:7baac5313e2d8dce1386f97777a8d03ab28f5fe1e780b3b9ac2ee7544551fedc", size = 71152, upload-time = "2026-05-05T16:30:17.766Z" }, + { url = "https://files.pythonhosted.org/packages/3c/de/5d49cb92cadcbc77d3abc27b93fd6030ed8437487dde2eae38cab5e6704d/librt-0.10.0-cp313-cp313-win_arm64.whl", hash = "sha256:afc5b4406c8e2515698d922a5c7823a009312835ea58196671fff40e35cb8166", size = 61336, upload-time = "2026-05-05T16:30:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/6a/64/7165e08108cc185a13a9c069f0685e6ef92e70e07fddf7edf5e7348c6316/librt-0.10.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f09588a30e6a22ec624090d72a3ab1a6d4d5485c3ed739603e76aa3c16efa688", size = 76794, upload-time = "2026-05-05T16:30:20.392Z" }, + { url = "https://files.pythonhosted.org/packages/ae/ef/bf8613febf651b90c5222ee79dea5ae58d4cc2b544df69d3033424448934/librt-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:131ade118d12bd7a0adc4e655474a553f1b76cf78385868885944d21d51e45e0", size = 79662, upload-time = "2026-05-05T16:30:22.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/67/9eddd165c1d8397bdf99b38bf12b5a55b3def5035b49eedb49f2775d1430/librt-0.10.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8b9ab28e40d011c373a189eae900c916e66d6fbecf7983e9e4883089ee085ef", size = 242390, upload-time = "2026-05-05T16:30:23.51Z" }, + { url = "https://files.pythonhosted.org/packages/10/d1/d95da80334501866cd37004ab5d7483220d05862fab4b5405394f0264f0d/librt-0.10.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:67c39bb30da73bae1f293d1ed8bc2f8f6642649dd0928d3600aeff3041ac23d6", size = 232603, upload-time = "2026-05-05T16:30:25.198Z" }, + { url = "https://files.pythonhosted.org/packages/0c/fa/e6d64d28718bc1be4e1736fcb037ca1c4dfca927e7167df75a7d5215665e/librt-0.10.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c3273c6b774614f093c8927c2bf1b077d0fefde988fe98f46a333734e5597ab", size = 259187, upload-time = "2026-05-05T16:30:26.772Z" }, + { url = "https://files.pythonhosted.org/packages/72/3f/3fdb77e7f937dad59cfd76b720be7e7643400ec76b2da35befab8d66ba30/librt-0.10.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9dd7c1b86a4baa583ab5db977484b93a2c474e69e96ef3e9538387ea54229cb9", size = 251846, upload-time = "2026-05-05T16:30:28.56Z" }, + { url = "https://files.pythonhosted.org/packages/18/ca/f4d49133dd86a6f55d79eca30bf412fa722f511a9abe67f62f57aa64e66a/librt-0.10.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a77385c5a202e831149f7ad03be9e67cf80e957e52c614e83dcb822c95222eb8", size = 264936, upload-time = "2026-05-05T16:30:30.491Z" }, + { url = "https://files.pythonhosted.org/packages/de/66/a8df2fbadc1f6c1827a096d11c40175bd526133480bd3bc88ec64a03d257/librt-0.10.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c6a5eafa74b5655bad59886138ed68426f098a6beb8cb95a71f2cc3cd8bb33fe", size = 258699, upload-time = 
"2026-05-05T16:30:32.002Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/1e3c83613fe05451bb969e27b68a573d177f08d5f63533cc29fec0989658/librt-0.10.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:1fc93d0439204c50ab4d1512611ce2c206f1b369b419f69c7c27c761561e3291", size = 259825, upload-time = "2026-05-05T16:30:35.077Z" }, + { url = "https://files.pythonhosted.org/packages/09/24/5e2f926ee9d3ef348d9339526d7062abb5c44d8419e3179528c01d78c102/librt-0.10.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:79e713c178bc7a744adfbee6b4619a288eecc0c914da2a9313a20255abe2f0cf", size = 282548, upload-time = "2026-05-05T16:30:36.639Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7d/3e89ed6ad0162561fa8bef9df3195e24263104c955713cd0237d3711fad2/librt-0.10.0-cp314-cp314-win32.whl", hash = "sha256:2eba9d955a68c41d9f326be3da42f163ec3518b7ab20f1c826224e7bed71e0bf", size = 58970, upload-time = "2026-05-05T16:30:38.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/25/579e731c94a7086a268bfa3e7a4945cd47836bebd3cbf3faeafd2e7eaef9/librt-0.10.0-cp314-cp314-win_amd64.whl", hash = "sha256:cbfaf7f5145e9917f5d18bffa298eff6a19d74e7b8b11dabdca95785befe8dbf", size = 67260, upload-time = "2026-05-05T16:30:39.804Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f8/235822b7ae0b2334f12ee18bcf2476d07924077a5efeea57dbe927704be2/librt-0.10.0-cp314-cp314-win_arm64.whl", hash = "sha256:8d6d385d1969849a6b1397114df22714b6ded917bada98668e3e974dc663477e", size = 57156, upload-time = "2026-05-05T16:30:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e3/9b919cbf1e8eb770bf91bb7df28125e0f1daf4587169afefd95402636e9a/librt-0.10.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:6c3a82d3bd32631ef5c79922dfc028520c9ad840255979ab4d908271818039ee", size = 79150, upload-time = "2026-05-05T16:30:42.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/f5/72a944aa3bc3498169a168087eff58ca48b58bf1b704e59d091fd30739f3/librt-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d64cc66005dc324c9bb1fa3fc2841f529002f6eb15966d55e46d430f56955a6a", size = 82304, upload-time = "2026-05-05T16:30:44.082Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e3/fcc290a33e295019759472dfa794d204e43504b276ac65eab7fd9da20ea3/librt-0.10.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bb562cd28c88cd2c6a9a6c78f99dc39348d6b16c94adc25de0e574acf1176e9", size = 272556, upload-time = "2026-05-05T16:30:45.497Z" }, + { url = "https://files.pythonhosted.org/packages/fd/54/546975e4c997573885e7f040a05012f8838e06fb12b0c3c1fbb76254e9d7/librt-0.10.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:b809aa2854d019c28773b03605df22adc675ee4f3f4402d673581313e8906119", size = 256941, upload-time = "2026-05-05T16:30:47.059Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f1d03401571b331653acddbd4e8cd955c06d945241dd08b25192fac0d04b/librt-0.10.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc15acabdd519bd4176fdadc2119e5e3093485d86f89138daf47e5b4cedb983a", size = 285855, upload-time = "2026-05-05T16:30:48.86Z" }, + { url = "https://files.pythonhosted.org/packages/0c/08/62cf80ff046c339faf56718b3a940244d4beb70f1c6407289b5830ec11e9/librt-0.10.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b1b2d835307d08ddadd94568e2369648ec9173bd3eea6d7f52a1abe717c81f98", size = 275321, upload-time = "2026-05-05T16:30:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ea/da5918d4070362e9a4d2ee9cd34f9dc84902daad8fd4275f8504a727ff4e/librt-0.10.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d261c6a2f93335a5167887fb0223e8b98ffce20ee3fde242e8e58a37ece6d0e5", size = 293993, upload-time = 
"2026-05-05T16:30:52.577Z" }, + { url = "https://files.pythonhosted.org/packages/c9/8d/68b6086bed1fcdc314c640ea04e31e52d18052e08059fa595409d66a51a9/librt-0.10.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e2ffd44963f8e7f68995504d90f9881d64e94dc1d8e310039b9526108fc0c0f7", size = 284254, upload-time = "2026-05-05T16:30:55.086Z" }, + { url = "https://files.pythonhosted.org/packages/06/c8/b810f1d84ec34a5a7ed93d7b510ab04164d75fbdf23088d5c3fbe6b08357/librt-0.10.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:5f285f6455ed495791c4d8630e5af732960adea93cac4c893d15619f2eae53e8", size = 284925, upload-time = "2026-05-05T16:30:56.728Z" }, + { url = "https://files.pythonhosted.org/packages/5a/00/3c82d4158c5a2c62528b8fccce65a8c9ad700e480e86f9389387435089a5/librt-0.10.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f6034ff52e663d34c7b82ef2aa2f94ad7c1d939e2368e63b06844bc4d127d2e1", size = 307830, upload-time = "2026-05-05T16:30:58.377Z" }, + { url = "https://files.pythonhosted.org/packages/99/3a/9c635ac3e8a00383ff689161d3eac8a30b3b2ddc711b40471e6b8983ea29/librt-0.10.0-cp314-cp314t-win32.whl", hash = "sha256:657860fd877fba6a241ea088ef99f63ca819945d3c715265da670bad56c37ebe", size = 60147, upload-time = "2026-05-05T16:31:00.293Z" }, + { url = "https://files.pythonhosted.org/packages/dc/e8/6f65f3e565d4ac212cddddd552eacc8035ffdf941ca0ad6fe945a211d41f/librt-0.10.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56ded2d66010203a0cb5af063b609e3f079531a0e5e576d618dece859fd2e1af", size = 68649, upload-time = "2026-05-05T16:31:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/51/78/a0705a67cacd81e5fa01a5035b3adbdfbb43a7b8d4bd27e2b282ae61baf2/librt-0.10.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1ee63f30abf18ed4830fdbaf87b2b6f4bba1e198d46085c314edde4045e56715", size = 58247, upload-time = "2026-05-05T16:31:03.191Z" }, +] + +[[package]] +name = "mako" +version = "1.3.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/62/791b31e69ae182791ec67f04850f2f062716bbd205483d63a215f3e062d3/mako-1.3.12.tar.gz", hash = "sha256:9f778e93289bd410bb35daadeb4fc66d95a746f0b75777b942088b7fd7af550a", size = 400219, upload-time = "2026-04-28T19:01:08.512Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/b1/a0ec7a5a9db730a08daef1fdfb8090435b82465abbf758a596f0ea88727e/mako-1.3.12-py3-none-any.whl", hash = "sha256:8f61569480282dbf557145ce441e4ba888be453c30989f879f0d652e39f53ea9", size = 78521, upload-time = "2026-04-28T19:01:10.393Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/ff/7841249c247aa650a76b9ee4bbaeae59370dc8bfd2f6c01f3630c35eb134/markdown_it_py-4.2.0.tar.gz", hash = "sha256:04a21681d6fbb623de53f6f364d352309d4094dd4194040a10fd51833e418d49", size = 82454, upload-time = "2026-05-07T12:08:28.36Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/81/4da04ced5a082363ecfa159c010d200ecbd959ae410c10c0264a38cac0f5/markdown_it_py-4.2.0-py3-none-any.whl", hash = "sha256:9f7ebbcd14fe59494226453aed97c1070d83f8d24b6fc3a3bcf9a38092641c4a", size = 91687, upload-time = "2026-05-07T12:08:27.182Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = 
"sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url 
= "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { 
url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = "2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, 
+ { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, 
+ { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = 
"2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = 
"2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "mypy" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ast-serialize" }, + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/dc/7e6d49f04fca40b9dd5c752a51a432ffe67fb45200702bc9eee0cb4bbb26/mypy-2.0.0.tar.gz", hash = "sha256:1a9e3900ac5c40f1fe813506c7739da6e6f0eab2729067ebd94bfb0bbba53532", size = 3869036, upload-time = "2026-05-06T19:26:43.22Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/1e/268b81393b81d64683f670680215553e70ae92c55805915b3440080e05e4/mypy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17b7222e9fdfd352e61fb3131da117e55cc465f701ff232f1bd97a02bbad91f", size = 14580849, upload-time = "2026-05-06T19:23:06.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/32/d159a8002d9e5c44e59ece9d641a26956c89be5b6827f819d9a9dc678c65/mypy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc0a61adea1a5ffc2d47a4dc4bb180d8103f477fc2a90a1cdcbb168c2cc6caff", size = 13444955, upload-time = "2026-05-06T19:25:11.982Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/3b28d5a2799591da0ee5490418e94497eaf5d701e42d8b001b5e17a9b3d6/mypy-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8578f857b519993d065e5805290b71467ebfae772407a5f57e823755e4fdb850", size = 13873124, upload-time = "2026-05-06T19:20:39.684Z" }, + { url = "https://files.pythonhosted.org/packages/60/23/f40f723955617b814d5ddc1154d8938b77aaf6926c2dbf72846e8943a0b7/mypy-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33f668a37a650df60f7b825c1ac61e6baadd4ac3c89519e929badde58d28edf5", size = 14748822, upload-time = "2026-05-06T19:25:30.972Z" }, + { url = "https://files.pythonhosted.org/packages/d6/16/eded971224a483e422a141ffd580c00e1b919df8e529f06d03a4a987878c/mypy-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29ea6da86c8c5e9addd48fa6e624f467341b3814f54ded871b28980468686dea", size = 14992675, upload-time = "2026-05-06T19:23:34.511Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6a/1cbd7290f00b4dbaa4c4502e53ac05645ea635e4d1e3dcd42687c2fc39cd/mypy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:904baa0124ebbccf0c7ba94f722cf9186ee30478f5e5b11432ffc8929248ee55", size = 10983628, upload-time = "2026-05-06T19:26:39.48Z" }, + { url = "https://files.pythonhosted.org/packages/83/3f/8caa9bcc2636cd512642050747466b695fa2540d7040544fd7ddb721d671/mypy-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:440165501295e523bf1e5d3e411b62b367b901c65610938e75f0e56ba0462461", size = 9906041, upload-time = "2026-05-06T19:24:03.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/4b/f6cd12ef1eb63be1c342da3e8ca811d2280276177f6de4ef20cb2366d79b/mypy-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:660790551c988e69d8bf7a35c8b4149edeb22f4a339165702be843532e9dcdb5", size = 14756610, upload-time = "2026-05-06T19:26:19.221Z" }, + { url = "https://files.pythonhosted.org/packages/32/73/67d09ca28bee21feaca264b2a680cf2d300bcc2071136ad064928324c843/mypy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7a15bf92cd8781f8e72f69ffa7e30d1f434402d065ee1ecd5223ef2ef100f914", size = 13554270, upload-time = "2026-05-06T19:26:08.977Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/44718b5c6b1b5a27440ff2effe6a1be0fa2a190c0f4e2e21a83728416f95/mypy-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ff370b43d7def05bbcd2f5267f0bcda72dd6a552ef2ea9375b02d6fe06da270", size = 13924663, upload-time = "2026-05-06T19:21:24.932Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2b/bbb9cc5773f946846a7c340097e59bcf84095437dda0d56bb4f6cf1f6541/mypy-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37bd246590a018e5a11703b7b09c39d47ede3df5ba3fa863c5b8590b465beb01", size = 14946862, upload-time = "2026-05-06T19:24:23.023Z" }, + { url = "https://files.pythonhosted.org/packages/43/25/e9318566f443a5130b4ff0ad3367ee6c4c4c49ff083fe5214a7318c18282/mypy-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cce87e92214fac8bf8feb8a680d0c1b6fb748d50e9b57fbb13e4b1d83a3ed19b", size = 15175090, upload-time = "2026-05-06T19:26:28.794Z" }, + { url = "https://files.pythonhosted.org/packages/67/65/2ec28c834f21e164c33bc296a7db538ad50c74f83e517c7a0be95ff6de86/mypy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e19e9cb69b66a4141009d24898259914fa2b71d026de0b46edf9fafdbf4fd46e", size = 11052899, upload-time = "2026-05-06T19:25:39.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/72/d1ec625cfc9bd101c07a6834ef1f94e820296f8fdbad2eb03f50e0983f8c/mypy-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:b021614cb08d44785b025982163ec3c39c94bff766ead071fa9e82b4ef6f62cd", size = 9972935, upload-time = "2026-05-06T19:23:24.204Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c6/996a1e535e5d0d597c3b1460fc962733091f885f312e749350eb2ac10965/mypy-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ef5f581b61240d1cc629b12f8df6565ed6ffac0d82ed745eef7833222ab50b9", size = 14737259, upload-time = "2026-05-06T19:20:23.081Z" }, + { url = "https://files.pythonhosted.org/packages/94/c5/0f9460e26b77f434bd53f47d1ce32a3cd4580c92a5331fa5dfc059f9421a/mypy-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20e3470a165dbc249bdfbe8d1c5172727ef22688cffc279f8c3aa264ab9d4d9a", size = 13538377, upload-time = "2026-05-06T19:21:08.804Z" }, + { url = "https://files.pythonhosted.org/packages/b2/3e/8ea2f8dd1e5c9c279fb3c28193bdb850adf4d3d8172880abad829eced609/mypy-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:224ba142eee8b4d65d4db657cb1fc22abec30b135ded6ab297302ba1f62e505d", size = 13914264, upload-time = "2026-05-06T19:24:12.875Z" }, + { url = "https://files.pythonhosted.org/packages/be/ce/78bd3b8520f676acee9dab48ea71473e68f6d5cf14b59fbd800bea50a92b/mypy-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e879ad8a03908ff74d15e8a9b42bf049918e6798d52c011011f1873d0b5877e", size = 14926761, upload-time = "2026-05-06T19:20:12.846Z" }, + { url = "https://files.pythonhosted.org/packages/61/ef/b52fa340522da3d22e669117c3b83155c2660f7cdc035856958fbfffb224/mypy-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:65c5c15bcbd18d6fe927cc55c459597a3517d69cc3123f067be3b020010e115e", size = 15157014, upload-time = "2026-05-06T19:25:49.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/0c/dde7614250c6d017936c7aa3bb63b9b52c7cfd298d3f1be9be45f307870b/mypy-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:d1a068acd7c9fb77e9f8923f1556f2f49d6d7895821121b8d97fa5642b9c52f5", size = 11067049, upload-time = "2026-05-06T19:21:16.116Z" }, + { url = "https://files.pythonhosted.org/packages/27/ec/1d6af4830a94a285442db19caa02f160cc1a255e4f324eec5458e6c2bafb/mypy-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:ef9d96da1ddffbc21f27d3939319b6846d12393baa17c4d2f3e81e040e73ce2c", size = 9967903, upload-time = "2026-05-06T19:22:15.52Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/6fefe954207860aed6eeb91776795e64a257d3ce0360862288984ce121f5/mypy-2.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c918c64e8ce36557851b0347f84eb12f1965d3a06813c36df253eb0c0afd1d82", size = 14729633, upload-time = "2026-05-06T19:24:53.383Z" }, + { url = "https://files.pythonhosted.org/packages/23/d6/d336f5b820af189eb0390cce21de62d264c0a4e64713dfbe81bfc4fc7739/mypy-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:301f1a8ccc7d79b542ee218b28bb49443a83e194eb3d10da63ff1649e5aa5d34", size = 13559524, upload-time = "2026-05-06T19:22:24.906Z" }, + { url = "https://files.pythonhosted.org/packages/af/a6/d7bb54fde1770f0484e5fbdbdce37a41e95ed0a1cd493ec60ead111e356c/mypy-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdf4ef489d44ce350bac3fd699907834e551d4c934e9cc862ef201215ab1558d", size = 13936018, upload-time = "2026-05-06T19:25:02.992Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ba/5be51316b91e6a6bf6e3a8adb3de500e7e1fb5bf9491743b8cbc81a34a2c/mypy-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cde2d0989f912fc850890f727d0d76495e7a6c5bdd9912a1efdb64952b4398d", size = 14910712, upload-time = "2026-05-06T19:25:21.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/37/e2c8c3b373e20ebfb66e6c83a99027fd67df4ec43b08879f74e822d2dc4c/mypy-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdf05693c231a14fe37dbfce192a3a1372c26a833af4a80f550547742952e719", size = 15141499, upload-time = "2026-05-06T19:20:50.924Z" }, + { url = "https://files.pythonhosted.org/packages/12/36/07756f933e00416d912e35878cfcf89a593a3350a885691c0bb85ae0226a/mypy-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:73aee2da33a2237e66cbe84a94780e53599847e86bb3aa7b93e405e8cd9905f2", size = 11240511, upload-time = "2026-05-06T19:21:32.39Z" }, + { url = "https://files.pythonhosted.org/packages/70/05/79ac1f20f2397353f3845f7b8bb5d8006cda7c8ef9092f04f9de3c6135f2/mypy-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:1f6dcd8f39971f41edab2728c877c4ac8b50ad3c387ff2770423b79a05d23910", size = 10149336, upload-time = "2026-05-06T19:22:08.383Z" }, + { url = "https://files.pythonhosted.org/packages/53/e0/0db84e0ebbad6e99e566c68e4b465784f2a2294f7719e8db9d509ef23087/mypy-2.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a04e980b9275c76159da66c6e1723c7798306f9802b31bdaf9358d0c84030ce8", size = 15797362, upload-time = "2026-05-06T19:22:00.835Z" }, + { url = "https://files.pythonhosted.org/packages/0a/a4/14cc0768164dd53bec48aa41a20270b18df9bf72aa5054278bf133608315/mypy-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:33f9cf4825469b2bc73c53ba55f6d9a9b4cdb60f9e6e228745581520f29b8771", size = 14635914, upload-time = "2026-05-06T19:23:43.675Z" }, + { url = "https://files.pythonhosted.org/packages/08/48/d866a3e23b4dc5974c77d9cf65a435bf22de01a84dd4620917950e233960/mypy-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191675c3c7dc2a5c7722a035a6909c277f14046c5e4e02aa5fbf65f8524f08ad", size = 15270866, upload-time = "2026-05-06T19:22:34.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/eb/de9ef94958eb2078a6b908ceb247757dc384d3a238d3bd6ed7d81de5eaf8/mypy-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c3d26c4321a3b06fc9f04c741e0733af693f82d823f8e64e47b2e63b7f19fa84", size = 16093131, upload-time = "2026-05-06T19:23:56.541Z" }, + { url = "https://files.pythonhosted.org/packages/ad/07/0ab2c1a9d26e90942612724cbd5788f16b7810c5dd39bfcf79286c6c4524/mypy-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bbcbc4d5917ca6ce12de70e051de7f533e3bf92d548b41a38a2232a6fe356525", size = 16330685, upload-time = "2026-05-06T19:21:42.037Z" }, + { url = "https://files.pythonhosted.org/packages/a6/8f/46f85d1371a5be642dad263828118ae1efd536d91d8bd2000c68acff3920/mypy-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:dbc6ba6d40572ae49268531565793a8f07eac7fc65ad76d482c9b4c8765b6043", size = 12752017, upload-time = "2026-05-06T19:22:44.002Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e6/94ca48800cac19eb28a58188a768aaec0d16cac0f373915f073058ab0855/mypy-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:77926029dfcb7e1a3ecb0acb2ddbb24ca36be03f7d623e1759ad5376be8f6c01", size = 10527097, upload-time = "2026-05-06T19:20:58.973Z" }, + { url = "https://files.pythonhosted.org/packages/5c/14/fd0694aa594d6e9f9fd16ce821be2eff295197a273262ef56ddcc1388d68/mypy-2.0.0-py3-none-any.whl", hash = "sha256:8a92b2be3146b4fa1f062af7eb05574cbf3e6eb8e1f14704af1075423144e4e5", size = 2673434, upload-time = "2026-05-06T19:26:32.856Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "26.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = "2026-04-24T20:15:23.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, +] + +[[package]] +name = "pamqp" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/62/35bbd3d3021e008606cd0a9532db7850c65741bbf69ac8a3a0d8cfeb7934/pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b", size = 30993, upload-time = "2024-01-12T20:37:25.085Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/8d/c1e93296e109a320e508e38118cf7d1fc2a4d1c2ec64de78565b3c445eb5/pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0", size = 33848, upload-time = "2024-01-12T20:37:21.359Z" }, +] + +[[package]] +name = "pathspec" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = 
"sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = "sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 
210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = 
"2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 
204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = 
"2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 
201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = 
"2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "psutil" +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/60/a3624f79acea344c16fbef3a94d28b89a8042ddfb8f3e4ca83f538671409/psycopg2_binary-2.9.12.tar.gz", hash = "sha256:5ac9444edc768c02a6b6a591f070b8aae28ff3a99be57560ac996001580f294c", size = 379686, upload-time = "2026-04-21T09:40:34.304Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/19/d4ce60954f3bb9d8e3bc5e5c4d1f2487de2d3851bf2391d54954c9df12a6/psycopg2_binary-2.9.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5c8ce6c61bd1b1f6b9c24ee32211599f6166af2c55abb19456090a21fd16554b", size = 3712338, upload-time = "2026-04-20T23:34:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/53/71/c85409ee0d78890f0660eff262e815e7dd2bb741a17611d82e9e8cd9dc5e/psycopg2_binary-2.9.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4a9eaa6e7f4ff91bec10aa3fb296878e75187bced5cc4bafe17dc40915e1326", size = 3822407, upload-time = "2026-04-20T23:34:05.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/ed/60486c2c7f0d4d1ede2bfb1ed27e2498477ce646bc7f6b2759906303117e/psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c6528cefc8e50fcc6f4a107e27a672058b36cc5736d665476aeb413ba88dbb06", size = 4578425, upload-time = "2026-04-20T23:34:08.246Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b9/656cb03fad9f4f49f2145c334b1126ee75189929ca4e6187d485a2d59951/psycopg2_binary-2.9.12-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e4e184b1fb6072bf05388aa41c697e1b2d01b3473f107e7ec44f186a32cfd0b8", size = 4273709, upload-time = "2026-04-20T23:34:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/99/66/08cf0da0e25cc6fb142c89be45fc8418792858f0c4cbff5e24530ff02cd6/psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4766ab678563054d3f1d064a4db19cc4b5f9e3a8d9018592a8285cf200c248f3", size = 5893779, upload-time = "2026-04-20T23:34:13.905Z" }, + { url = "https://files.pythonhosted.org/packages/17/d7/eecd9ce8e146d3721115d82d3836efdbb712187e4590325df549989d18f4/psycopg2_binary-2.9.12-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5a0253224780c978746cb9be55a946bcdaf40fe3519c0f622924cdabdafe2c39", size = 4109308, upload-time = "2026-04-20T23:34:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/b1dc289b362cc8d45697b57eefbd673186f49a4ea0906928988e3affcc98/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0dc9228d47c46bda253d2ecd6bb93b56a9f2d7ad33b684a1fa3622bf74ffe30c", size = 3654405, upload-time = "2026-04-20T23:34:19.303Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/4c4aea6473214dbdbd0fbba11aa4691e76dc01722c55724c5951719865ff/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f921f3cd87035ef7df233383011d7a53ea1d346224752c1385f1edfd790ceb6a", size = 3299187, upload-time = 
"2026-04-20T23:34:21.206Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5d/b03b99986446a4f57b170ed9a2579fb7ff9783ca0fa5226b19db99737fee/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d999bd982a723113c1a45b55a7a6a90d64d0ed2278020ed625c490ff7bef96c", size = 3047716, upload-time = "2026-04-20T23:34:23.077Z" }, + { url = "https://files.pythonhosted.org/packages/14/86/382ee4afbd1d97500c9d2862b20c2fdeddf4b7335e984df3fb4309f64108/psycopg2_binary-2.9.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29d4d134bd0ab46ffb04e94aa3c5fa3ef582e9026609165e2f758ff76fc3a3be", size = 3349237, upload-time = "2026-04-20T23:34:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/a8/16/9a57c75ba1eda7165c017342f526810d5f5a12647dde749c99ae9a7141d7/psycopg2_binary-2.9.12-cp311-cp311-win_amd64.whl", hash = "sha256:cb4a1dacdd48077150dc762a9e5ddbf32c256d66cb46f80839391aa458774936", size = 2757036, upload-time = "2026-04-20T23:34:27.77Z" }, + { url = "https://files.pythonhosted.org/packages/e2/9f/ef4ef3c8e15083df90ca35265cfd1a081a2f0cc07bb229c6314c6af817f4/psycopg2_binary-2.9.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5cdc05117180c5fa9c40eea8ea559ce64d73824c39d928b7da9fb5f6a9392433", size = 3712459, upload-time = "2026-04-20T23:34:30.549Z" }, + { url = "https://files.pythonhosted.org/packages/b5/01/3dd14e46ba48c1e1a6ec58ee599fa1b5efa00c246d5046cd903d0eeb1af1/psycopg2_binary-2.9.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d3227a3bc228c10d21011a99245edca923e4e8bf461857e869a507d9a41fe9f6", size = 3822936, upload-time = "2026-04-20T23:34:32.77Z" }, + { url = "https://files.pythonhosted.org/packages/a6/f7/0640e4901119d8a9f7a1784b927f494e2198e213ceb593753d1f2c8b1b30/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:995ce929eede89db6254b50827e2b7fd61e50d11f0b116b29fffe4a2e53c4580", size = 4578676, upload-time = "2026-04-20T23:34:35.18Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/55/44df3965b5f297c50cc0b1b594a31c67d6127a9d133045b8a66611b14dfb/psycopg2_binary-2.9.12-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9fe06d93e72f1c048e731a2e3e7854a5bfaa58fc736068df90b352cefe66f03f", size = 4274917, upload-time = "2026-04-20T23:34:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/b0/4b/74535248b1eac0c9336862e8617c765ac94dac76f9e25d7c4a79588c8907/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40e7b28b63aaf737cb3a1edc3a9bbc9a9f4ad3dcb7152e8c1130e4050eddcb7d", size = 5894843, upload-time = "2026-04-20T23:34:40.856Z" }, + { url = "https://files.pythonhosted.org/packages/f2/ba/f1bf8d2ae71868ad800b661099086ee52bc0f8d9f05be1acd8ebb06757cc/psycopg2_binary-2.9.12-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89d19a9f7899e8eb0656a2b3a08e0da04c720a06db6e0033eab5928aabe60fa9", size = 4110556, upload-time = "2026-04-20T23:34:44.016Z" }, + { url = "https://files.pythonhosted.org/packages/45/46/c15706c338403b7c420bcc0c2905aad116cc064545686d8bf85f1999ea00/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:612b965daee295ae2da8f8218ce1d274645dc76ef3f1abf6a0a94fd57eff876d", size = 3655714, upload-time = "2026-04-20T23:34:46.233Z" }, + { url = "https://files.pythonhosted.org/packages/b3/7c/a2d5dc09b64a4564db242a0fe418fde7d33f6f8259dd2c5b9d7def00fb5a/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b9a339b79d37c1b45f3235265f07cdeb0cb5ad7acd2ac7720a5920989c17c24e", size = 3301154, upload-time = "2026-04-20T23:34:49.528Z" }, + { url = "https://files.pythonhosted.org/packages/c0/e8/cc8c9a4ce71461f9ec548d38cadc41dc184b34c73e6455450775a9334ccd/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3471336e1acfd9c7fe507b8bad5af9317b6a89294f9eb37bd9a030bb7bebcdc6", size = 3048882, upload-time = "2026-04-20T23:34:51.86Z" }, + { 
url = "https://files.pythonhosted.org/packages/19/6a/31e2296bc0787c5ab75d3d118e40b239db8151b5192b90b77c72bc9256e9/psycopg2_binary-2.9.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7af18183109e23502c8b2ae7f6926c0882766f35b5175a4cd737ad825e4d7a1b", size = 3351298, upload-time = "2026-04-20T23:34:54.124Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a8/75f4e3e11203b590150abed2cf7794b9c9c9f7eceddae955191138b44dde/psycopg2_binary-2.9.12-cp312-cp312-win_amd64.whl", hash = "sha256:398fcd4db988c7d7d3713e2b8e18939776fd3fb447052daae4f24fa39daede4c", size = 2757230, upload-time = "2026-04-20T23:34:56.242Z" }, + { url = "https://files.pythonhosted.org/packages/91/bb/4608c96f970f6e0c56572e87027ef4404f709382a3503e9934526d7ba051/psycopg2_binary-2.9.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7c729a73c7b1b84de3582f73cdd27d905121dc2c531f3d9a3c32a3011033b965", size = 3712419, upload-time = "2026-04-20T23:34:58.754Z" }, + { url = "https://files.pythonhosted.org/packages/5e/af/48f76af9d50d61cf390f8cd657b503168b089e2e9298e48465d029fcc713/psycopg2_binary-2.9.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4413d0caef93c5cf50b96863df4c2efe8c269bf2267df353225595e7e15e8df7", size = 3822990, upload-time = "2026-04-20T23:35:00.821Z" }, + { url = "https://files.pythonhosted.org/packages/7a/df/aba0f99397cd811d32e06fc0cc781f1f3ce98bc0e729cb423925085d781a/psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:4dfcf8e45ebb0c663be34a3442f65e17311f3367089cd4e5e3a3e8e62c978777", size = 4578696, upload-time = "2026-04-20T23:35:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/eaa74021ac4e4d5c2f83d82fc6615a63f4fe6c94dc4e94c3990427053f67/psycopg2_binary-2.9.12-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c41321a14dd74aceb6a9a643b9253a334521babfa763fa873e33d89cfa122fb5", size = 4274982, upload-time = "2026-04-20T23:35:05.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/ed/c25deff98bd26187ba48b3b250a3ffc3037c46c5b89362534a15d200e0db/psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83946ba43979ebfdc99a3cd0ee775c89f221df026984ba19d46133d8d75d3cd9", size = 5894867, upload-time = "2026-04-20T23:35:07.902Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/8d0e21ca77373c6c9589e5c4528f6e8f0c08c62cafc76fb0bddb7a2cee22/psycopg2_binary-2.9.12-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:411e85815652d13560fbe731878daa5d92378c4995a22302071890ec3397d019", size = 4110578, upload-time = "2026-04-20T23:35:10.149Z" }, + { url = "https://files.pythonhosted.org/packages/00/fc/f481e2435bd8f742d0123309174aae4165160ad3ef17c1b99c3622c241d2/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c8ad4c08e00f7679559eaed7aff1edfffc60c086b976f93972f686384a95e2c", size = 3655816, upload-time = "2026-04-20T23:35:12.56Z" }, + { url = "https://files.pythonhosted.org/packages/53/79/b9f46466bdbe9f239c96cde8be33c1aace4842f06013b47b730dc9759187/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:00814e40fa23c2b37ef0a1e3c749d89982c73a9cb5046137f0752a22d432e82f", size = 3301307, upload-time = "2026-04-20T23:35:15.029Z" }, + { url = "https://files.pythonhosted.org/packages/3f/19/7dc003b32fe35024df89b658104f7c8538a8b2dcbde7a4e746ce929742e7/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:98062447aebc20ed20add1f547a364fd0ef8933640d5372ff1873f8deb9b61be", size = 3048968, upload-time = "2026-04-20T23:35:16.757Z" }, + { url = "https://files.pythonhosted.org/packages/91/58/2dbd7db5c604d45f4950d988506aae672a14126ec22998ced5021cbb76bb/psycopg2_binary-2.9.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:66a7685d7e548f10fb4ce32fb01a7b7f4aa702134de92a292c7bd9e0d3dbd290", size = 3351369, upload-time = "2026-04-20T23:35:18.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/ee/dee8dcaad07f735824de3d6563bc67119fa6c28257b17977a8d624f02fab/psycopg2_binary-2.9.12-cp313-cp313-win_amd64.whl", hash = "sha256:b6937f5fe4e180aeee87de907a2fa982ded6f7f15d7218f78a083e4e1d68f2a0", size = 2757347, upload-time = "2026-04-20T23:35:21.283Z" }, + { url = "https://files.pythonhosted.org/packages/13/1b/708c0dca874acfad6d65314271859899a79007686f3a1f74e82a2ed4b645/psycopg2_binary-2.9.12-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6f3b3de8a74ef8db215f22edffb19e32dc6fa41340456de7ec99efdc8a7b3ec2", size = 3712428, upload-time = "2026-04-20T23:35:23.453Z" }, + { url = "https://files.pythonhosted.org/packages/d6/39/ddbea9d4b4de6aca9431b6ed253f530f8a02d3b8f9bcfd0dbfe2b3de6fe4/psycopg2_binary-2.9.12-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1006fb62f0f0bc5ce256a832356c6262e91be43f5e4eb15b5eaf38079464caf2", size = 3823184, upload-time = "2026-04-20T23:35:25.92Z" }, + { url = "https://files.pythonhosted.org/packages/bf/a0/bc2fef74b106fa345567122a0659e6d94512ed7dc0131ec44c9e5aba3725/psycopg2_binary-2.9.12-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:840066105706cd2eb29b9a1c2329620056582a4bf3e8169dec5c447042d0869f", size = 4579157, upload-time = "2026-04-20T23:35:28.542Z" }, + { url = "https://files.pythonhosted.org/packages/57/d7/d4e3b2005d3de607ca4fbb0e8742e248056e52184a6b94ebda3c1c2c329b/psycopg2_binary-2.9.12-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:863f5d12241ebe1c76a72a04c2113b6dc905f90b9cef0e9be0efd994affd9354", size = 4274970, upload-time = "2026-04-20T23:35:30.418Z" }, + { url = "https://files.pythonhosted.org/packages/2e/42/c9853f8db3967fe08bcde11f53d53b85d351750cae726ce001cb68afa9c1/psycopg2_binary-2.9.12-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a99eaab34a9010f1a086b126de467466620a750634d114d20455f3a824aae033", size = 5895175, upload-time = "2026-04-20T23:35:33.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/fd/b82b5601a97630308bef079f545ffec481bbbc795c2ba5ec416a01d03f60/psycopg2_binary-2.9.12-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ffdd7dc5463ccd61845ac37b7012d0f35a1548df9febe14f8dd549be4a0bc81e", size = 4110658, upload-time = "2026-04-20T23:35:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/62/8c/32ca69b0389ef25dd22937bf9e8fbe2ce27aea20b05ded48c4ce4cb42475/psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54a0dfecab1b48731f934e06139dfe11e24219fb6d0ceb32177cf0375f14c7b5", size = 3656251, upload-time = "2026-04-20T23:35:37.854Z" }, + { url = "https://files.pythonhosted.org/packages/c4/29/96992a2b59e3b9d730fcf9612d0a387305025dc867a9fc490a9e496e074e/psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:96937c9c5d891f772430f418a7a8b4691a90c3e6b93cf72b5bd7cad8cbca32a5", size = 3301810, upload-time = "2026-04-20T23:35:39.927Z" }, + { url = "https://files.pythonhosted.org/packages/56/ad/44b06659949b243ae10112cd3b20a197f9bf3e81d5651379b9eb889bfaad/psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:77b348775efd4cdab410ec6609d81ccecd1139c90265fa583a7255c8064bc03d", size = 3048977, upload-time = "2026-04-20T23:35:41.806Z" }, + { url = "https://files.pythonhosted.org/packages/1d/f2/10a1bcebadb6aa55e280e1f58975c36a7b560ea525184c7aa4064c466633/psycopg2_binary-2.9.12-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:527e6342b3e44c2f0544f6b8e927d60de7f163f5723b8f1dfa7d2a84298738cd", size = 3351466, upload-time = "2026-04-20T23:35:43.993Z" }, + { url = "https://files.pythonhosted.org/packages/20/be/b732c8418ffa5bcfda002890f5dc4c869fc17db66ff11f53b17cfe44afc0/psycopg2_binary-2.9.12-cp314-cp314-win_amd64.whl", hash = "sha256:f12ae41fcafadb39b2785e64a40f9db05d6de2ac114077457e0e7c597f3af980", size = 2848762, upload-time = "2026-04-20T23:35:46.421Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.3" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pycron" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/5d/340be12ae4a69c33102dfb6ddc1dc6e53e69b2d504fa26b5d34a472c3057/pycron-3.2.0.tar.gz", hash = "sha256:e125a28aca0295769541a40633f70b602579df48c9cb357c36c28d2628ba2b13", size = 4248, upload-time = "2025-06-05T13:24:12.636Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/76/caf316909f4545e7158e0e1defd8956a1da49f4af04f5d16b18c358dfeac/pycron-3.2.0-py3-none-any.whl", hash = "sha256:6d2349746270bd642b71b9f7187cf13f4d9ee2412b4710396a507b5fe4f60dac", size = 4904, upload-time = 
"2025-06-05T13:24:11.477Z" }, +] + +[[package]] +name = "pydantic" +version = "2.13.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/a5/b60d21ac674192f8ab0ba4e9fd860690f9b4a6e51ca5df118733b487d8d6/pydantic-2.13.4.tar.gz", hash = "sha256:c40756b57adaa8b1efeeced5c196f3f3b7c435f90e84ea7f443901bec8099ef6", size = 844775, upload-time = "2026-05-06T13:43:05.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/7b/122376b1fd3c62c1ed9dc80c931ace4844b3c55407b6fb2d199377c9736f/pydantic-2.13.4-py3-none-any.whl", hash = "sha256:45a282cde31d808236fd7ea9d919b128653c8b38b393d1c4ab335c62924d9aba", size = 472262, upload-time = "2026-05-06T13:43:02.641Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.46.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/56/921726b776ace8d8f5db44c4ef961006580d91dc52b803c489fafd1aa249/pydantic_core-2.46.4.tar.gz", hash = "sha256:62f875393d7f270851f20523dd2e29f082bcc82292d66db2b64ea71f64b6e1c1", size = 471464, upload-time = "2026-05-06T13:37:06.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/fa/6d7708d2cfc1a832acb6aeb0cd16e801902df8a0f583bb3b4b527fde022e/pydantic_core-2.46.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0e96592440881c74a213e5ad528e2b24d3d4f940de2766bed9010ab1d9e51594", size = 2111872, upload-time = "2026-05-06T13:40:27.596Z" }, + { url = "https://files.pythonhosted.org/packages/ae/6f/aa064a3e74b5745afbdf250594f38e7ead05e2d651bcb35994b9417a0d4d/pydantic_core-2.46.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:e0d65b8c354be7fb5f720c3caa8bc940bc2d20ce749c8e06135f07f8ed95dd7c", size = 1948255, upload-time = "2026-05-06T13:39:12.574Z" }, + { url = "https://files.pythonhosted.org/packages/43/3a/41114a9f7569b84b4d84e7a018c57c56347dac30c0d4a872946ec4e36c46/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bfb192b3f4b9e8a89b6277b6ce787564f62cfd272055f6e685726b111dc7826", size = 1972827, upload-time = "2026-05-06T13:38:19.841Z" }, + { url = "https://files.pythonhosted.org/packages/ef/25/1ab42e8048fe551934d9884e8d64daa7e990ad386f310a15981aeb6a5b08/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9037063db01f09b09e237c282b6792bd4da634b5402c4e7f0c61effed7701a04", size = 2041051, upload-time = "2026-05-06T13:38:10.447Z" }, + { url = "https://files.pythonhosted.org/packages/94/c2/1a934597ddf08da410385b3b7aae91956a5a76c635effef456074fad7e88/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc010ab034c8c7452522748bf937df58020d256ccae0874463d1f4d01758af8e", size = 2221314, upload-time = "2026-05-06T13:40:13.089Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/9e8ad178c9c4df27ad3c8f25d1fe2a7ab0d2ba0559fad4aee5d3d1f16771/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5dac79fa1614d1e06ca695109c6105923bd9c7d1d6c918d4e637b7e6b32fd3", size = 2285146, upload-time = "2026-05-06T13:38:59.224Z" }, + { url = "https://files.pythonhosted.org/packages/80/50/540cd3aeefc041beb111125c4bff779831a2111fc6b15a9138cda277d32c/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fa868638bf362d3d138ea55829cefb3d5f4b0d7f142234382a15e2485dbec4", size = 2089685, upload-time = "2026-05-06T13:38:17.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/a4/b440ad35f05f6a38f89fa0f149accb3f0e02be94ca5e15f3c449a61b4bc9/pydantic_core-2.46.4-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:17299feefe090f2caa5b8e37222bb5f663e4935a8bfa6931d4102e5df1a9f398", size = 2115420, upload-time = "2026-05-06T13:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/99/61/de4f55db8dfd57bfdfa9a12ec90fe1b57c4f41062f7ca86f08586b3e0ac0/pydantic_core-2.46.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4c63ebc82684aa89d9a3bcbd13d515b3be44250dc68dd3bd81526c1cb31286c3", size = 2165122, upload-time = "2026-05-06T13:37:01.167Z" }, + { url = "https://files.pythonhosted.org/packages/f7/52/7c529d7bdb2d1068bd52f51fe32572c8301f9a4febf1948f10639f1436f5/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaa2a54443eff1950ba5ddc6b6ccda0d9c84a364276a62f969bdf2a390650848", size = 2182573, upload-time = "2026-05-06T13:38:45.04Z" }, + { url = "https://files.pythonhosted.org/packages/37/b3/7c40325848ba78247f2812dcf9c7274e38cd801820ca6dd9fe63bcfb0eb4/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:18e5ceec2ab67e6d5f1a9085e5a24c9c4e2ac4545730bfe668680bca05e555f3", size = 2317139, upload-time = "2026-05-06T13:37:15.539Z" }, + { url = "https://files.pythonhosted.org/packages/d9/37/f913f81a657c865b75da6c0dbed79876073c2a43b5bd9edbe8da785e4d49/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a0f62d0a58f4e7da165457e995725421e0064f2255d8eccebc49f41bbc23b109", size = 2360433, upload-time = "2026-05-06T13:37:30.099Z" }, + { url = "https://files.pythonhosted.org/packages/c4/67/6acaa1be2567f9256b056d8477158cac7240813956ce86e49deae8e173b4/pydantic_core-2.46.4-cp311-cp311-win32.whl", hash = "sha256:041bde0a48fd37cf71cab1c9d56d3e8625a3793fef1f7dd232b3ff37e978ecda", size = 1985513, upload-time = "2026-05-06T13:38:15.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/e6/c505f83dfeda9a2e5c995cfd872949e4d05e12f7feb3dca72f633daefa94/pydantic_core-2.46.4-cp311-cp311-win_amd64.whl", hash = "sha256:6f2eeda33a839975441c86a4119e1383c50b47faf0cbb5176985565c6bb02c33", size = 2071114, upload-time = "2026-05-06T13:40:35.416Z" }, + { url = "https://files.pythonhosted.org/packages/0f/da/7a263a96d965d9d0df5e8de8a475f33495451117035b09acb110288c381f/pydantic_core-2.46.4-cp311-cp311-win_arm64.whl", hash = "sha256:14f4c5d6db102bd796a627bbb3a17b4cf4574b9ae861d8b7c9a9661c6dd3362d", size = 2044298, upload-time = "2026-05-06T13:38:29.754Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8c/af022f0af448d7747c5154288d46b5f2bc5f17366eaa0e23e9aa04d59f3b/pydantic_core-2.46.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3245406455a5d98187ec35530fd772b1d799b26667980872c8d4614991e2c4a2", size = 2106158, upload-time = "2026-05-06T13:38:57.215Z" }, + { url = "https://files.pythonhosted.org/packages/19/95/6195171e385007300f0f5574592e467c568becce2d937a0b6804f218bc49/pydantic_core-2.46.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:962ccbab7b642487b1d8b7df90ef677e03134cf1fd8880bf698649b22a69371f", size = 1951724, upload-time = "2026-05-06T13:37:02.697Z" }, + { url = "https://files.pythonhosted.org/packages/8e/bc/f47d1ff9cbb1620e1b5b697eef06010035735f07820180e74178226b27b3/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8233f2947cf85404441fd7e0085f53b10c93e0ee78611099b5c7237e36aacbf7", size = 1975742, upload-time = "2026-05-06T13:37:09.448Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/9b9a5b0306345664a2da6410877af6e8082481b5884b3ddd78d47c6013ce/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a233125ac121aa3ffba9a2b59edfc4a985a76092dc8279586ab4b71390875e7", size = 2052418, upload-time = "2026-05-06T13:37:38.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/b7/a65fec226f5d78fc39f4a13c4cc0c768c22b113438f60c14adc9d2865038/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b712b53160b79a5850310b912a5ef8e57e56947c8ad690c227f5c9d7e561712", size = 2232274, upload-time = "2026-05-06T13:38:27.753Z" }, + { url = "https://files.pythonhosted.org/packages/68/f0/92039db98b907ef49269a8271f67db9cb78ae2fc68062ef7e4e77adb5f61/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9401557acd873c3a7f3eb9383edef8ac4968f9510e340f4808d427e75667e7b4", size = 2309940, upload-time = "2026-05-06T13:38:05.353Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/2aab507d3d00ca626e8e57c1eac6a79e4e5fbcc63eb99733ff55d1717f65/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:926c9541b14b12b1681dca8a0b75feb510b06c6341b70a8e500c2fdcff837cce", size = 2094516, upload-time = "2026-05-06T13:39:10.577Z" }, + { url = "https://files.pythonhosted.org/packages/22/37/a8aca44d40d737dde2bc05b3c6c07dff0de07ce6f82e9f3167aeaf4d5dea/pydantic_core-2.46.4-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:56cb4851bcaf3d117eddcef4fe66afd750a50274b0da8e22be256d10e5611987", size = 2136854, upload-time = "2026-05-06T13:40:22.59Z" }, + { url = "https://files.pythonhosted.org/packages/24/99/fcef1b79238c06a8cbec70819ac722ba76e02bc8ada9b0fd66eba40da01b/pydantic_core-2.46.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c68fcd102d71ea85c5b2dfac3f4f8476eff42a9e078fd5faefff6d145063536b", size = 2180306, upload-time = "2026-05-06T13:40:10.666Z" }, + { url = "https://files.pythonhosted.org/packages/ae/6c/fc44000918855b42779d007ae63b0532794739027b2f417321cddbc44f6a/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b2f69dec1725e79a012d920df1707de5caf7ed5e08f3be4435e25803efc47458", size = 2190044, upload-time = "2026-05-06T13:40:43.231Z" }, + { url 
= "https://files.pythonhosted.org/packages/6b/65/d9cadc9f1920d7a127ad2edba16c1db7916e59719285cd6c94600b0080ba/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:8d0820e8192167f80d88d64038e609c31452eeca865b4e1d9950a27a4609b00b", size = 2329133, upload-time = "2026-05-06T13:39:57.365Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cf/c873d91679f3a30bcf5e7ac280ce5573483e72295307685120d0d5ad3416/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fbdb89b3e1c94a30cc5edfce477c6e6a5dc4d8f84665b455c27582f211a1c72c", size = 2374464, upload-time = "2026-05-06T13:38:06.976Z" }, + { url = "https://files.pythonhosted.org/packages/47/bd/6f2fc8188f31bf10590f1e98e7b306336161fac930a8c514cd7bd828c7dc/pydantic_core-2.46.4-cp312-cp312-win32.whl", hash = "sha256:9aa768456404a8bf48a4406685ac2bec8e72b62c69313734fa3b73cf33b3a894", size = 1974823, upload-time = "2026-05-06T13:40:47.985Z" }, + { url = "https://files.pythonhosted.org/packages/40/8c/985c1d41ea1107c2534abd9870e4ed5c8e7669b5c308297835c001e7a1c4/pydantic_core-2.46.4-cp312-cp312-win_amd64.whl", hash = "sha256:e9c26f834c65f5752f3f06cb08cb86a913ceb7274d0db6e267808a708b46bc89", size = 2072919, upload-time = "2026-05-06T13:39:21.153Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ba/f463d006e0c47373ca7ec5e1a261c59dc01ef4d62b2657af925fb0deee3a/pydantic_core-2.46.4-cp312-cp312-win_arm64.whl", hash = "sha256:4fc73cb559bdb54b1134a706a2802a4cddd27a0633f5abb7e53056268751ac6a", size = 2027604, upload-time = "2026-05-06T13:39:03.753Z" }, + { url = "https://files.pythonhosted.org/packages/51/a2/5d30b469c5267a17b39dec53208222f76a8d351dfac4af661888c5aee77d/pydantic_core-2.46.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5d5902252db0d3cedf8d4a1bc68f70eeb430f7e4c7104c8c476753519b423008", size = 2106306, upload-time = "2026-05-06T13:37:48.029Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/81/4fa520eaffa8bd7d1525e644cd6d39e7d60b1592bc5b516693c7340b50f1/pydantic_core-2.46.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c94f0688e7b8d0a67abf40e57a7eaaecd17cc9586706a31b76c031f63df052b4", size = 1951906, upload-time = "2026-05-06T13:37:17.012Z" }, + { url = "https://files.pythonhosted.org/packages/03/d5/fd02da45b659668b05923b17ba3a0100a0a3d5541e3bd8fcc4ecb711309e/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f027324c56cd5406ca49c124b0db10e56c69064fec039acc571c29020cc87c76", size = 1976802, upload-time = "2026-05-06T13:37:35.113Z" }, + { url = "https://files.pythonhosted.org/packages/21/f2/95727e1368be3d3ed485eaab7adbd7dda408f33f7a36e8b48e0144002b91/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e739fee756ba1010f8bcccb534252e85a35fe45ae92c295a06059ce58b74ccd3", size = 2052446, upload-time = "2026-05-06T13:37:12.313Z" }, + { url = "https://files.pythonhosted.org/packages/9c/86/5d99feea3f77c7234b8718075b23db11532773c1a0dbd9b9490215dc2eeb/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d56801be94b86a9da183e5f3766e6310752b99ff647e38b09a9500d88e46e76", size = 2232757, upload-time = "2026-05-06T13:39:01.149Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3a/508ac615935ef7588cf6d9e9b91309fdc2da751af865e02a9098de88258c/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2412e734dcb48da14d4e4006b82b46b74f2518b8a26ee7e58c6844a6cd6d03c4", size = 2309275, upload-time = "2026-05-06T13:37:41.406Z" }, + { url = "https://files.pythonhosted.org/packages/07/f8/41db9de19d7987d6b04715a02b3b40aea467000275d9d758ffaa31af7d50/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9551187363ffc0de2a00b2e47c25aeaeb1020b69b668762966df15fc5659dd5a", size = 2094467, upload-time = 
"2026-05-06T13:39:18.847Z" }, + { url = "https://files.pythonhosted.org/packages/2c/e2/f35033184cb11d0052daf4416e8e10a502ea2ac006fc4f459aee872727d1/pydantic_core-2.46.4-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0186750b482eefa11d7f435892b09c5c606193ef3375bcf94aa00ae6bfb66262", size = 2134417, upload-time = "2026-05-06T13:40:17.944Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7b/6ceeb1cc90e193862f444ebe373d8fdf613f0a82572dde03fb10734c6c71/pydantic_core-2.46.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5855698a4856556d86e8e6cd8434bc3ac0314ee8e12089ae0e143f64c6256e4e", size = 2179782, upload-time = "2026-05-06T13:40:32.618Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f2/c8d7773ede6af08036423a00ae0ceffce266c3c52a096c435d68c896083f/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:cbaf13819775b7f769bf4a1f066cb6df7a28d4480081a589828ef190226881cd", size = 2188782, upload-time = "2026-05-06T13:36:51.018Z" }, + { url = "https://files.pythonhosted.org/packages/59/31/0c864784e31f09f05cdd87606f08923b9c9e7f6e51dd27f20f62f975ce9f/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:633147d34cf4550417f12e2b1a0383973bdf5cdfde212cb09e9a581cf10820be", size = 2328334, upload-time = "2026-05-06T13:40:37.764Z" }, + { url = "https://files.pythonhosted.org/packages/c2/eb/4f6c8a41efa30baa755590f4141abf3a8c370fab610915733e74134a7270/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:82cf5301172168103724d49a1444d3378cb20cdee30b116a1bd6031236298a5d", size = 2372986, upload-time = "2026-05-06T13:39:34.152Z" }, + { url = "https://files.pythonhosted.org/packages/5b/24/b375a480d53113860c299764bfe9f349a3dc9108b3adc0d7f0d786492ebf/pydantic_core-2.46.4-cp313-cp313-win32.whl", hash = "sha256:9fa8ae11da9e2b3126c6426f147e0fba88d96d65921799bb30c6abd1cb2c97fb", size = 1973693, upload-time = "2026-05-06T13:37:55.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/e8/cff247591966f2d22ec8c003cd7587e27b7ba7b81ab2fb888e3ab75dc285/pydantic_core-2.46.4-cp313-cp313-win_amd64.whl", hash = "sha256:6b3ace8194b0e5204818c92802dcdca7fc6d88aabbb799d7c795540d9cd6d292", size = 2071819, upload-time = "2026-05-06T13:38:49.139Z" }, + { url = "https://files.pythonhosted.org/packages/c6/1a/f4aee670d5670e9e148e0c82c7db98d780be566c6e6a97ee8035528ca0b3/pydantic_core-2.46.4-cp313-cp313-win_arm64.whl", hash = "sha256:184c081504d17f1c1066e430e117142b2c77d9448a97f7b65c6ac9fd9aee238d", size = 2027411, upload-time = "2026-05-06T13:40:45.796Z" }, + { url = "https://files.pythonhosted.org/packages/8d/74/228a26ddad29c6672b805d9fd78e8d251cd04004fa7eed0e622096cd0250/pydantic_core-2.46.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:428e04521a40150c85216fc8b85e8d39fece235a9cf5e383761238c7fa9b96fb", size = 2102079, upload-time = "2026-05-06T13:38:41.019Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/8970b150a4b4365623ae00fc88603491f763c627311ae8031e3111356d6e/pydantic_core-2.46.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23ace664830ee0bfe014a0c7bc248b1f7f25ed7ad103852c317624a1083af462", size = 1952179, upload-time = "2026-05-06T13:36:59.812Z" }, + { url = "https://files.pythonhosted.org/packages/95/30/5211a831ae054928054b2f79731661087a2bc5c01e825c672b3a4a8f1b3e/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce5c1d2a8b27468f433ca974829c44060b8097eedc39933e3c206a90ee49c4a9", size = 1978926, upload-time = "2026-05-06T13:37:39.933Z" }, + { url = "https://files.pythonhosted.org/packages/57/e9/689668733b1eb67adeef047db3c2e8788fcf65a7fd9c9e2b46b7744fe245/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7283d57845ecf5a163403eb0702dfc220cc4fbdd18919cb5ccea4f95ee1cdab4", size = 2046785, upload-time = "2026-05-06T13:38:01.995Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/d9/6715260422ff50a2109878fd24d948a6c3446bb2664f34ee78cd972b3acd/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8daafc69c93ee8a0204506a3b6b30f586ef54028f52aeeeb5c4cfc5184fd5914", size = 2228733, upload-time = "2026-05-06T13:40:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/fdb2f64316afca925640f8e70bb1a564b0ec2721c1389e25b8eb4bf9a299/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2213145bcc2ba85884d0ac63d222fece9209678f77b9b4d76f054c561adb28", size = 2307534, upload-time = "2026-05-06T13:37:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/89/1d/8eff589b45bb8190a9d12c49cfad0f176a5cbd1534908a6b5125e2886239/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a5f930472650a82629163023e630d160863fce524c616f4e5186e5de9d9a49b", size = 2099732, upload-time = "2026-05-06T13:39:31.942Z" }, + { url = "https://files.pythonhosted.org/packages/06/d5/ee5a3366637fee41dee51a1fc91562dcf12ddbc68fda34e6b253da2324bb/pydantic_core-2.46.4-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:c1b3f518abeca3aa13c712fd202306e145abf59a18b094a6bafb2d2bbf59192c", size = 2129627, upload-time = "2026-05-06T13:37:25.033Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/2414be571d2c6a6c4d08be21f9292b6d3fdb08949a97b6dfe985017821db/pydantic_core-2.46.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a7dd0b3ee80d90150e3495a3a13ac34dbcbfd4f012996a6a1d8900e91b5c0fb", size = 2179141, upload-time = "2026-05-06T13:37:14.046Z" }, + { url = "https://files.pythonhosted.org/packages/7b/79/7daa95be995be0eecc4cf75064cb33f9bbbfe3fe0158caf2f0d4a996a5c7/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:3fb702cd90b0446a3a1c5e470bfa0dd23c0233b676a9099ddcc964fa6ca13898", size = 2184325, upload-time = "2026-05-06T13:36:53.615Z" }, + { 
url = "https://files.pythonhosted.org/packages/9f/cb/d0a382f5c0de8a222dc61c65348e0ce831b1f68e0a018450d31c2cace3a5/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b8458003118a712e66286df6a707db01c52c0f52f7db8e4a38f0da1d3b94fc4e", size = 2323990, upload-time = "2026-05-06T13:40:29.971Z" }, + { url = "https://files.pythonhosted.org/packages/05/db/d9ba624cc4a5aced1598e88c04fdbd8310c8a69b9d38b9a3d39ce3a61ed7/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:372429a130e469c9cd698925ce5fc50940b7a1336b0d82038e63d5bbc4edc519", size = 2369978, upload-time = "2026-05-06T13:37:23.027Z" }, + { url = "https://files.pythonhosted.org/packages/f2/20/d15df15ba918c423461905802bfd2981c3af0bfa0e40d05e13edbfa48bc3/pydantic_core-2.46.4-cp314-cp314-win32.whl", hash = "sha256:85bb3611ff1802f3ee7fdd7dbff26b56f343fb432d57a4728fdd49b6ef35e2f4", size = 1966354, upload-time = "2026-05-06T13:38:03.499Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b6/6b8de4c0a7d7ab3004c439c80c5c1e0a3e8d78bbae19379b01960383d9e5/pydantic_core-2.46.4-cp314-cp314-win_amd64.whl", hash = "sha256:811ff8e9c313ab425368bcbb36e5c4ebd7108c2bbf4e4089cfbb0b01eff63fac", size = 2072238, upload-time = "2026-05-06T13:39:40.807Z" }, + { url = "https://files.pythonhosted.org/packages/32/36/51eb763beec1f4cf59b1db243a7dcc39cbb41230f050a09b9d69faaf0a48/pydantic_core-2.46.4-cp314-cp314-win_arm64.whl", hash = "sha256:bfec22eab3c8cc2ceec0248aec886624116dc079afa027ecc8ad4a7e62010f8a", size = 2018251, upload-time = "2026-05-06T13:37:26.72Z" }, + { url = "https://files.pythonhosted.org/packages/e8/91/855af51d625b23aa987116a19e231d2aaef9c4a415273ddc189b79a45fee/pydantic_core-2.46.4-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:af8244b2bef6aaad6d92cda81372de7f8c8d36c9f0c3ea36e827c60e7d9467a0", size = 2099593, upload-time = "2026-05-06T13:39:47.682Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/1b/8784a54c65edb5f49f0a14d6977cf1b209bba85a4c77445b255c2de58ab3/pydantic_core-2.46.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a4330cdbc57162e4b3aa303f588ba752257694c9c9be3e7ebb11b4aca659b5d", size = 1935226, upload-time = "2026-05-06T13:40:40.428Z" }, + { url = "https://files.pythonhosted.org/packages/e8/e7/1955d28d1afc56dd4b3ad7cc0cf39df1b9852964cf16e5d13912756d6d6b/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c61fc04a3d840155ff08e475a04809278972fe6aef51e2720554e96367e34b", size = 1974605, upload-time = "2026-05-06T13:37:32.029Z" }, + { url = "https://files.pythonhosted.org/packages/93/e2/3fedbf0ba7a22850e6e9fd78117f1c0f10f950182344d8a6c535d468fdd8/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c50f2528cf200c5eed56faf3f4e22fcd5f38c157a8b78576e6ba3168ec35f000", size = 2030777, upload-time = "2026-05-06T13:38:55.239Z" }, + { url = "https://files.pythonhosted.org/packages/f8/61/46be275fcaaba0b4f5b9669dd852267ce1ff616592dccf7a7845588df091/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cbe8b01f948de4286c74cdd6c667aceb38f5c1e26f0693b3983d9d74887c65e", size = 2236641, upload-time = "2026-05-06T13:37:08.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/db/12e93e46a8bac9988be3c016860f83293daea8c716c029c9ace279036f2f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:617d7e2ca7dcb8c5cf6bcb8c59b8832c94b36196bbf1cbd1bfb56ed341905edd", size = 2286404, upload-time = "2026-05-06T13:40:20.221Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4a/4d8b19008f38d31c53b8219cfedc2e3d5de5fe99d90076b7e767de29274f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7027560ee92211647d0d34e3f7cd6f50da56399d26a9c8ad0da286d3869a53f3", size = 2109219, upload-time = 
"2026-05-06T13:38:12.153Z" }, + { url = "https://files.pythonhosted.org/packages/88/70/3cbc40978fefb7bb09c6708d40d4ad1a5d70fd7213c3d17f971de868ec1f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:f99626688942fb746e545232e7726926f3be91b5975f8b55327665fafda991c7", size = 2110594, upload-time = "2026-05-06T13:40:02.971Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/b8d36736216e29491125531685b2f9e61aa5b4b2599893f8268551da3338/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3e9034a63de20e15e8ade85358bc6efc614008cab72898b4b4952bea0509ff", size = 2159542, upload-time = "2026-05-06T13:39:27.506Z" }, + { url = "https://files.pythonhosted.org/packages/1d/a2/367df868eb584dacf6bf82a389272406d7178e301c4ac82545ab98bc2dd9/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:97e7cf2be5c77b7d1a9713a05605d49460d02c6078d38d8bef3cbe323c548424", size = 2168146, upload-time = "2026-05-06T13:38:31.93Z" }, + { url = "https://files.pythonhosted.org/packages/c1/b8/4460f77f7e201893f649a29ab355dddd3beee8a97bcb1a320db414f9a06e/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:3bf92c5d0e00fefaab325a4d27828fe6b6e2a21848686b5b60d2d9eeb09d76c6", size = 2306309, upload-time = "2026-05-06T13:37:44.717Z" }, + { url = "https://files.pythonhosted.org/packages/64/c4/be2639293acd87dc8ddbcec41a73cee9b2ebf996fe6d892a1a74e88ad3f7/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:3ecbc122d18468d06ca279dc26a8c2e2d5acb10943bb35e36ae92096dc3b5565", size = 2369736, upload-time = "2026-05-06T13:37:05.645Z" }, + { url = "https://files.pythonhosted.org/packages/30/a6/9f9f380dbb301f67023bf8f707aaa75daadf84f7152d95c410fd7e81d994/pydantic_core-2.46.4-cp314-cp314t-win32.whl", hash = "sha256:e846ae7835bf0703ae43f534ab79a867146dadd59dc9ca5c8b53d5c8f7c9ef02", size = 1955575, upload-time = "2026-05-06T13:38:51.116Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/1f/f1eb9eb350e795d1af8586289746f5c5677d16043040d63710e22abc43c9/pydantic_core-2.46.4-cp314-cp314t-win_amd64.whl", hash = "sha256:2108ba5c1c1eca18030634489dc544844144ee36357f2f9f780b93e7ddbb44b5", size = 2051624, upload-time = "2026-05-06T13:38:21.672Z" }, + { url = "https://files.pythonhosted.org/packages/f6/d2/42dd53d0a85c27606f316d3aa5d2869c4e8470a5ed6dec30e4a1abe19192/pydantic_core-2.46.4-cp314-cp314t-win_arm64.whl", hash = "sha256:4fcbe087dbc2068af7eda3aa87634eba216dbda64d1ae73c8684b621d33f6596", size = 2017325, upload-time = "2026-05-06T13:40:52.723Z" }, + { url = "https://files.pythonhosted.org/packages/ee/a4/73995fd4ebbb46ba0ee51e6fa049b8f02c40daebb762208feda8a6b7894d/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:14d4edf427bdcf950a8a02d7cb44a08614388dd6e1bdcbf4f67504fa7887da9c", size = 2111589, upload-time = "2026-05-06T13:37:10.817Z" }, + { url = "https://files.pythonhosted.org/packages/fb/7f/f37d3a5e8bfcc2e403f5c57a730f2d815693fb42119e8ea48b3789335af1/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:0ce40cd7b21210e99342afafbd4d0f76d784eb5b1d60f3bdc566be4983c6c73b", size = 1944552, upload-time = "2026-05-06T13:36:56.717Z" }, + { url = "https://files.pythonhosted.org/packages/15/3c/d7eb777b3ff43e8433a4efb39a17aa8fd98a4ee8561a24a67ef5db07b2d6/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90884113d8b48f760e9587002789ddd741e76ab9f89518cd1e43b1f1a52ec44b", size = 1982984, upload-time = "2026-05-06T13:39:06.207Z" }, + { url = "https://files.pythonhosted.org/packages/63/87/70b9f40170a81afd55ca26c9b2acb25c20d64bcfbf888fafecb3ba077d4c/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66ce7632c22d837c95301830e111ad0128a32b8207533b60896a96c4915192ea", size = 2138417, upload-time = 
"2026-05-06T13:39:45.476Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1d/8987ad40f65ae1432753072f214fb5c74fe47ffbd0698bb9cbbb585664f8/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:1d8ba486450b14f3b1d63bc521d410ec7565e52f887b9fb671791886436a42f7", size = 2095527, upload-time = "2026-05-06T13:39:52.283Z" }, + { url = "https://files.pythonhosted.org/packages/64/d3/84c282a7eee1d3ac4c0377546ef5a1ea436ce26840d9ac3b7ed54a377507/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:3009f12e4e90b7f88b4f9adb1b0c4a3d58fe7820f3238c190047209d148026df", size = 1936024, upload-time = "2026-05-06T13:40:15.671Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ca/eac61596cdeb4d7e174d3dc0bd8a6238f14f75f97a24e7b7db4c7e7340a0/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad785e92e6dc634c21555edc8bd6b64957ab844541bcb96a1366c202951ae526", size = 1990696, upload-time = "2026-05-06T13:38:34.717Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c3/7c8b240552251faf6b3a957db200fcfbbcec36763c050428b601e0c9b83b/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00c603d540afdd6b80eb39f078f33ebd46211f02f33e34a32d9f053bba711de0", size = 2147590, upload-time = "2026-05-06T13:39:29.883Z" }, + { url = "https://files.pythonhosted.org/packages/11/cb/428de0385b6c8d44b716feba566abfacfbd23ee3c4439faa789a1456242f/pydantic_core-2.46.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0c563b08bca408dc7f65f700633d8442fffb2421fc47b8101377e9fd65051ff0", size = 2112782, upload-time = "2026-05-06T13:37:04.016Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b5/6a17bdadd0fc1f170adfd05a20d37c832f52b117b4d9131da1f41bb097ce/pydantic_core-2.46.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:db06ffe51636ffe9ca531fe9023dd64bdd794be8754cb5df57c5498ae5b518a7", size = 1952146, upload-time = "2026-05-06T13:39:43.092Z" }, + { url = "https://files.pythonhosted.org/packages/2a/dc/03734d80e362cd43ef65428e9de77c730ce7f2f11c60d2b1e1b39f0fbf99/pydantic_core-2.46.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133878133d271ade3d41d1bfb2a45ec38dbdbda40bc065921c6b04e4630127e2", size = 2134492, upload-time = "2026-05-06T13:36:58.124Z" }, + { url = "https://files.pythonhosted.org/packages/de/df/5e5ffc085ed07cc22d298134d3d911c63e91f6a0eb91fe646750a3209910/pydantic_core-2.46.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bc519fbf2b7578398853d815009ae5e4d4603d12f4e3f91da8c06852d3da3e9", size = 2156604, upload-time = "2026-05-06T13:37:49.88Z" }, + { url = "https://files.pythonhosted.org/packages/81/44/6e112a4253e56f5705467cbab7ab5e91ee7398ba3d56d358635958893d3e/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c7a7bd4e39e8e4c12c39cd480356842b6a8a06e41b23a55a5e3e191718838ddf", size = 2183828, upload-time = "2026-05-06T13:37:43.053Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/5565071e937d8e752842ac241463944c9eb14c87e2d269f2658a5bd05e98/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:d396ec2b979760aaf3218e76c24e65bd0aca24983298653b3a9d7a45f9e47b30", size = 2310000, upload-time = "2026-05-06T13:37:56.694Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c3/66883a5cec183e7fba4d024b4cbbe61851a63750ef606b0afecc46d1f2bf/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:86e1a4418c6cd97d60c95c71164158eaf7324fae7b0923264016baa993eba6fc", size = 2361286, upload-time = "2026-05-06T13:40:05.667Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2d/69abac8f838090bbecd5df894befb2c2619e7996a98ddb949db9f3b93225/pydantic_core-2.46.4-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:d51026d73fcfd93610abc7b27789c26b313920fcfb20e27462d74a7f8b06e983", size = 2193071, upload-time = "2026-05-06T13:38:08.682Z" }, +] + +[[package]] +name = "pydantic-extra-types" +version = "2.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/71/dba38ee2651f84f7842206adbd2233d8bbdb59fb85e9fa14232486a8c471/pydantic_extra_types-2.11.1.tar.gz", hash = "sha256:46792d2307383859e923d8fcefa82108b1a141f8a9c0198982b3832ab5ef1049", size = 172002, upload-time = "2026-03-16T08:08:03.92Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/c1/3226e6d7f5a4f736f38ac11a6fbb262d701889802595cdb0f53a885ac2e0/pydantic_extra_types-2.11.1-py3-none-any.whl", hash = "sha256:1722ea2bddae5628ace25f2aa685b69978ef533123e5638cfbddb999e0100ec1", size = 79526, upload-time = "2026-03-16T08:08:02.533Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/98/c8345dccdc31de4228c039a98f6467a941e39558da41c1744fbe29fa5666/pydantic_settings-2.14.0.tar.gz", hash = "sha256:24285fd4b0e0c06507dd9fdfd331ee23794305352aaec8fc4eb92d4047aeb67d", size = 235709, upload-time = "2026-04-20T13:37:40.293Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/dd/bebff3040138f00ae8a102d426b27349b9a49acc310fcae7f92112d867e3/pydantic_settings-2.14.0-py3-none-any.whl", hash = "sha256:fc8d5d692eb7092e43c8647c1c35a3ecd00e040fcf02ed86f4cb5458ca62182e", size = 60940, upload-time = "2026-04-20T13:37:38.586Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[package.optional-dependencies] +psutil = [ + { name = "psutil" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "python-jose" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, +] + +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.27" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/9b/f23807317a113dc36e74e75eb265a02dd1a4d9082abc3c1064acd22997c4/python_multipart-0.0.27.tar.gz", hash = "sha256:9870a6a8c5a20a5bf4f07c017bd1489006ff8836cff097b6933355ee2b49b602", size = 44043, upload-time = "2026-04-27T10:51:26.649Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/99/78/4126abcbdbd3c559d43e0db7f7b9173fc6befe45d39a2856cc0b8ec2a5a6/python_multipart-0.0.27-py3-none-any.whl", hash = "sha256:6fccfad17a27334bd0193681b369f476eda3409f17381a2d65aa7df3f7275645", size = 29254, upload-time = "2026-04-27T10:51:24.997Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = 
"sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "redis" +version = "7.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" }, +] + +[[package]] +name = "requests" +version = "2.33.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, +] + +[[package]] +name = "rich" +version = "15.0.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/8f/0722ca900cc807c13a6a0c696dacf35430f72e0ec571c4275d2371fca3e9/rich-15.0.0.tar.gz", hash = "sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36", size = 230680, upload-time = "2026-04-12T08:24:00.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/3b/64d4899d73f91ba49a8c18a8ff3f0ea8f1c1d75481760df8c68ef5235bf5/rich-15.0.0-py3-none-any.whl", hash = "sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb", size = 310654, upload-time = "2026-04-12T08:24:02.83Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.19.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/ba/dae9e3096651042754da419a4042bc1c75e07d615f9b15066d738838e4df/rich_toolkit-0.19.7.tar.gz", hash = "sha256:133c0915872da91d4c25d85342d5ec1dfacc69b63448af1a08a0d4b4f23ef46e", size = 195877, upload-time = "2026-02-24T16:06:20.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/3c/c923619f6d2f5fafcc96fec0aaf9550a46cd5b6481f06e0c6b66a2a4fed0/rich_toolkit-0.19.7-py3-none-any.whl", hash = "sha256:0288e9203728c47c5a4eb60fd2f0692d9df7455a65901ab6f898437a2ba5989d", size = 32963, upload-time = "2026-02-24T16:06:22.066Z" }, +] + +[[package]] +name = "rignore" +version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/25/41/b6e2be3069ef3b7f24e35d2911bd6deb83d20ed5642ad81d5a6d1c015473/rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c", size = 885285, upload-time = "2025-11-05T20:42:39.763Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/ba7f561b6062402022887706a7f2b2c2e2e2a28f1e3839202b0a2f77e36d/rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7", size = 823882, upload-time = "2025-11-05T20:42:23.488Z" }, + { url = "https://files.pythonhosted.org/packages/f5/81/4087453df35a90b07370647b19017029324950c1b9137d54bf1f33843f17/rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc", size = 899362, upload-time = "2025-11-05T20:40:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c9/390a8fdfabb76d71416be773bd9f162977bd483084f68daf19da1dec88a6/rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a", size = 873633, upload-time = "2025-11-05T20:41:06.193Z" }, + { url = "https://files.pythonhosted.org/packages/df/c9/79404fcb0faa76edfbc9df0901f8ef18568d1104919ebbbad6d608c888d1/rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d", size = 1167633, upload-time = "2025-11-05T20:41:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/b3466d32d445d158a0aceb80919085baaae495b1f540fb942f91d93b5e5b/rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d", size = 941434, upload-time = "2025-11-05T20:41:38.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/40/9cd949761a7af5bc27022a939c91ff622d29c7a0b66d0c13a863097dde2d/rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c", size = 959461, upload-time = "2025-11-05T20:42:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/87/1e1a145731f73bdb7835e11f80da06f79a00d68b370d9a847de979575e6d/rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049", size = 985323, upload-time = "2025-11-05T20:41:52.735Z" }, + { url = "https://files.pythonhosted.org/packages/6c/31/1ecff992fc3f59c4fcdcb6c07d5f6c1e6dfb55ccda19c083aca9d86fa1c6/rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce", size = 1079173, upload-time = "2025-11-05T21:40:12.007Z" }, + { url = "https://files.pythonhosted.org/packages/17/18/162eedadb4c2282fa4c521700dbf93c9b14b8842e8354f7d72b445b8d593/rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb", size = 1139012, upload-time = "2025-11-05T21:40:29.399Z" }, + { url = "https://files.pythonhosted.org/packages/78/96/a9ca398a8af74bb143ad66c2a31303c894111977e28b0d0eab03867f1b43/rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e", size = 1118827, upload-time = "2025-11-05T21:40:46.6Z" }, + { url = "https://files.pythonhosted.org/packages/9f/22/1c1a65047df864def9a047dbb40bc0b580b8289a4280e62779cd61ae21f2/rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345", size = 1128182, upload-time = "2025-11-05T21:41:04.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/f4/1526eb01fdc2235aca1fd9d0189bee4021d009a8dcb0161540238c24166e/rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5", size = 646547, upload-time = "2025-11-05T21:41:49.439Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/dda0983e1845706beb5826459781549a840fe5a7eb934abc523e8cd17814/rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a", size = 727139, upload-time = "2025-11-05T21:41:34.367Z" }, + { url = "https://files.pythonhosted.org/packages/e3/47/eb1206b7bf65970d41190b879e1723fc6bbdb2d45e53565f28991a8d9d96/rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581", size = 657598, upload-time = "2025-11-05T21:41:23.758Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, + { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/2b/ee96db17ac1835e024c5d0742eefb7e46de60020385ac883dd3d1cde2c1f/rignore-0.7.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5fd5ab3840b8c16851d327ed06e9b8be6459702a53e5ab1fc4073b684b3789e", size = 873963, upload-time = "2025-11-05T20:41:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8c/ad5a57bbb9d14d5c7e5960f712a8a0b902472ea3f4a2138cbf70d1777b75/rignore-0.7.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ced2a248352636a5c77504cb755dc02c2eef9a820a44d3f33061ce1bb8a7f2d2", size = 1169216, upload-time = "2025-11-05T20:41:23.73Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/5b00bc2a6bc1701e6878fca798cf5d9125eb3113193e33078b6fc0d99123/rignore-0.7.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04a3b73b75ddc12c9c9b21efcdaab33ca3832941d6f1d67bffd860941cd448a", size = 942942, upload-time = "2025-11-05T20:41:39.393Z" }, + { url = "https://files.pythonhosted.org/packages/85/e5/7f99bd0cc9818a91d0e8b9acc65b792e35750e3bdccd15a7ee75e64efca4/rignore-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24321efac92140b7ec910ac7c53ab0f0c86a41133d2bb4b0e6a7c94967f44dd", size = 959787, upload-time = "2025-11-05T20:42:09.765Z" }, + { url = "https://files.pythonhosted.org/packages/55/54/2ffea79a7c1eabcede1926347ebc2a81bc6b81f447d05b52af9af14948b9/rignore-0.7.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c7aa109d41e593785c55fdaa89ad80b10330affa9f9d3e3a51fa695f739b20", size = 984245, upload-time = "2025-11-05T20:41:54.062Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/e80f55dfe0f35787fa482aa18689b9c8251e045076c35477deb0007b3277/rignore-0.7.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1734dc49d1e9501b07852ef44421f84d9f378da9fbeda729e77db71f49cac28b", size = 1078647, upload-time = "2025-11-05T21:40:13.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/cf/2c64f0b6725149f7c6e7e5a909d14354889b4beaadddaa5fff023ec71084/rignore-0.7.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5719ea14ea2b652c0c0894be5dfde954e1853a80dea27dd2fbaa749618d837f5", size = 1139186, upload-time = "2025-11-05T21:40:31.27Z" }, + { url = "https://files.pythonhosted.org/packages/75/95/a86c84909ccc24af0d094b50d54697951e576c252a4d9f21b47b52af9598/rignore-0.7.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e23424fc7ce35726854f639cb7968151a792c0c3d9d082f7f67e0c362cfecca", size = 1117604, upload-time = "2025-11-05T21:40:48.07Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5e/13b249613fd5d18d58662490ab910a9f0be758981d1797789913adb4e918/rignore-0.7.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3efdcf1dd84d45f3e2bd2f93303d9be103888f56dfa7c3349b5bf4f0657ec696", size = 1127725, upload-time = "2025-11-05T21:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/c7/28/fa5dcd1e2e16982c359128664e3785f202d3eca9b22dd0b2f91c4b3d242f/rignore-0.7.6-cp312-cp312-win32.whl", hash = "sha256:ccca9d1a8b5234c76b71546fc3c134533b013f40495f394a65614a81f7387046", size = 646145, upload-time = "2025-11-05T21:41:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/26/87/69387fb5dd81a0f771936381431780b8cf66fcd2cfe9495e1aaf41548931/rignore-0.7.6-cp312-cp312-win_amd64.whl", hash = "sha256:c96a285e4a8bfec0652e0bfcf42b1aabcdda1e7625f5006d188e3b1c87fdb543", size = 726090, upload-time = "2025-11-05T21:41:36.485Z" }, + { url = "https://files.pythonhosted.org/packages/24/5f/e8418108dcda8087fb198a6f81caadbcda9fd115d61154bf0df4d6d3619b/rignore-0.7.6-cp312-cp312-win_arm64.whl", hash = "sha256:a64a750e7a8277a323f01ca50b7784a764845f6cce2fe38831cb93f0508d0051", size = 656317, upload-time = "2025-11-05T21:41:25.305Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8a/a4078f6e14932ac7edb171149c481de29969d96ddee3ece5dc4c26f9e0c3/rignore-0.7.6-cp313-cp313-macosx_10_12_x86_64.whl", hash 
= "sha256:2bdab1d31ec9b4fb1331980ee49ea051c0d7f7bb6baa28b3125ef03cdc48fdaf", size = 883057, upload-time = "2025-11-05T20:42:42.741Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8f/f8daacd177db4bf7c2223bab41e630c52711f8af9ed279be2058d2fe4982/rignore-0.7.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90f0a00ce0c866c275bf888271f1dc0d2140f29b82fcf33cdbda1e1a6af01010", size = 820150, upload-time = "2025-11-05T20:42:26.545Z" }, + { url = "https://files.pythonhosted.org/packages/36/31/b65b837e39c3f7064c426754714ac633b66b8c2290978af9d7f513e14aa9/rignore-0.7.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ad295537041dc2ed4b540fb1a3906bd9ede6ccdad3fe79770cd89e04e3c73c", size = 897406, upload-time = "2025-11-05T20:40:53.854Z" }, + { url = "https://files.pythonhosted.org/packages/ca/58/1970ce006c427e202ac7c081435719a076c478f07b3a23f469227788dc23/rignore-0.7.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f782dbd3a65a5ac85adfff69e5c6b101285ef3f845c3a3cae56a54bebf9fe116", size = 874050, upload-time = "2025-11-05T20:41:08.922Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/eb45db9f90137329072a732273be0d383cb7d7f50ddc8e0bceea34c1dfdf/rignore-0.7.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65cece3b36e5b0826d946494734c0e6aaf5a0337e18ff55b071438efe13d559e", size = 1167835, upload-time = "2025-11-05T20:41:24.997Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f1/6f1d72ddca41a64eed569680587a1236633587cc9f78136477ae69e2c88a/rignore-0.7.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7e4bb66c13cd7602dc8931822c02dfbbd5252015c750ac5d6152b186f0a8be0", size = 941945, upload-time = "2025-11-05T20:41:40.628Z" }, + { url = "https://files.pythonhosted.org/packages/48/6f/2f178af1c1a276a065f563ec1e11e7a9e23d4996fd0465516afce4b5c636/rignore-0.7.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:297e500c15766e196f68aaaa70e8b6db85fa23fdc075b880d8231fdfba738cd7", size = 959067, upload-time = "2025-11-05T20:42:11.09Z" }, + { url = "https://files.pythonhosted.org/packages/5b/db/423a81c4c1e173877c7f9b5767dcaf1ab50484a94f60a0b2ed78be3fa765/rignore-0.7.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a07084211a8d35e1a5b1d32b9661a5ed20669970b369df0cf77da3adea3405de", size = 984438, upload-time = "2025-11-05T20:41:55.443Z" }, + { url = "https://files.pythonhosted.org/packages/31/eb/c4f92cc3f2825d501d3c46a244a671eb737fc1bcf7b05a3ecd34abb3e0d7/rignore-0.7.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:181eb2a975a22256a1441a9d2f15eb1292839ea3f05606620bd9e1938302cf79", size = 1078365, upload-time = "2025-11-05T21:40:15.148Z" }, + { url = "https://files.pythonhosted.org/packages/26/09/99442f02794bd7441bfc8ed1c7319e890449b816a7493b2db0e30af39095/rignore-0.7.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:7bbcdc52b5bf9f054b34ce4af5269df5d863d9c2456243338bc193c28022bd7b", size = 1139066, upload-time = "2025-11-05T21:40:32.771Z" }, + { url = "https://files.pythonhosted.org/packages/2c/88/bcfc21e520bba975410e9419450f4b90a2ac8236b9a80fd8130e87d098af/rignore-0.7.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f2e027a6da21a7c8c0d87553c24ca5cc4364def18d146057862c23a96546238e", size = 1118036, upload-time = "2025-11-05T21:40:49.646Z" }, + { url = "https://files.pythonhosted.org/packages/e2/25/d37215e4562cda5c13312636393aea0bafe38d54d4e0517520a4cc0753ec/rignore-0.7.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee4a18b82cbbc648e4aac1510066682fe62beb5dc88e2c67c53a83954e541360", size = 1127550, upload-time = "2025-11-05T21:41:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/dc/76/a264ab38bfa1620ec12a8ff1c07778da89e16d8c0f3450b0333020d3d6dc/rignore-0.7.6-cp313-cp313-win32.whl", hash = "sha256:a7d7148b6e5e95035d4390396895adc384d37ff4e06781a36fe573bba7c283e5", size = 646097, upload-time = 
"2025-11-05T21:41:53.201Z" }, + { url = "https://files.pythonhosted.org/packages/62/44/3c31b8983c29ea8832b6082ddb1d07b90379c2d993bd20fce4487b71b4f4/rignore-0.7.6-cp313-cp313-win_amd64.whl", hash = "sha256:b037c4b15a64dced08fc12310ee844ec2284c4c5c1ca77bc37d0a04f7bff386e", size = 726170, upload-time = "2025-11-05T21:41:38.131Z" }, + { url = "https://files.pythonhosted.org/packages/aa/41/e26a075cab83debe41a42661262f606166157df84e0e02e2d904d134c0d8/rignore-0.7.6-cp313-cp313-win_arm64.whl", hash = "sha256:e47443de9b12fe569889bdbe020abe0e0b667516ee2ab435443f6d0869bd2804", size = 656184, upload-time = "2025-11-05T21:41:27.396Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b9/1f5bd82b87e5550cd843ceb3768b4a8ef274eb63f29333cf2f29644b3d75/rignore-0.7.6-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8e41be9fa8f2f47239ded8920cc283699a052ac4c371f77f5ac017ebeed75732", size = 882632, upload-time = "2025-11-05T20:42:44.063Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6b/07714a3efe4a8048864e8a5b7db311ba51b921e15268b17defaebf56d3db/rignore-0.7.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6dc1e171e52cefa6c20e60c05394a71165663b48bca6c7666dee4f778f2a7d90", size = 820760, upload-time = "2025-11-05T20:42:27.885Z" }, + { url = "https://files.pythonhosted.org/packages/ac/0f/348c829ea2d8d596e856371b14b9092f8a5dfbb62674ec9b3f67e4939a9d/rignore-0.7.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ce2268837c3600f82ab8db58f5834009dc638ee17103582960da668963bebc5", size = 899044, upload-time = "2025-11-05T20:40:55.336Z" }, + { url = "https://files.pythonhosted.org/packages/f0/30/2e1841a19b4dd23878d73edd5d82e998a83d5ed9570a89675f140ca8b2ad/rignore-0.7.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:690a3e1b54bfe77e89c4bacb13f046e642f8baadafc61d68f5a726f324a76ab6", size = 874144, upload-time = "2025-11-05T20:41:10.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/bf/0ce9beb2e5f64c30e3580bef09f5829236889f01511a125f98b83169b993/rignore-0.7.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09d12ac7a0b6210c07bcd145007117ebd8abe99c8eeb383e9e4673910c2754b2", size = 1168062, upload-time = "2025-11-05T20:41:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/b9/8b/571c178414eb4014969865317da8a02ce4cf5241a41676ef91a59aab24de/rignore-0.7.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a2b2b74a8c60203b08452479b90e5ce3dbe96a916214bc9eb2e5af0b6a9beb0", size = 942542, upload-time = "2025-11-05T20:41:41.838Z" }, + { url = "https://files.pythonhosted.org/packages/19/62/7a3cf601d5a45137a7e2b89d10c05b5b86499190c4b7ca5c3c47d79ee519/rignore-0.7.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc5a531ef02131e44359419a366bfac57f773ea58f5278c2cdd915f7d10ea94", size = 958739, upload-time = "2025-11-05T20:42:12.463Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1f/4261f6a0d7caf2058a5cde2f5045f565ab91aa7badc972b57d19ce58b14e/rignore-0.7.6-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7a1f77d9c4cd7e76229e252614d963442686bfe12c787a49f4fe481df49e7a9", size = 984138, upload-time = "2025-11-05T20:41:56.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/bf/628dfe19c75e8ce1f45f7c248f5148b17dfa89a817f8e3552ab74c3ae812/rignore-0.7.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ead81f728682ba72b5b1c3d5846b011d3e0174da978de87c61645f2ed36659a7", size = 1079299, upload-time = "2025-11-05T21:40:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/af/a5/be29c50f5c0c25c637ed32db8758fdf5b901a99e08b608971cda8afb293b/rignore-0.7.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:12ffd50f520c22ffdabed8cd8bfb567d9ac165b2b854d3e679f4bcaef11a9441", size = 1139618, upload-time = "2025-11-05T21:40:34.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/40/3c46cd7ce4fa05c20b525fd60f599165e820af66e66f2c371cd50644558f/rignore-0.7.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e5a16890fbe3c894f8ca34b0fcacc2c200398d4d46ae654e03bc9b3dbf2a0a72", size = 1117626, upload-time = "2025-11-05T21:40:51.494Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b9/aea926f263b8a29a23c75c2e0d8447965eb1879d3feb53cfcf84db67ed58/rignore-0.7.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3abab3bf99e8a77488ef6c7c9a799fac22224c28fe9f25cc21aa7cc2b72bfc0b", size = 1128144, upload-time = "2025-11-05T21:41:09.169Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f6/0d6242f8d0df7f2ecbe91679fefc1f75e7cd2072cb4f497abaab3f0f8523/rignore-0.7.6-cp314-cp314-win32.whl", hash = "sha256:eeef421c1782953c4375aa32f06ecae470c1285c6381eee2a30d2e02a5633001", size = 646385, upload-time = "2025-11-05T21:41:55.105Z" }, + { url = "https://files.pythonhosted.org/packages/d5/38/c0dcd7b10064f084343d6af26fe9414e46e9619c5f3224b5272e8e5d9956/rignore-0.7.6-cp314-cp314-win_amd64.whl", hash = "sha256:6aeed503b3b3d5af939b21d72a82521701a4bd3b89cd761da1e7dc78621af304", size = 725738, upload-time = "2025-11-05T21:41:39.736Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7a/290f868296c1ece914d565757ab363b04730a728b544beb567ceb3b2d96f/rignore-0.7.6-cp314-cp314-win_arm64.whl", hash = "sha256:104f215b60b3c984c386c3e747d6ab4376d5656478694e22c7bd2f788ddd8304", size = 656008, upload-time = "2025-11-05T21:41:29.028Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d2/3c74e3cd81fe8ea08a8dcd2d755c09ac2e8ad8fe409508904557b58383d3/rignore-0.7.6-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bb24a5b947656dd94cb9e41c4bc8b23cec0c435b58be0d74a874f63c259549e8", size = 882835, upload-time = "2025-11-05T20:42:45.443Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/a772a34b6b63154877433ac2d048364815b24c2dd308f76b212c408101a2/rignore-0.7.6-cp314-cp314t-macosx_11_0_arm64.whl", hash 
= "sha256:5b1e33c9501cefe24b70a1eafd9821acfd0ebf0b35c3a379430a14df089993e3", size = 820301, upload-time = "2025-11-05T20:42:29.226Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/054880b09c0b1b61d17eeb15279d8bf729c0ba52b36c3ada52fb827cbb3c/rignore-0.7.6-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bec3994665a44454df86deb762061e05cd4b61e3772f5b07d1882a8a0d2748d5", size = 897611, upload-time = "2025-11-05T20:40:56.475Z" }, + { url = "https://files.pythonhosted.org/packages/1e/40/b2d1c169f833d69931bf232600eaa3c7998ba4f9a402e43a822dad2ea9f2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26cba2edfe3cff1dfa72bddf65d316ddebf182f011f2f61538705d6dbaf54986", size = 873875, upload-time = "2025-11-05T20:41:11.561Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/ca5ae93d83a1a60e44b21d87deb48b177a8db1b85e82fc8a9abb24a8986d/rignore-0.7.6-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffa86694fec604c613696cb91e43892aa22e1fec5f9870e48f111c603e5ec4e9", size = 1167245, upload-time = "2025-11-05T20:41:28.29Z" }, + { url = "https://files.pythonhosted.org/packages/a5/52/cf3dce392ba2af806cba265aad6bcd9c48bb2a6cb5eee448d3319f6e505b/rignore-0.7.6-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48efe2ed95aa8104145004afb15cdfa02bea5cdde8b0344afeb0434f0d989aa2", size = 941750, upload-time = "2025-11-05T20:41:43.111Z" }, + { url = "https://files.pythonhosted.org/packages/ec/be/3f344c6218d779395e785091d05396dfd8b625f6aafbe502746fcd880af2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dcae43eb44b7f2457fef7cc87f103f9a0013017a6f4e62182c565e924948f21", size = 958896, upload-time = "2025-11-05T20:42:13.784Z" }, + { url = "https://files.pythonhosted.org/packages/c9/34/d3fa71938aed7d00dcad87f0f9bcb02ad66c85d6ffc83ba31078ce53646a/rignore-0.7.6-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:2cd649a7091c0dad2f11ef65630d30c698d505cbe8660dd395268e7c099cc99f", size = 983992, upload-time = "2025-11-05T20:41:58.022Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/52a697158e9920705bdbd0748d59fa63e0f3233fb92e9df9a71afbead6ca/rignore-0.7.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42de84b0289d478d30ceb7ae59023f7b0527786a9a5b490830e080f0e4ea5aeb", size = 1078181, upload-time = "2025-11-05T21:40:18.151Z" }, + { url = "https://files.pythonhosted.org/packages/ac/65/aa76dbcdabf3787a6f0fd61b5cc8ed1e88580590556d6c0207960d2384bb/rignore-0.7.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:875a617e57b53b4acbc5a91de418233849711c02e29cc1f4f9febb2f928af013", size = 1139232, upload-time = "2025-11-05T21:40:35.966Z" }, + { url = "https://files.pythonhosted.org/packages/08/44/31b31a49b3233c6842acc1c0731aa1e7fb322a7170612acf30327f700b44/rignore-0.7.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8703998902771e96e49968105207719f22926e4431b108450f3f430b4e268b7c", size = 1117349, upload-time = "2025-11-05T21:40:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ae/1b199a2302c19c658cf74e5ee1427605234e8c91787cfba0015f2ace145b/rignore-0.7.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:602ef33f3e1b04c1e9a10a3c03f8bc3cef2d2383dcc250d309be42b49923cabc", size = 1127702, upload-time = "2025-11-05T21:41:10.881Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = "2025-11-05T21:42:00.095Z" }, + { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, + { 
url = "https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/a6250ff0c49a3cdb943910ada4116e708118e9b901c878cfae616c80a904/rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f", size = 886470, upload-time = "2025-11-05T20:42:52.314Z" }, + { url = "https://files.pythonhosted.org/packages/35/af/c69c0c51b8f9f7914d95c4ea91c29a2ac067572048cae95dd6d2efdbe05d/rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25", size = 825976, upload-time = "2025-11-05T20:42:35.118Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d2/1b264f56132264ea609d3213ab603d6a27016b19559a1a1ede1a66a03dcd/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a", size = 899739, upload-time = "2025-11-05T20:41:01.518Z" }, + { url = "https://files.pythonhosted.org/packages/55/e4/b3c5dfdd8d8a10741dfe7199ef45d19a0e42d0c13aa377c83bd6caf65d90/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538", size = 874843, upload-time = "2025-11-05T20:41:17.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/10/d6f3750233881a2a154cefc9a6a0a9b19da526b19f7f08221b552c6f827d/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b", size = 1170348, upload-time = "2025-11-05T20:41:34.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/10/ad98ca05c9771c15af734cee18114a3c280914b6e34fde9ffea2e61e88aa/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32", size = 942315, upload-time = "2025-11-05T20:41:48.508Z" }, + { url = "https://files.pythonhosted.org/packages/de/00/ab5c0f872acb60d534e687e629c17e0896c62da9b389c66d3aa16b817aa8/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767", size = 961047, upload-time = "2025-11-05T20:42:19.403Z" }, + { url = "https://files.pythonhosted.org/packages/b8/86/3030fdc363a8f0d1cd155b4c453d6db9bab47a24fcc64d03f61d9d78fe6a/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0", size = 986090, upload-time = "2025-11-05T20:42:03.581Z" }, + { url = "https://files.pythonhosted.org/packages/33/b8/133aa4002cee0ebbb39362f94e4898eec7fbd09cec9fcbce1cd65b355b7f/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7", size = 1079656, upload-time = "2025-11-05T21:40:24.89Z" }, + { url = "https://files.pythonhosted.org/packages/67/56/36d5d34210e5e7dfcd134eed8335b19e80ae940ee758f493e4f2b344dd70/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c", size = 1139789, upload-time = "2025-11-05T21:40:42.119Z" }, + { url = "https://files.pythonhosted.org/packages/6b/5b/bb4f9420802bf73678033a4a55ab1bede36ce2e9b41fec5f966d83d932b3/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79", size = 1120308, upload-time = "2025-11-05T21:40:59.402Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/8b/a1299085b28a2f6135e30370b126e3c5055b61908622f2488ade67641479/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb", size = 1129444, upload-time = "2025-11-05T21:41:17.906Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 
11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = 
"2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.59.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/e0/9bf5e5fc7442b10880f3ec0eff0ef4208b84a099606f343ec4f5445227fb/sentry_sdk-2.59.0.tar.gz", hash = "sha256:cd265808ef8bf3f3edf69b527c0a0b2b6b1322762679e55b8987db2e9584aec1", size = 447331, upload-time = "2026-05-04T12:19:06.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/00/b8cc413748fb6383d1582e7cda51314f99743351c462a92dc690d5b5853b/sentry_sdk-2.59.0-py2.py3-none-any.whl", hash = "sha256:abcf65ee9a9d9cdebf9ad369782408ecca9c1c792686ef06ba34f5ab233527fe", size = 468432, upload-time = "2026-05-04T12:19:04.741Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = 
"sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sqladmin" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, + { name = "python-multipart" }, + { name = "sqlalchemy" }, + { name = "starlette" }, + { name = "wtforms" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/42/ac352f9afc118491a57c3acd1d54c84447f8d6a7b9420fdd9e4fe28b3ba7/sqladmin-0.25.0.tar.gz", hash = "sha256:370f183eca5ea95281176c086b23d704773e6c77b745342753601a9a9abcb93c", size = 1440449, upload-time = "2026-04-18T01:25:40.15Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/1c/4a9bdeb541478a27fd8bf2af85faa3a1574a8ab3de0f07e8c01b29ccd9ff/sqladmin-0.25.0-py3-none-any.whl", hash = "sha256:fd45a7c78e989cb1cf8242f94bd108475f5d35622d79035a2cb147350f54ce1d", size = 1456019, upload-time = "2026-04-18T01:25:38.523Z" }, +] + 
+[[package]] +name = "sqlalchemy" +version = "2.0.49" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/b5/e3617cc67420f8f403efebd7b043128f94775e57e5b84e7255203390ceae/sqlalchemy-2.0.49-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5070135e1b7409c4161133aa525419b0062088ed77c92b1da95366ec5cbebbe", size = 2159126, upload-time = "2026-04-03T16:50:13.242Z" }, + { url = "https://files.pythonhosted.org/packages/20/9b/91ca80403b17cd389622a642699e5f6564096b698e7cdcbcbb6409898bc4/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ac7a3e245fd0310fd31495eb61af772e637bdf7d88ee81e7f10a3f271bff014", size = 3315509, upload-time = "2026-04-03T16:54:49.332Z" }, + { url = "https://files.pythonhosted.org/packages/b1/61/0722511d98c54de95acb327824cb759e8653789af2b1944ab1cc69d32565/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d4e5a0ceba319942fa6b585cf82539288a61e314ef006c1209f734551ab9536", size = 3315014, upload-time = "2026-04-03T16:56:56.376Z" }, + { url = "https://files.pythonhosted.org/packages/46/55/d514a653ffeb4cebf4b54c47bec32ee28ad89d39fafba16eeed1d81dccd5/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:3ddcb27fb39171de36e207600116ac9dfd4ae46f86c82a9bf3934043e80ebb88", size = 3267388, upload-time = "2026-04-03T16:54:51.272Z" }, + { url = "https://files.pythonhosted.org/packages/2f/16/0dcc56cb6d3335c1671a2258f5d2cb8267c9a2260e27fde53cbfb1b3540a/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:32fe6a41ad97302db2931f05bb91abbcc65b5ce4c675cd44b972428dd2947700", size = 3289602, upload-time = "2026-04-03T16:56:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/f8ab6fb04470a133cd80608db40aa292e6bae5f162c3a3d4ab19544a67af/sqlalchemy-2.0.49-cp311-cp311-win32.whl", hash = "sha256:46d51518d53edfbe0563662c96954dc8fcace9832332b914375f45a99b77cc9a", size = 2119044, upload-time = "2026-04-03T17:00:53.455Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/55a6d627d04b6ebb290693681d7683c7da001eddf90b60cfcc41ee907978/sqlalchemy-2.0.49-cp311-cp311-win_amd64.whl", hash = "sha256:951d4a210744813be63019f3df343bf233b7432aadf0db54c75802247330d3af", size = 2143642, upload-time = "2026-04-03T17:00:54.769Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" }, + { url = "https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" }, + { url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" }, + { url = "https://files.pythonhosted.org/packages/ae/81/81755f50eb2478eaf2049728491d4ea4f416c1eb013338682173259efa09/sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120", size = 2154547, upload-time = "2026-04-03T16:53:08.64Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/3494270da80811d08bcfa247404292428c4fe16294932bce5593f215cad9/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2", size 
= 3280782, upload-time = "2026-04-03T17:07:43.508Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f5/038741f5e747a5f6ea3e72487211579d8cbea5eb9827a9cbd61d0108c4bd/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3", size = 3297156, upload-time = "2026-04-03T17:12:27.697Z" }, + { url = "https://files.pythonhosted.org/packages/88/50/a6af0ff9dc954b43a65ca9b5367334e45d99684c90a3d3413fc19a02d43c/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7", size = 3228832, upload-time = "2026-04-03T17:07:45.38Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d1/5f6bdad8de0bf546fc74370939621396515e0cdb9067402d6ba1b8afbe9a/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33", size = 3267000, upload-time = "2026-04-03T17:12:29.657Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/ad62227b4a9819a5e1c6abff77c0f614fa7c9326e5a3bdbee90f7139382b/sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b", size = 2115641, upload-time = "2026-04-03T17:05:43.989Z" }, + { url = "https://files.pythonhosted.org/packages/17/3a/7215b1b7d6d49dc9a87211be44562077f5f04f9bb5a59552c1c8e2d98173/sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148", size = 2141498, upload-time = "2026-04-03T17:05:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/28/4b/52a0cb2687a9cd1648252bb257be5a1ba2c2ded20ba695c65756a55a15a4/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518", size = 3560807, upload-time = 
"2026-04-03T16:58:31.666Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d8/fda95459204877eed0458550d6c7c64c98cc50c2d8d618026737de9ed41a/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d", size = 3527481, upload-time = "2026-04-03T17:06:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/2aac8b78ac6487240cf7afef8f203ca783e8796002dc0cf65c4ee99ff8bb/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0", size = 3468565, upload-time = "2026-04-03T16:58:33.414Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/ce71cfa82c50a373fd2148b3c870be05027155ce791dc9a5dcf439790b8b/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08", size = 3477769, upload-time = "2026-04-03T17:06:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e8/0a9f5c1f7c6f9ca480319bf57c2d7423f08d31445974167a27d14483c948/sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d", size = 2143319, upload-time = "2026-04-03T17:02:04.328Z" }, + { url = "https://files.pythonhosted.org/packages/0e/51/fb5240729fbec73006e137c4f7a7918ffd583ab08921e6ff81a999d6517a/sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba", size = 2175104, upload-time = "2026-04-03T17:02:05.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/33/bf28f618c0a9597d14e0b9ee7d1e0622faff738d44fe986ee287cdf1b8d0/sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e", size = 2156356, upload-time = "2026-04-03T16:53:09.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a7/5f476227576cb8644650eff68cc35fa837d3802b997465c96b8340ced1e2/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a", size = 3276486, upload-time = "2026-04-03T17:07:46.9Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/efc7c0bf3a1c5eef81d397f6fddac855becdbb11cb38ff957888603014a7/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066", size = 3281479, upload-time = "2026-04-03T17:12:32.226Z" }, + { url = "https://files.pythonhosted.org/packages/91/68/bb406fa4257099c67bd75f3f2261b129c63204b9155de0d450b37f004698/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187", size = 3226269, upload-time = "2026-04-03T17:07:48.678Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/acb56c00cca9f251f437cb49e718e14f7687505749ea9255d7bd8158a6df/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401", size = 3248260, upload-time = "2026-04-03T17:12:34.381Z" }, + { url = "https://files.pythonhosted.org/packages/56/19/6a20ea25606d1efd7bd1862149bb2a22d1451c3f851d23d887969201633f/sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5", size = 2118463, upload-time = "2026-04-03T17:05:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4f/8297e4ed88e80baa1f5aa3c484a0ee29ef3c69c7582f206c916973b75057/sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5", size = 2144204, upload-time = "2026-04-03T17:05:48.694Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/33/95e7216df810c706e0cd3655a778604bbd319ed4f43333127d465a46862d/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977", size = 3565474, upload-time = "2026-04-03T16:58:35.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a4/ed7b18d8ccf7f954a83af6bb73866f5bc6f5636f44c7731fbb741f72cc4f/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01", size = 3530567, upload-time = "2026-04-03T17:06:04.587Z" }, + { url = "https://files.pythonhosted.org/packages/73/a3/20faa869c7e21a827c4a2a42b41353a54b0f9f5e96df5087629c306df71e/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61", size = 3474282, upload-time = "2026-04-03T16:58:37.131Z" }, + { url = "https://files.pythonhosted.org/packages/b7/50/276b9a007aa0764304ad467eceb70b04822dc32092492ee5f322d559a4dc/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a", size = 3480406, upload-time = "2026-04-03T17:06:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c3/c80fcdb41905a2df650c2a3e0337198b6848876e63d66fe9188ef9003d24/sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158", size = 2149151, upload-time = "2026-04-03T17:02:07.281Z" }, + { url = "https://files.pythonhosted.org/packages/05/52/9f1a62feab6ed368aff068524ff414f26a6daebc7361861035ae00b05530/sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7", size = 2184178, upload-time = "2026-04-03T17:02:08.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" }, +] + +[[package]] +name = "sqlalchemy-utils" +version = "0.41.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/bf/abfd5474cdd89ddd36dbbde9c6efba16bfa7f5448913eba946fed14729da/SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990", size = 138017, upload-time = "2024-03-24T15:17:28.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/f0/dc4757b83ac1ab853cf222df8535ed73973e0c203d983982ba7b8bc60508/SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e", size = 93083, upload-time = "2024-03-24T15:17:24.533Z" }, +] + +[[package]] +name = "starlette" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, +] + +[[package]] +name = "taskiq" +version = "0.12.3" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "aiohttp" }, + { name = "anyio" }, + { name = "packaging" }, + { name = "pycron" }, + { name = "pydantic" }, + { name = "taskiq-dependencies" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/e1/9bbf459974168c6a763f984a1fd6dbb06f5d0f07754e1dcd7361ef679543/taskiq-0.12.3.tar.gz", hash = "sha256:f9a3ae1b21d494a331e79e617ad116f3088e8bc98b9dc8cb9441e7957168cb76", size = 397625, upload-time = "2026-05-04T14:09:27.053Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/04/c9ccf3b1aeee67bf8a79e02522b0847940b3e3f2dae316e6c4ab50a3cb47/taskiq-0.12.3-py3-none-any.whl", hash = "sha256:113ba7f65c71076f58f017f540d17c6389df5e76516b19a1f91b087dcd7a5803", size = 91787, upload-time = "2026-05-04T14:09:28.42Z" }, +] + +[[package]] +name = "taskiq-aio-pika" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aio-pika" }, + { name = "aiostream" }, + { name = "taskiq" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/05/e9f4e5cbc7f9777a09f493e502242922df2d3e3779364d0292313995d68c/taskiq_aio_pika-0.6.0.tar.gz", hash = "sha256:0a4ec304a5e860e205aaea5077d90d2a009a4842f3ee008b5185c29301992ed9", size = 9492, upload-time = "2026-02-28T12:24:20.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/57/b06600675ef8ab6352f30632c0ece20d592f922531b3f490a0559ed792ea/taskiq_aio_pika-0.6.0-py3-none-any.whl", hash = "sha256:6bff38b61b24afd7d41b78ea9ffca0702fe9653e82289ca1287b063a53af2145", size = 10789, upload-time = "2026-02-28T12:24:19.654Z" }, +] + +[[package]] +name = "taskiq-dependencies" +version = "1.5.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/90/47a627696e53bfdcacabc3e8c05b73bf1424685bcb5f17209cb8b12da1bf/taskiq_dependencies-1.5.7.tar.gz", hash = "sha256:0d3b240872ef152b719153b9526d866d2be978aeeaea6600e878414babc2dcb4", size = 14875, upload-time = "2025-02-26T22:07:39.876Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/99/6d/4a012f2de002c2e93273f5e7d3e3feea02f7fdbb7b75ca2ca1dd10703091/taskiq_dependencies-1.5.7-py3-none-any.whl", hash = "sha256:6fcee5d159bdb035ef915d4d848826169b6f06fe57cc2297a39b62ea3e76036f", size = 13801, upload-time = "2025-02-26T22:07:38.622Z" }, +] + +[[package]] +name = "taskiq-redis" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "redis" }, + { name = "taskiq" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/0a/c555ac1d922e03b9fde2b1b609572a310a252f4bb79fbf964c3039efb6ff/taskiq_redis-1.2.2.tar.gz", hash = "sha256:103c488d143138bab8fc84044dbe68cd3561251090695a6042120398e9915325", size = 14460, upload-time = "2026-02-03T20:26:58.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/a6/a28f8e06540c041c03e9028a100c5b8949a01c4308f286a6c74197c3bf32/taskiq_redis-1.2.2-py3-none-any.whl", hash = "sha256:574d085c0c07f7fa9945e51195fe2db5b9d3c2a07bcfdc5a7ca323eae5319dff", size = 20666, upload-time = "2026-02-03T20:26:55.706Z" }, +] + +[[package]] +name = "testcontainers" +version = "4.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docker" }, + { name = "python-dotenv" }, + { name = "typing-extensions" }, + { name = "urllib3" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/ac/a597c3a0e02b26cbed6dd07df68be1e57684766fd1c381dee9b170a99690/testcontainers-4.14.2.tar.gz", hash = "sha256:1340ccf16fe3acd9389a6c9e1d9ab21d9fe99a8afdf8165f89c3e69c1967d239", size = 166841, upload-time = "2026-03-18T05:19:16.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/2d/26b8b30067d94339afee62c3edc9b803a6eb9332f521ba77d8aaab5de873/testcontainers-4.14.2-py3-none-any.whl", hash = "sha256:0d0522c3cd8f8d9627cda41f7a6b51b639fa57bdc492923c045117933c668d68", size = 125712, upload-time = "2026-03-18T05:19:15.29Z" }, +] + +[[package]] +name = 
"testcontainers-core" +version = "0.0.1rc1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docker" }, + { name = "wrapt" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/84/ac813e7aed7a527e5bc4c7cbb6e47d72498e76b8371e3805ccab96619b38/testcontainers_core-0.0.1rc1-py3-none-any.whl", hash = "sha256:69a8bf2ddb52ac2d03c26401b12c70db0453cced40372ad783d6dce417e52095", size = 11628, upload-time = "2023-01-06T16:37:25.437Z" }, +] + +[[package]] +name = "testcontainers-postgres" +version = "0.0.1rc1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "psycopg2-binary" }, + { name = "sqlalchemy" }, + { name = "testcontainers-core" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/ac/03bbe688c090b5e16b507b4e36d7c4e5d95e2a0861dd77922801088edfb1/testcontainers_postgres-0.0.1rc1-py3-none-any.whl", hash = "sha256:1bd0afcff2c236c08ffbf3e4926e713d8c58e20df82c31e62fb9cca70582fd5a", size = 2906, upload-time = "2023-01-06T16:37:45.675Z" }, +] + +[[package]] +name = "typer" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/51/9aed62104cea109b820bbd6c14245af756112017d309da813ef107d42e7e/typer-0.25.1.tar.gz", hash = "sha256:9616eb8853a09ffeabab1698952f33c6f29ffdbceb4eaeecf571880e8d7664cc", size = 122276, upload-time = "2026-04-30T19:32:16.964Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/f9/2b3ff4e56e5fa7debfaf9eb135d0da96f3e9a1d5b27222223c7296336e5f/typer-0.25.1-py3-none-any.whl", hash = "sha256:75caa44ed46a03fb2dab8808753ffacdbfea88495e74c85a28c5eefcf5f39c89", size = 58409, upload-time = "2026-04-30T19:32:18.271Z" }, +] + +[[package]] +name = "types-pyasn1" +version = "0.6.0.20260408" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/58/c0/02f897fc8543f64fa6b1ca6a30d388e37c4ec2f761f469a2d9a29b89cdef/types_pyasn1-0.6.0.20260408.tar.gz", hash = "sha256:32dc90927adbe504fd2eee83ae30cf5ef934e5db0d1d94886071fed47eb50c8c", size = 17312, upload-time = "2026-04-08T04:27:16.874Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/a5/473e06d5aaec3730aab5a9d40c2044e673c927412c24bd7f3fa0df7e95d3/types_pyasn1-0.6.0.20260408-py3-none-any.whl", hash = "sha256:ee7fbd98bce61193c5d4f8f7812fa53cddc5b8cc5ceb9fcda6eea539947c6d6b", size = 24044, upload-time = "2026-04-08T04:27:16.002Z" }, +] + +[[package]] +name = "types-python-jose" +version = "3.5.0.20260408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/b1/87cdb410d22913df39d7aad8864f94a4f5ad8d507ee07556888fdbe55e19/types_python_jose-3.5.0.20260408.tar.gz", hash = "sha256:3f8dccdc327bfffea7a81084ea1cea722fa499f13c1d04f7978b491dd36e0cf1", size = 11989, upload-time = "2026-04-08T04:34:10.577Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/83/df2b34e64f0a674935d718471cf10fb392a7e5bdb0e9e7c739885b62d274/types_python_jose-3.5.0.20260408-py3-none-any.whl", hash = "sha256:968d8a8eac1ff9da249d6335a2bb9f82288d59ba23afe91fcc2662eb9f485e2a", size = 14694, upload-time = "2026-04-08T04:34:09.747Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash 
= "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "tzdata" +version = "2026.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/19/1b9b0e29f30c6d35cb345486df41110984ea67ae69dddbc0e8a100999493/tzdata-2026.2.tar.gz", hash = "sha256:9173fde7d80d9018e02a662e168e5a2d04f87c41ea174b139fbef642eda62d10", size = 198254, upload-time = "2026-04-24T15:22:08.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/e4/dccd7f47c4b64213ac01ef921a1337ee6e30e8c6466046018326977efd95/tzdata-2026.2-py2.py3-none-any.whl", hash = "sha256:bbe9af844f658da81a5f95019480da3a89415801f6cc966806612cc7169bffe7", size = 349321, upload-time = "2026-04-24T15:22:05.876Z" }, +] + +[[package]] +name = "ua-parser" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ua-parser-builtins" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/98/5e4b52d772a048af122a6fc5ce365c311efb9f5e79c55fd4fdd7c9f59e83/ua_parser-1.0.2.tar.gz", hash = 
"sha256:bab404ad42fb37f943107da2f6003ffc79724d11cc95076a7a539513371779da", size = 33239, upload-time = "2026-04-05T20:14:28.229Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/7c/6367995ff57aaa2d9e1055adbaec2519cf5a979780a83a93fdf8c6ec37be/ua_parser-1.0.2-py3-none-any.whl", hash = "sha256:0f8e6d0484af2a9ff804bba5a4fe696e87c028eaba98ad9a7dfae873fef7788a", size = 31219, upload-time = "2026-04-05T20:14:26.913Z" }, +] + +[[package]] +name = "ua-parser-builtins" +version = "202605" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/42/178db21aab1815583fcdb8ae465fc006b384fbe679412b11ddf8aae90f38/ua_parser_builtins-202605-py3-none-any.whl", hash = "sha256:a86976baa4b7c69a54269fe54091e3f0c7666f15a0f893855ff907a3bb6d878c", size = 90591, upload-time = "2026-05-01T21:25:50.636Z" }, +] + +[[package]] +name = "urllib3" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/0c/06f8b233b8fd13b9e5ee11424ef85419ba0d8ba0b3138bf360be2ff56953/urllib3-2.7.0.tar.gz", hash = "sha256:231e0ec3b63ceb14667c67be60f2f2c40a518cb38b03af60abc813da26505f4c", size = 433602, upload-time = "2026-05-07T16:13:18.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/3e/5db95bcf282c52709639744ca2a8b149baccf648e39c8cc87553df9eae0c/urllib3-2.7.0-py3-none-any.whl", hash = "sha256:9fb4c81ebbb1ce9531cce37674bbc6f1360472bc18ca9a553ede278ef7276897", size = 131087, upload-time = "2026-05-07T16:13:17.151Z" }, +] + +[[package]] +name = "user-agents" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ua-parser" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/e1/63c5bfb485a945010c8cbc7a52f85573561737648d36b30394248730a7bc/user-agents-2.2.0.tar.gz", hash = "sha256:d36d25178db65308d1458c5fa4ab39c9b2619377010130329f3955e7626ead26", size = 9525, upload-time = 
"2020-08-23T06:01:56.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/1c/20bb3d7b2bad56d881e3704131ddedbb16eb787101306887dff349064662/user_agents-2.2.0-py3-none-any.whl", hash = "sha256:a98c4dc72ecbc64812c4534108806fb0a0b3a11ec3fd1eafe807cee5b0a942e7", size = 9614, upload-time = "2020-08-23T06:01:54.047Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.46.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/93/041fca8274050e40e6791f267d82e0e2e27dd165627bd640d3e0e378d877/uvicorn-0.46.0.tar.gz", hash = "sha256:fb9da0926999cc6cb22dc7cd71a94a632f078e6ae47ff683c5c420750fb7413d", size = 88758, upload-time = "2026-04-23T07:16:00.151Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/a3/5b1562db76a5a488274b2332a97199b32d0442aca0ed193697fd47786316/uvicorn-0.46.0-py3-none-any.whl", hash = "sha256:bbebbcbed972d162afca128605223022bedd345b7bc7855ce66deb31487a9048", size = 70926, upload-time = "2026-04-23T07:15:58.355Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = 
"watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = 
"sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, 
upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = 
"2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = 
"2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { 
url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, 
upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = 
"2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "wrapt" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/64/925f213fdcbb9baeb1530449ac71a4d57fc361c053d06bf78d0c5c7cd80c/wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e", size = 81678, upload-time = "2026-03-06T02:53:25.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/81/60c4471fce95afa5922ca09b88a25f03c93343f759aae0f31fb4412a85c7/wrapt-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:96159a0ee2b0277d44201c3b5be479a9979cf154e8c82fa5df49586a8e7679bb", size = 60666, upload-time = "2026-03-06T02:52:58.934Z" }, + { url = "https://files.pythonhosted.org/packages/6b/be/80e80e39e7cb90b006a0eaf11c73ac3a62bbfb3068469aec15cc0bc795de/wrapt-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98ba61833a77b747901e9012072f038795de7fc77849f1faa965464f3f87ff2d", size = 61601, upload-time = "2026-03-06T02:53:00.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/be/d7c88cd9293c859fc74b232abdc65a229bb953997995d6912fc85af18323/wrapt-2.1.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:767c0dbbe76cae2a60dd2b235ac0c87c9cccf4898aef8062e57bead46b5f6894", size = 114057, upload-time = "2026-03-06T02:52:44.08Z" }, + { url = "https://files.pythonhosted.org/packages/ea/25/36c04602831a4d685d45a93b3abea61eca7fe35dab6c842d6f5d570ef94a/wrapt-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c691a6bc752c0cc4711cc0c00896fcd0f116abc253609ef64ef930032821842", size = 116099, upload-time = "2026-03-06T02:54:56.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4e/98a6eb417ef551dc277bec1253d5246b25003cf36fdf3913b65cb7657a56/wrapt-2.1.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f3b7d73012ea75aee5844de58c88f44cf62d0d62711e39da5a82824a7c4626a8", size = 112457, upload-time = "2026-03-06T02:53:52.842Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a6/a6f7186a5297cad8ec53fd7578533b28f795fdf5372368c74bd7e6e9841c/wrapt-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:577dff354e7acd9d411eaf4bfe76b724c89c89c8fc9b7e127ee28c5f7bcb25b6", size = 115351, upload-time = "2026-03-06T02:53:32.684Z" }, + { url = "https://files.pythonhosted.org/packages/97/6f/06e66189e721dbebd5cf20e138acc4d1150288ce118462f2fcbff92d38db/wrapt-2.1.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d7b6fd105f8b24e5bd23ccf41cb1d1099796524bcc6f7fbb8fe576c44befbc9", size = 111748, upload-time = "2026-03-06T02:53:08.455Z" }, + { url = "https://files.pythonhosted.org/packages/ef/43/4808b86f499a51370fbdbdfa6cb91e9b9169e762716456471b619fca7a70/wrapt-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:866abdbf4612e0b34764922ef8b1c5668867610a718d3053d59e24a5e5fcfc15", size = 113783, upload-time = "2026-03-06T02:53:02.02Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/2c/a3f28b8fa7ac2cefa01cfcaca3471f9b0460608d012b693998cd61ef43df/wrapt-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5a0a0a3a882393095573344075189eb2d566e0fd205a2b6414e9997b1b800a8b", size = 57977, upload-time = "2026-03-06T02:53:27.844Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c3/2b1c7bd07a27b1db885a2fab469b707bdd35bddf30a113b4917a7e2139d2/wrapt-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:64a07a71d2730ba56f11d1a4b91f7817dc79bc134c11516b75d1921a7c6fcda1", size = 60336, upload-time = "2026-03-06T02:54:28.104Z" }, + { url = "https://files.pythonhosted.org/packages/ec/5c/76ece7b401b088daa6503d6264dd80f9a727df3e6042802de9a223084ea2/wrapt-2.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:b89f095fe98bc12107f82a9f7d570dc83a0870291aeb6b1d7a7d35575f55d98a", size = 58756, upload-time = "2026-03-06T02:53:16.319Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b6/1db817582c49c7fcbb7df6809d0f515af29d7c2fbf57eb44c36e98fb1492/wrapt-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ff2aad9c4cda28a8f0653fc2d487596458c2a3f475e56ba02909e950a9efa6a9", size = 61255, upload-time = "2026-03-06T02:52:45.663Z" }, + { url = "https://files.pythonhosted.org/packages/a2/16/9b02a6b99c09227c93cd4b73acc3678114154ec38da53043c0ddc1fba0dc/wrapt-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6433ea84e1cfacf32021d2a4ee909554ade7fd392caa6f7c13f1f4bf7b8e8748", size = 61848, upload-time = "2026-03-06T02:53:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/af/aa/ead46a88f9ec3a432a4832dfedb84092fc35af2d0ba40cd04aea3889f247/wrapt-2.1.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c20b757c268d30d6215916a5fa8461048d023865d888e437fab451139cad6c8e", size = 121433, upload-time = "2026-03-06T02:54:40.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/9f/742c7c7cdf58b59085a1ee4b6c37b013f66ac33673a7ef4aaed5e992bc33/wrapt-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79847b83eb38e70d93dc392c7c5b587efe65b3e7afcc167aa8abd5d60e8761c8", size = 123013, upload-time = "2026-03-06T02:53:26.58Z" }, + { url = "https://files.pythonhosted.org/packages/e8/44/2c3dd45d53236b7ed7c646fcf212251dc19e48e599debd3926b52310fafb/wrapt-2.1.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f8fba1bae256186a83d1875b2b1f4e2d1242e8fac0f58ec0d7e41b26967b965c", size = 117326, upload-time = "2026-03-06T02:53:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/74/e2/b17d66abc26bd96f89dec0ecd0ef03da4a1286e6ff793839ec431b9fae57/wrapt-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3d3b35eedcf5f7d022291ecd7533321c4775f7b9cd0050a31a68499ba45757c", size = 121444, upload-time = "2026-03-06T02:54:09.5Z" }, + { url = "https://files.pythonhosted.org/packages/3c/62/e2977843fdf9f03daf1586a0ff49060b1b2fc7ff85a7ea82b6217c1ae36e/wrapt-2.1.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6f2c5390460de57fa9582bc8a1b7a6c86e1a41dfad74c5225fc07044c15cc8d1", size = 116237, upload-time = "2026-03-06T02:54:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/27fc67914e68d740bce512f11734aec08696e6b17641fef8867c00c949fc/wrapt-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7dfa9f2cf65d027b951d05c662cc99ee3bd01f6e4691ed39848a7a5fffc902b2", size = 120563, upload-time = "2026-03-06T02:53:20.412Z" }, + { url = "https://files.pythonhosted.org/packages/ec/9f/b750b3692ed2ef4705cb305bd68858e73010492b80e43d2a4faa5573cbe7/wrapt-2.1.2-cp312-cp312-win32.whl", hash = "sha256:eba8155747eb2cae4a0b913d9ebd12a1db4d860fc4c829d7578c7b989bd3f2f0", size = 58198, upload-time = "2026-03-06T02:53:37.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/b2/feecfe29f28483d888d76a48f03c4c4d8afea944dbee2b0cd3380f9df032/wrapt-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1c51c738d7d9faa0b3601708e7e2eda9bf779e1b601dce6c77411f2a1b324a63", size = 60441, upload-time = "2026-03-06T02:52:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/44/e1/e328f605d6e208547ea9fd120804fcdec68536ac748987a68c47c606eea8/wrapt-2.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:c8e46ae8e4032792eb2f677dbd0d557170a8e5524d22acc55199f43efedd39bf", size = 58836, upload-time = "2026-03-06T02:53:22.053Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7a/d936840735c828b38d26a854e85d5338894cda544cb7a85a9d5b8b9c4df7/wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b", size = 61259, upload-time = "2026-03-06T02:53:41.922Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/9a9b9a90ac8ca11c2fdb6a286cb3a1fc7dd774c00ed70929a6434f6bc634/wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e", size = 61851, upload-time = "2026-03-06T02:52:48.672Z" }, + { url = "https://files.pythonhosted.org/packages/03/a9/5b7d6a16fd6533fed2756900fc8fc923f678179aea62ada6d65c92718c00/wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb", size = 121446, upload-time = "2026-03-06T02:54:14.013Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/34c443690c847835cfe9f892be78c533d4f32366ad2888972c094a897e39/wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca", size = 123056, upload-time = "2026-03-06T02:54:10.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/b9/ff205f391cb708f67f41ea148545f2b53ff543a7ac293b30d178af4d2271/wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267", size = 117359, upload-time = "2026-03-06T02:53:03.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/1ea04d7747825119c3c9a5e0874a40b33594ada92e5649347c457d982805/wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f", size = 121479, upload-time = "2026-03-06T02:53:45.844Z" }, + { url = "https://files.pythonhosted.org/packages/78/cc/ee3a011920c7a023b25e8df26f306b2484a531ab84ca5c96260a73de76c0/wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8", size = 116271, upload-time = "2026-03-06T02:54:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/e5ff7ded41b76d802cf1191288473e850d24ba2e39a6ec540f21ae3b57cb/wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413", size = 120573, upload-time = "2026-03-06T02:52:50.163Z" }, + { url = "https://files.pythonhosted.org/packages/47/c5/242cae3b5b080cd09bacef0591691ba1879739050cc7c801ff35c8886b66/wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6", size = 58205, upload-time = "2026-03-06T02:53:47.494Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/c358c61e7a50f290958809b3c61ebe8b3838ea3e070d7aac9814f95a0528/wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1", size = 60452, upload-time = "2026-03-06T02:53:30.038Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/66/c8a6fcfe321295fd8c0ab1bd685b5a01462a9b3aa2f597254462fc2bc975/wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf", size = 58842, upload-time = "2026-03-06T02:52:52.114Z" }, + { url = "https://files.pythonhosted.org/packages/da/55/9c7052c349106e0b3f17ae8db4b23a691a963c334de7f9dbd60f8f74a831/wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b", size = 63075, upload-time = "2026-03-06T02:53:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/ce7b4006f7218248dd71b7b2b732d0710845a0e49213b18faef64811ffef/wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18", size = 63719, upload-time = "2026-03-06T02:54:33.452Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e5/2ca472e80b9e2b7a17f106bb8f9df1db11e62101652ce210f66935c6af67/wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d", size = 152643, upload-time = "2026-03-06T02:52:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/36/42/30f0f2cefca9d9cbf6835f544d825064570203c3e70aa873d8ae12e23791/wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015", size = 158805, upload-time = "2026-03-06T02:54:25.441Z" }, + { url = "https://files.pythonhosted.org/packages/bb/67/d08672f801f604889dcf58f1a0b424fe3808860ede9e03affc1876b295af/wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92", size = 145990, upload-time = "2026-03-06T02:53:57.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/a7/fd371b02e73babec1de6ade596e8cd9691051058cfdadbfd62a5898f3295/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf", size = 155670, upload-time = "2026-03-06T02:54:55.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/9fe0095dfdb621009f40117dcebf41d7396c2c22dca6eac779f4c007b86c/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67", size = 144357, upload-time = "2026-03-06T02:54:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/ec7b4a254abbe4cde9fa15c5d2cca4518f6b07d0f1b77d4ee9655e30280e/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a", size = 150269, upload-time = "2026-03-06T02:53:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/2fabe8ebf148f4ee3c782aae86a795cc68ffe7d432ef550f234025ce0cfa/wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd", size = 59894, upload-time = "2026-03-06T02:54:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fb/9ba66fc2dedc936de5f8073c0217b5d4484e966d87723415cc8262c5d9c2/wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f", size = 63197, upload-time = "2026-03-06T02:54:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1c/012d7423c95d0e337117723eb8ecf73c622ce15a97847e84cf3f8f26cd7e/wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679", size = 60363, upload-time = "2026-03-06T02:54:48.093Z" }, + { url = "https://files.pythonhosted.org/packages/39/25/e7ea0b417db02bb796182a5316398a75792cd9a22528783d868755e1f669/wrapt-2.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = 
"sha256:1370e516598854e5b4366e09ce81e08bfe94d42b0fd569b88ec46cc56d9164a9", size = 61418, upload-time = "2026-03-06T02:53:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0f/fa539e2f6a770249907757eaeb9a5ff4deb41c026f8466c1c6d799088a9b/wrapt-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6de1a3851c27e0bd6a04ca993ea6f80fc53e6c742ee1601f486c08e9f9b900a9", size = 61914, upload-time = "2026-03-06T02:52:53.37Z" }, + { url = "https://files.pythonhosted.org/packages/53/37/02af1867f5b1441aaeda9c82deed061b7cd1372572ddcd717f6df90b5e93/wrapt-2.1.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:de9f1a2bbc5ac7f6012ec24525bdd444765a2ff64b5985ac6e0692144838542e", size = 120417, upload-time = "2026-03-06T02:54:30.74Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b7/0138a6238c8ba7476c77cf786a807f871672b37f37a422970342308276e7/wrapt-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:970d57ed83fa040d8b20c52fe74a6ae7e3775ae8cff5efd6a81e06b19078484c", size = 122797, upload-time = "2026-03-06T02:54:51.539Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ad/819ae558036d6a15b7ed290d5b14e209ca795dd4da9c58e50c067d5927b0/wrapt-2.1.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3969c56e4563c375861c8df14fa55146e81ac11c8db49ea6fb7f2ba58bc1ff9a", size = 117350, upload-time = "2026-03-06T02:54:37.651Z" }, + { url = "https://files.pythonhosted.org/packages/8b/2d/afc18dc57a4600a6e594f77a9ae09db54f55ba455440a54886694a84c71b/wrapt-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:57d7c0c980abdc5f1d98b11a2aa3bb159790add80258c717fa49a99921456d90", size = 121223, upload-time = "2026-03-06T02:54:35.221Z" }, + { url = "https://files.pythonhosted.org/packages/b9/5b/5ec189b22205697bc56eb3b62aed87a1e0423e9c8285d0781c7a83170d15/wrapt-2.1.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = 
"sha256:776867878e83130c7a04237010463372e877c1c994d449ca6aaafeab6aab2586", size = 116287, upload-time = "2026-03-06T02:54:19.654Z" }, + { url = "https://files.pythonhosted.org/packages/f7/2d/f84939a7c9b5e6cdd8a8d0f6a26cabf36a0f7e468b967720e8b0cd2bdf69/wrapt-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fab036efe5464ec3291411fabb80a7a39e2dd80bae9bcbeeca5087fdfa891e19", size = 119593, upload-time = "2026-03-06T02:54:16.697Z" }, + { url = "https://files.pythonhosted.org/packages/0b/fe/ccd22a1263159c4ac811ab9374c061bcb4a702773f6e06e38de5f81a1bdc/wrapt-2.1.2-cp314-cp314-win32.whl", hash = "sha256:e6ed62c82ddf58d001096ae84ce7f833db97ae2263bff31c9b336ba8cfe3f508", size = 58631, upload-time = "2026-03-06T02:53:06.498Z" }, + { url = "https://files.pythonhosted.org/packages/65/0a/6bd83be7bff2e7efaac7b4ac9748da9d75a34634bbbbc8ad077d527146df/wrapt-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:467e7c76315390331c67073073d00662015bb730c566820c9ca9b54e4d67fd04", size = 60875, upload-time = "2026-03-06T02:53:50.252Z" }, + { url = "https://files.pythonhosted.org/packages/6c/c0/0b3056397fe02ff80e5a5d72d627c11eb885d1ca78e71b1a5c1e8c7d45de/wrapt-2.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:da1f00a557c66225d53b095a97eace0fc5349e3bfda28fa34ffae238978ee575", size = 59164, upload-time = "2026-03-06T02:53:59.128Z" }, + { url = "https://files.pythonhosted.org/packages/71/ed/5d89c798741993b2371396eb9d4634f009ff1ad8a6c78d366fe2883ea7a6/wrapt-2.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:62503ffbc2d3a69891cf29beeaccdb4d5e0a126e2b6a851688d4777e01428dbb", size = 63163, upload-time = "2026-03-06T02:52:54.873Z" }, + { url = "https://files.pythonhosted.org/packages/c6/8c/05d277d182bf36b0a13d6bd393ed1dec3468a25b59d01fba2dd70fe4d6ae/wrapt-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7e6cd120ef837d5b6f860a6ea3745f8763805c418bb2f12eeb1fa6e25f22d22", size = 63723, upload-time = "2026-03-06T02:52:56.374Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/27/6c51ec1eff4413c57e72d6106bb8dec6f0c7cdba6503d78f0fa98767bcc9/wrapt-2.1.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3769a77df8e756d65fbc050333f423c01ae012b4f6731aaf70cf2bef61b34596", size = 152652, upload-time = "2026-03-06T02:53:23.79Z" }, + { url = "https://files.pythonhosted.org/packages/db/4c/d7dd662d6963fc7335bfe29d512b02b71cdfa23eeca7ab3ac74a67505deb/wrapt-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a76d61a2e851996150ba0f80582dd92a870643fa481f3b3846f229de88caf044", size = 158807, upload-time = "2026-03-06T02:53:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/1e5eea1a78d539d346765727422976676615814029522c76b87a95f6bcdd/wrapt-2.1.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6f97edc9842cf215312b75fe737ee7c8adda75a89979f8e11558dfff6343cc4b", size = 146061, upload-time = "2026-03-06T02:52:57.574Z" }, + { url = "https://files.pythonhosted.org/packages/89/bc/62cabea7695cd12a288023251eeefdcb8465056ddaab6227cb78a2de005b/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4006c351de6d5007aa33a551f600404ba44228a89e833d2fadc5caa5de8edfbf", size = 155667, upload-time = "2026-03-06T02:53:39.422Z" }, + { url = "https://files.pythonhosted.org/packages/e9/99/6f2888cd68588f24df3a76572c69c2de28287acb9e1972bf0c83ce97dbc1/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a9372fc3639a878c8e7d87e1556fa209091b0a66e912c611e3f833e2c4202be2", size = 144392, upload-time = "2026-03-06T02:54:22.41Z" }, + { url = "https://files.pythonhosted.org/packages/40/51/1dfc783a6c57971614c48e361a82ca3b6da9055879952587bc99fe1a7171/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3144b027ff30cbd2fca07c0a87e67011adb717eb5f5bd8496325c17e454257a3", size = 150296, upload-time = "2026-03-06T02:54:07.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/38/cbb8b933a0201076c1f64fc42883b0023002bdc14a4964219154e6ff3350/wrapt-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:3b8d15e52e195813efe5db8cec156eebe339aaf84222f4f4f051a6c01f237ed7", size = 60539, upload-time = "2026-03-06T02:54:00.594Z" }, + { url = "https://files.pythonhosted.org/packages/82/dd/e5176e4b241c9f528402cebb238a36785a628179d7d8b71091154b3e4c9e/wrapt-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:08ffa54146a7559f5b8df4b289b46d963a8e74ed16ba3687f99896101a3990c5", size = 63969, upload-time = "2026-03-06T02:54:39Z" }, + { url = "https://files.pythonhosted.org/packages/5c/99/79f17046cf67e4a95b9987ea129632ba8bcec0bc81f3fb3d19bdb0bd60cd/wrapt-2.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:72aaa9d0d8e4ed0e2e98019cea47a21f823c9dd4b43c7b77bba6679ffcca6a00", size = 60554, upload-time = "2026-03-06T02:53:14.132Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c7/8528ac2dfa2c1e6708f647df7ae144ead13f0a31146f43c7264b4942bf12/wrapt-2.1.2-py3-none-any.whl", hash = "sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8", size = 43993, upload-time = "2026-03-06T02:53:12.905Z" }, +] + +[[package]] +name = "wtforms" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/96d10183c3470f1836846f7b9527d6cb0b6c2226ebca40f36fa29f23de60/wtforms-3.1.2.tar.gz", hash = "sha256:f8d76180d7239c94c6322f7990ae1216dae3659b7aa1cee94b6318bdffb474b9", size = 134705, upload-time = "2024-01-06T07:52:41.075Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/19/c3232f35e24dccfad372e9f341c4f3a1166ae7c66e4e1351a9467c921cc1/wtforms-3.1.2-py3-none-any.whl", hash = "sha256:bf831c042829c8cdbad74c27575098d541d039b1faa74c771545ecac916f2c07", size = 145961, upload-time = "2024-01-06T07:52:43.023Z" }, +] + +[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" }, + { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", 
size = 92079, upload-time = "2026-03-01T22:04:48.925Z" }, + { url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" }, + { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" }, + { url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" }, + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = 
"2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = 
"2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = 
"2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = 
"sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +] diff --git a/cli/README.md b/cli/README.md new file mode 100644 index 00000000..54e7de66 --- /dev/null +++ b/cli/README.md @@ -0,0 +1,48 @@ +# bp — FastAPI-boilerplate CLI + +`bp` is the developer/operator command-line tool for projects built on the +FastAPI boilerplate. It generates deployment artifacts, helps prepare the +runtime environment, and serves as the host for plugin commands and feature +generators. + +## Install + +This package is part of the workspace. From the repo root: + +```bash +uv sync # syncs the workspace; bp is available via `uv run bp` +uv run bp --help +``` + +To install `bp` machine-wide so it works outside this repo: + +```bash +uv tool install --editable ./cli +bp --help +``` + +## What's here + +``` +cli/src/cli/ +├── app.py root Typer app + plugin discovery +├── plugins.py entry-point loaders for bp.commands and bp.features +├── commands/ in-tree command sub-apps +│ ├── deploy.py bp deploy generate +│ └── env.py bp env gen-secret / bp env validate +├── features/ feature framework (manifest, plan, installer) +│ └── _builtins/ in-tree features +│ └── deploy/ compose/Dockerfile templates for local/prod/nginx +└── lib/ shared helpers (project discovery, prompts, render) +``` + +## Plugin extension points + +Two kinds of plugins, kept deliberately separate: + +- `bp.commands` entry-point group — third-party Typer sub-apps mounted under + `bp ` (e.g. `bp aws deploy`). +- `bp.features` entry-point group — code generators with a manifest that + `bp feature` can list, install, and remove. + +See `cli/src/cli/plugins.py` for the discovery contracts. 
diff --git a/cli/pyproject.toml b/cli/pyproject.toml new file mode 100644 index 00000000..3f0b85f1 --- /dev/null +++ b/cli/pyproject.toml @@ -0,0 +1,53 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "fastapi-boilerplate-cli" +version = "0.1.0" +description = "bp — developer/operator CLI for the FastAPI boilerplate. Hosts in-tree commands and discovers third-party plugins." +authors = [{ name = "Benav Labs", email = "contact@benav.io" }] +license = { text = "MIT" } +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "typer>=0.12", + "jinja2>=3.1", + "fastapi-boilerplate", +] + +[project.scripts] +bp = "cli.app:app" + +[tool.uv.sources] +fastapi-boilerplate = { workspace = true } + +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["*"] + +[tool.setuptools.package-data] +"*" = ["*.j2", "*.toml", "*.conf", "py.typed"] + +[tool.ruff] +line-length = 128 + +[tool.ruff.lint] +select = ["E", "F", "I", "UP"] +extend-select = ["UP006", "UP007", "UP035", "UP039", "PLC0415"] + +[tool.ruff.lint.isort] +known-first-party = ["cli"] + +[tool.mypy] +python_version = "3.11" +warn_return_any = true +warn_unused_configs = true +warn_unused_ignores = true + +[[tool.mypy.overrides]] +module = "infrastructure.*" +ignore_missing_imports = true diff --git a/cli/src/cli/__init__.py b/cli/src/cli/__init__.py new file mode 100644 index 00000000..d32794ee --- /dev/null +++ b/cli/src/cli/__init__.py @@ -0,0 +1,12 @@ +"""bp — the FastAPI-boilerplate command-line tool. + +The CLI is a Typer application with two extension points: + +- `bp.commands` entry-point group: third-party packages can register + top-level Typer sub-apps that mount under `bp `. +- `bp.features` entry-point group: third-party packages can register + ``Feature`` instances that ``bp feature`` can list, install, and remove. 
+ +In-tree commands and features live alongside this package and follow +the same contracts as plugins. +""" diff --git a/cli/src/cli/app.py b/cli/src/cli/app.py new file mode 100644 index 00000000..1de04161 --- /dev/null +++ b/cli/src/cli/app.py @@ -0,0 +1,55 @@ +"""bp — root Typer application and entry point. + +Mounts in-tree command sub-apps and discovers third-party plugins. +The shipped console script (``[project.scripts] bp``) targets +``app`` directly. +""" + +from __future__ import annotations + +import typer + +from . import plugins as _plugins +from .commands import deploy as _deploy_cmd +from .commands import env as _env_cmd + +app = typer.Typer( + name="bp", + help="FastAPI-boilerplate command-line tool.", + no_args_is_help=True, + pretty_exceptions_show_locals=False, +) + +# In-tree commands. Mounted before plugin discovery so a plugin can't +# silently shadow a built-in by registering the same name. +app.add_typer(_deploy_cmd.app, name="deploy", help="Generate deployment artifacts (Dockerfile, compose, nginx config).") +app.add_typer(_env_cmd.app, name="env", help="Inspect and prepare the runtime environment.") + + +def _mount_command_plugins() -> None: + """Mount external Typer sub-apps registered under ``bp.commands``.""" + builtin_names = {"deploy", "env", "feature"} + for name, sub_app in _plugins.discover_command_plugins().items(): + if name in builtin_names: + typer.secho( + f"warning: plugin command '{name}' shadows a built-in; ignoring.", + fg=typer.colors.YELLOW, + err=True, + ) + continue + app.add_typer(sub_app, name=name) + + +_mount_command_plugins() + + +@app.callback() +def _root() -> None: + """bp — FastAPI-boilerplate command-line tool.""" + # Typer uses this docstring as the root help text. The body is + # intentionally empty: the callback exists so options like + # ``--install-completion`` work without arguments. 
+ + +if __name__ == "__main__": # pragma: no cover + app() diff --git a/cli/src/cli/commands/__init__.py b/cli/src/cli/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cli/src/cli/commands/deploy.py b/cli/src/cli/commands/deploy.py new file mode 100644 index 00000000..cc2a1674 --- /dev/null +++ b/cli/src/cli/commands/deploy.py @@ -0,0 +1,91 @@ +"""``bp deploy`` — generate deployment artifacts. + +Today this is just a wrapper around the in-tree ``deploy`` feature. +Other deploy-adjacent commands (``bp deploy nginx-tls``, ``bp deploy +github-actions``) can mount here as siblings. +""" + +from __future__ import annotations + +from enum import StrEnum +from pathlib import Path + +import typer + +from ..features.installer import FeatureInstaller +from ..features.registry import get_feature +from ..lib.project import discover_project +from ..lib.prompts import error, info + +app = typer.Typer(no_args_is_help=True, help="Generate deployment artifacts.") + + +class DeployMode(StrEnum): + local = "local" + prod = "prod" + nginx = "nginx" + + +@app.command("generate") +def generate( + mode: DeployMode = typer.Argument( + ..., + help="Deployment mode to generate. Pick `local` for hot-reload dev, `prod` for " + "single-host production, `nginx` for production behind a reverse proxy.", + ), + output_dir: Path = typer.Option( + None, + "--output-dir", + "-o", + help="Where to write the compose file. 
Defaults to the repo root.", + ), + api_port: int = typer.Option(8000, "--api-port", help="Host port to publish the API on."), + workers: int = typer.Option(4, "--workers", help="Number of API workers (prod / nginx only)."), + force: bool = typer.Option(False, "--force", "-f", help="Overwrite existing files without asking."), + yes: bool = typer.Option(False, "--yes", "-y", help="Assume yes for all prompts."), + dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be written, don't touch disk."), +) -> None: + """Generate ``docker-compose.yml`` (and ``nginx/default.conf`` for nginx mode).""" + project = discover_project(output_dir) + feature = get_feature("deploy") + if feature is None: # pragma: no cover — built-in feature, always present + error("deploy feature is not registered.") + raise typer.Exit(code=1) + + target_root = (output_dir or project.repo_root).resolve() + target_root.mkdir(parents=True, exist_ok=True) + + params: dict = { + "mode": mode.value, + "api_port": api_port, + "workers": workers, + "compose_target": target_root / "docker-compose.yml", + } + if mode == DeployMode.nginx: + params["nginx_conf_target"] = target_root / "nginx" / "default.conf" + + plan = feature.plan(params, project) + + installer = FeatureInstaller(dry_run=dry_run, assume_yes=force or yes) + info(f"deploy: generating '{mode.value}' compose for {project.repo_root}") + result = installer.apply(plan) + + if result.files_skipped: + info("") + info(f"{len(result.files_skipped)} file(s) skipped.") + if dry_run: + info("") + info("dry-run complete — no files were written.") + return + + info("") + info("done. 
Next steps:") + if mode == DeployMode.local: + info(" docker compose up --build") + elif mode == DeployMode.prod: + info(" cp backend/.env.example backend/.env # if you haven't already") + info(" docker compose up -d --build") + else: + info(" cp backend/.env.example backend/.env # if you haven't already") + info(" docker compose up -d --build") + info(" curl -i http://localhost/api/v1/health") diff --git a/cli/src/cli/commands/env.py b/cli/src/cli/commands/env.py new file mode 100644 index 00000000..82b99bdd --- /dev/null +++ b/cli/src/cli/commands/env.py @@ -0,0 +1,111 @@ +"""``bp env`` — inspect and prepare the runtime environment. + +Two commands today: + +- ``bp env gen-secret`` prints a 64-char hex string suitable for + ``SECRET_KEY``. No filesystem I/O — pipe it into your secrets manager. + +- ``bp env validate`` runs the production security validator against + the current settings, regardless of the configured environment, and + prints critical errors and warnings. +""" + +from __future__ import annotations + +import logging +import secrets + +import typer + +# The installed package layout puts `infrastructure`, `modules`, etc. +# at the top of the import tree (see [tool.setuptools.packages.find] +# in backend/pyproject.toml). The CLI is only callable when the package +# is installed, so this form is always valid here. 
+from infrastructure.config.settings import get_settings +from infrastructure.security.production_validator import ( + ProductionSecurityError, + ProductionSecurityValidator, +) + +from ..lib.prompts import error, info, success, warn + +app = typer.Typer(no_args_is_help=True, help="Inspect and prepare the runtime environment.") + + +@app.command("gen-secret") +def gen_secret( + bytes_: int = typer.Option(32, "--bytes", min=16, max=128, help="Number of random bytes (hex output is 2x)."), +) -> None: + """Generate a high-entropy hex secret suitable for ``SECRET_KEY``.""" + typer.echo(secrets.token_hex(bytes_)) + + +@app.command("validate") +def validate() -> None: + """Run the production security validator against the current settings. + + Forces production-mode validation regardless of ``ENVIRONMENT`` so + you can audit a dev or staging config the same way prod is gated. + """ + settings = get_settings() + + class _ForcedProd(ProductionSecurityValidator): + def _is_production(self) -> bool: + return True + + validator = _ForcedProd(settings) + + captured_warnings: list[str] = [] + handler = _CapturingHandler(captured_warnings) + validator.logger.addHandler(handler) + # Silence the validator's normal logging while we drive it — we render + # the result ourselves below. 
+ previous_level = validator.logger.level + previous_propagate = validator.logger.propagate + validator.logger.setLevel(logging.CRITICAL + 1) + validator.logger.propagate = False + handler.setLevel(logging.WARNING) # still capture warnings via the dedicated handler + + critical_errors: list[str] = [] + try: + try: + validator.validate_production_security() + except ProductionSecurityError as exc: + critical_errors = [line.strip(" •") for line in str(exc).splitlines()[1:] if line.strip()] + finally: + validator.logger.removeHandler(handler) + validator.logger.setLevel(previous_level) + validator.logger.propagate = previous_propagate + + if not critical_errors and not captured_warnings: + success("No issues found. Configuration would pass production validation.") + return + + if critical_errors: + error(f"Critical ({len(critical_errors)}):") + for item in critical_errors: + typer.secho(f" • {item}", fg=typer.colors.RED) + + if captured_warnings: + if critical_errors: + info("") + warn(f"Warnings ({len(captured_warnings)}):") + for item in captured_warnings: + typer.secho(f" • {item}", fg=typer.colors.YELLOW) + + if critical_errors: + raise typer.Exit(code=1) + + +class _CapturingHandler(logging.Handler): + """Capture only the warning lines emitted by the production validator.""" + + def __init__(self, sink: list[str]) -> None: + super().__init__(level=logging.WARNING) + self._sink = sink + + def emit(self, record: logging.LogRecord) -> None: + message = record.getMessage() + marker = "PRODUCTION SECURITY WARNING: " + if marker in message: + self._sink.append(message.split(marker, 1)[1]) diff --git a/cli/src/cli/features/__init__.py b/cli/src/cli/features/__init__.py new file mode 100644 index 00000000..424beee4 --- /dev/null +++ b/cli/src/cli/features/__init__.py @@ -0,0 +1,25 @@ +"""Feature framework: typed manifests, plans, and the installer. 
+ +A feature is a self-contained unit of work that mutates a user's +project — generating files, editing existing files, running setup +hooks. ``Feature`` subclasses describe themselves via a manifest and +emit a ``FeaturePlan`` that the installer executes. + +For v1 only ``FileOp`` operations are supported. ``Codemod`` and ``Hook`` +slots in the plan exist for forward compatibility — installers raise +``NotImplementedError`` if a plan asks for one. +""" + +from .base import Codemod, Feature, FeatureManifest, FeaturePlan, FileOp, Hook +from .installer import FeatureInstaller, InstallResult + +__all__ = [ + "Codemod", + "Feature", + "FeatureInstaller", + "FeatureManifest", + "FeaturePlan", + "FileOp", + "Hook", + "InstallResult", +] diff --git a/cli/src/cli/features/_builtins/__init__.py b/cli/src/cli/features/_builtins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cli/src/cli/features/_builtins/deploy/__init__.py b/cli/src/cli/features/_builtins/deploy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cli/src/cli/features/_builtins/deploy/feature.py b/cli/src/cli/features/_builtins/deploy/feature.py new file mode 100644 index 00000000..f0b07b55 --- /dev/null +++ b/cli/src/cli/features/_builtins/deploy/feature.py @@ -0,0 +1,74 @@ +"""The deploy feature — generate Dockerfile-friendly compose files. + +Three modes are supported in v1: + +- ``local``: hot-reload dev stack (mounts source, exposes port 8000) +- ``prod``: production stack with multiple workers, ports exposed directly +- ``nginx``: ``prod`` plus an nginx reverse proxy on port 80 + +All modes target the existing multi-stage ``backend/Dockerfile`` — +no per-mode Dockerfile is generated. Modes only differ in the +compose file (and an optional nginx config). 
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from ....lib.project import ProjectContext +from ...base import Feature, FeatureManifest, FeaturePlan, FileOp + +SUPPORTED_MODES: tuple[str, ...] = ("local", "prod", "nginx") + +_TEMPLATES_ROOT = Path(__file__).parent / "templates" + + +class DeployFeature(Feature): + def manifest(self) -> FeatureManifest: + return FeatureManifest( + name="deploy", + version="1.0", + summary="Generate a docker-compose.yml for local, prod, or nginx-fronted deployments.", + ) + + def plan(self, params: dict[str, Any], project: ProjectContext) -> FeaturePlan: + mode = params.get("mode") + if mode not in SUPPORTED_MODES: + raise ValueError(f"Unsupported deploy mode: {mode!r}. Choose one of {SUPPORTED_MODES}.") + + project_name = params.get("project_name") or project.repo_root.name + api_port = int(params.get("api_port", 8000)) + workers = int(params.get("workers", 4)) + postgres_image = params.get("postgres_image", "postgres:16-alpine") + redis_image = params.get("redis_image", "redis:7-alpine") + nginx_image = params.get("nginx_image", "nginx:1.27-alpine") + backend_context = params.get("backend_context", "./backend") + env_file = params.get("env_file", "./backend/.env") + + context = { + "mode": mode, + "project_name": project_name, + "api_port": api_port, + "workers": workers, + "postgres_image": postgres_image, + "redis_image": redis_image, + "nginx_image": nginx_image, + "backend_context": backend_context, + "env_file": env_file, + } + + compose_target = Path(params.get("compose_target") or (project.repo_root / "docker-compose.yml")) + files: list[FileOp] = [ + FileOp(template=f"{mode}/docker-compose.yml.j2", target=compose_target), + ] + if mode == "nginx": + nginx_target = Path(params.get("nginx_conf_target") or (project.repo_root / "nginx" / "default.conf")) + files.append(FileOp(template="nginx/default.conf.j2", target=nginx_target)) + + return FeaturePlan( + 
manifest=self.manifest(), + templates_root=_TEMPLATES_ROOT, + template_context=context, + files=tuple(files), + ) diff --git a/cli/src/cli/features/_builtins/deploy/templates/local/docker-compose.yml.j2 b/cli/src/cli/features/_builtins/deploy/templates/local/docker-compose.yml.j2 new file mode 100644 index 00000000..42daba99 --- /dev/null +++ b/cli/src/cli/features/_builtins/deploy/templates/local/docker-compose.yml.j2 @@ -0,0 +1,90 @@ +# Generated by `bp deploy generate local`. +# Hot-reload dev stack: API + Taskiq worker + Postgres + Redis. +# Source is mounted into the container so file changes trigger a reload. + +name: {{ project_name }} + +services: + api: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: dev + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + volumes: + - {{ backend_context }}/src:/app/src + - {{ backend_context }}/tests:/app/tests + ports: + - "{{ api_port }}:8000" + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + worker: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: dev + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + working_dir: /app/src + command: > + taskiq worker infrastructure.taskiq.worker:default_broker + --reload + volumes: + - {{ backend_context }}/src:/app/src + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + + postgres: + image: {{ postgres_image }} + env_file: + - {{ env_file }} + environment: + # Map the boilerplate's POSTGRES_* vars to the official image's expected names. 
+ POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-postgres} + volumes: + - postgres-data:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"] + interval: 5s + timeout: 3s + retries: 10 + + redis: + image: {{ redis_image }} + volumes: + - redis-data:/data + ports: + - "6379:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 10 + +volumes: + postgres-data: + redis-data: diff --git a/cli/src/cli/features/_builtins/deploy/templates/nginx/default.conf.j2 b/cli/src/cli/features/_builtins/deploy/templates/nginx/default.conf.j2 new file mode 100644 index 00000000..06ef1316 --- /dev/null +++ b/cli/src/cli/features/_builtins/deploy/templates/nginx/default.conf.j2 @@ -0,0 +1,29 @@ +# Generated by `bp deploy generate nginx`. +# Drop-in nginx vhost in front of the FastAPI app container. + +upstream {{ project_name }}_api { + server api:8000; +} + +server { + listen 80; + server_name _; + + client_max_body_size 25M; + + # Forwarded headers — FastAPI / Starlette respect these by default. + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + + # Long-running endpoints (exports, AI, etc.) want generous timeouts. + proxy_connect_timeout 60s; + proxy_send_timeout 300s; + proxy_read_timeout 300s; + + location / { + proxy_pass http://{{ project_name }}_api; + proxy_redirect off; + } +} diff --git a/cli/src/cli/features/_builtins/deploy/templates/nginx/docker-compose.yml.j2 b/cli/src/cli/features/_builtins/deploy/templates/nginx/docker-compose.yml.j2 new file mode 100644 index 00000000..65c22985 --- /dev/null +++ b/cli/src/cli/features/_builtins/deploy/templates/nginx/docker-compose.yml.j2 @@ -0,0 +1,114 @@ +# Generated by `bp deploy generate nginx`. 
+# Production stack with an nginx reverse proxy in front of the API. +# Public traffic enters on port 80 (and 443 once you wire TLS); the +# API container only exposes itself on the internal compose network. + +name: {{ project_name }} + +services: + migrate: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: migrate + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CONFIRM_PRODUCTION_MIGRATION: "yes" + depends_on: + postgres: + condition: service_healthy + restart: "no" + + api: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: prod + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + WORKERS: "{{ workers }}" + expose: + - "8000" + depends_on: + migrate: + condition: service_completed_successfully + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + + worker: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: base + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + working_dir: /app/src + command: taskiq worker infrastructure.taskiq.worker:default_broker --workers 2 + depends_on: + migrate: + condition: service_completed_successfully + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + + nginx: + image: {{ nginx_image }} + ports: + - "80:80" + volumes: + - ./nginx/default.conf:/etc/nginx/conf.d/default.conf:ro + depends_on: + - api + restart: unless-stopped + + postgres: + image: {{ postgres_image }} + env_file: + - {{ env_file }} + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-postgres} + 
volumes: + - postgres-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"] + interval: 10s + timeout: 5s + retries: 10 + restart: unless-stopped + + redis: + image: {{ redis_image }} + command: ["redis-server", "--appendonly", "yes"] + volumes: + - redis-data:/data + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 5s + retries: 10 + restart: unless-stopped + +volumes: + postgres-data: + redis-data: diff --git a/cli/src/cli/features/_builtins/deploy/templates/prod/docker-compose.yml.j2 b/cli/src/cli/features/_builtins/deploy/templates/prod/docker-compose.yml.j2 new file mode 100644 index 00000000..5f457bb7 --- /dev/null +++ b/cli/src/cli/features/_builtins/deploy/templates/prod/docker-compose.yml.j2 @@ -0,0 +1,104 @@ +# Generated by `bp deploy generate prod`. +# Production-ish single-host stack: multi-worker API + Taskiq worker +# + Postgres + Redis. The API is exposed directly on the host port — +# add a reverse proxy (or use the `nginx` mode) before public traffic. 
+ +name: {{ project_name }} + +services: + migrate: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: migrate + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CONFIRM_PRODUCTION_MIGRATION: "yes" + depends_on: + postgres: + condition: service_healthy + restart: "no" + + api: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: prod + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + WORKERS: "{{ workers }}" + ports: + - "{{ api_port }}:8000" + depends_on: + migrate: + condition: service_completed_successfully + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + + worker: + build: + context: {{ backend_context }} + dockerfile: Dockerfile + target: base + env_file: + - {{ env_file }} + environment: + POSTGRES_SERVER: postgres + CACHE_REDIS_HOST: redis + SESSION_REDIS_HOST: redis + RATE_LIMITER_REDIS_HOST: redis + TASKIQ_REDIS_HOST: redis + working_dir: /app/src + command: taskiq worker infrastructure.taskiq.worker:default_broker --workers 2 + depends_on: + migrate: + condition: service_completed_successfully + postgres: + condition: service_healthy + redis: + condition: service_healthy + restart: unless-stopped + + postgres: + image: {{ postgres_image }} + env_file: + - {{ env_file }} + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-postgres} + volumes: + - postgres-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"] + interval: 10s + timeout: 5s + retries: 10 + restart: unless-stopped + + redis: + image: {{ redis_image }} + command: ["redis-server", "--appendonly", "yes"] + volumes: + - redis-data:/data + healthcheck: + test: ["CMD", "redis-cli", "ping"] 
+ interval: 10s + timeout: 5s + retries: 10 + restart: unless-stopped + +volumes: + postgres-data: + redis-data: diff --git a/cli/src/cli/features/base.py b/cli/src/cli/features/base.py new file mode 100644 index 00000000..1bd8f019 --- /dev/null +++ b/cli/src/cli/features/base.py @@ -0,0 +1,98 @@ +"""Feature contracts: manifest, plan, base class, and operation types. + +The shapes here are designed so that today's "render templates to disk" +features and tomorrow's "drop a module + edit settings + run a +migration" features can both describe themselves with the same plan +schema. Installers grow capabilities over time; manifests don't have +to change. +""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +from ..lib.project import ProjectContext + + +@dataclass(frozen=True) +class FeatureManifest: + """Static metadata about a feature. + + The manifest is the part that's safe to read without executing + anything — used by ``bp feature list`` / ``bp feature info`` to + describe what's available. + """ + + name: str + version: str + summary: str + requires_features: tuple[str, ...] = () + adds_dependencies: tuple[str, ...] = () + + +@dataclass(frozen=True) +class FileOp: + """Render a Jinja template and write it to ``target``. + + ``template`` is interpreted relative to the feature's templates + directory. ``target`` is an absolute path on the user's disk. + """ + + template: str + target: Path + overwrite: bool = False + skip_if_exists: bool = False + executable: bool = False + + +@dataclass(frozen=True) +class Codemod: + """Reserved for AST-aware edits to existing source files. + + Not implemented in v1. Defining the type here means feature plans + can reference codemods today without breaking the manifest schema + when the installer learns to execute them. 
+ """ + + target: Path + operation: str + params: dict[str, Any] = field(default_factory=dict) + + +@dataclass(frozen=True) +class Hook: + """Reserved for post-install commands (alembic revisions, scripts, etc.).""" + + name: str + command: tuple[str, ...] + cwd: Path | None = None + + +@dataclass(frozen=True) +class FeaturePlan: + """Concrete description of what a feature wants to do, given parameters.""" + + manifest: FeatureManifest + templates_root: Path | None + template_context: dict[str, Any] = field(default_factory=dict) + files: tuple[FileOp, ...] = () + codemods: tuple[Codemod, ...] = () + hooks: tuple[Hook, ...] = () + + +class Feature(ABC): + """Base class for all features. + + Subclasses declare a static ``manifest()`` and an instance-method + ``plan(params, project)`` that yields the concrete plan for the + current invocation. + """ + + @abstractmethod + def manifest(self) -> FeatureManifest: ... + + @abstractmethod + def plan(self, params: dict[str, Any], project: ProjectContext) -> FeaturePlan: ... diff --git a/cli/src/cli/features/installer.py b/cli/src/cli/features/installer.py new file mode 100644 index 00000000..8063842a --- /dev/null +++ b/cli/src/cli/features/installer.py @@ -0,0 +1,87 @@ +"""Execute a ``FeaturePlan`` against the user's project. + +V1 supports file rendering only. Codemods and hooks raise +``NotImplementedError`` so plans can declare them today and the +installer will gain support over time without breaking the contract. 
+""" + +from __future__ import annotations + +import stat +from dataclasses import dataclass, field +from pathlib import Path + +from ..lib.prompts import confirm_overwrite, info, success, warn +from ..lib.render import Renderer +from .base import FeaturePlan + + +@dataclass +class InstallResult: + """Summary of what an installer pass actually did.""" + + files_written: list[Path] = field(default_factory=list) + files_skipped: list[Path] = field(default_factory=list) + + +class FeatureInstaller: + """Apply a ``FeaturePlan`` to disk. + + Construction options control the prompting / I/O behavior. + """ + + def __init__(self, *, dry_run: bool = False, assume_yes: bool = False, quiet: bool = False) -> None: + self.dry_run = dry_run + self.assume_yes = assume_yes + self.quiet = quiet + + def apply(self, plan: FeaturePlan) -> InstallResult: + result = InstallResult() + if plan.codemods: + raise NotImplementedError("Codemod operations are not yet supported by the installer.") + if plan.hooks: + raise NotImplementedError("Post-install hooks are not yet supported by the installer.") + + renderer: Renderer | None = None + if plan.files and plan.templates_root is not None: + renderer = Renderer(plan.templates_root) + + for op in plan.files: + if op.target.exists(): + if op.skip_if_exists: + if not self.quiet: + warn(f"skip {op.target} (exists)") + result.files_skipped.append(op.target) + continue + if not op.overwrite: + if not confirm_overwrite(str(op.target), assume_yes=self.assume_yes): + if not self.quiet: + warn(f"skip {op.target} (kept existing)") + result.files_skipped.append(op.target) + continue + + if renderer is None: + raise RuntimeError(f"Plan has files but no templates_root: cannot render {op.template!r}") + + content = renderer.render(op.template, plan.template_context) + + if self.dry_run: + if not self.quiet: + info(f"would write {op.target} ({len(content)} bytes)") + result.files_written.append(op.target) + continue + + op.target.parent.mkdir(parents=True, 
exist_ok=True) + op.target.write_text(content, encoding="utf-8") + if op.executable: + _make_executable(op.target) + if not self.quiet: + success(f"wrote {op.target}") + result.files_written.append(op.target) + + return result + + +def _make_executable(path: Path) -> None: + current = path.stat().st_mode + path.chmod(current | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) diff --git a/cli/src/cli/features/registry.py b/cli/src/cli/features/registry.py new file mode 100644 index 00000000..85c3ae4a --- /dev/null +++ b/cli/src/cli/features/registry.py @@ -0,0 +1,44 @@ +"""Look up features by name across in-tree and plugin sources. + +In-tree features are imported lazily so that adding new ones doesn't +slow down every CLI invocation. Plugin features come from the +``bp.features`` entry-point group. +""" + +from __future__ import annotations + +import warnings + +from .. import plugins as _plugins +from ._builtins.deploy.feature import DeployFeature +from .base import Feature + + +def _builtin_features() -> dict[str, Feature]: + """Return in-tree features.""" + return { + "deploy": DeployFeature(), + } + + +def all_features() -> dict[str, Feature]: + """Return ``{name: feature}`` for every in-tree and plugin feature. + + In-tree features take precedence on name collisions; a warning is + surfaced when a plugin tries to shadow a built-in. 
+ """ + found: dict[str, Feature] = dict(_builtin_features()) + for name, feature in _plugins.discover_feature_plugins().items(): + if name in found: + warnings.warn( + f"feature plugin {name!r} shadows a built-in; ignoring plugin.", + RuntimeWarning, + stacklevel=2, + ) + continue + found[name] = feature + return found + + +def get_feature(name: str) -> Feature | None: + return all_features().get(name) diff --git a/cli/src/cli/lib/__init__.py b/cli/src/cli/lib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cli/src/cli/lib/project.py b/cli/src/cli/lib/project.py new file mode 100644 index 00000000..5501a4e8 --- /dev/null +++ b/cli/src/cli/lib/project.py @@ -0,0 +1,70 @@ +"""Locate the user's project on disk and read context from it. + +The CLI is shipped inside ``backend/src/cli`` of the boilerplate, but +when invoked it operates on whichever directory the user is in. These +helpers resolve the repo root, the backend directory, and read values +from the project's ``.env`` files without importing the application. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + + +@dataclass(frozen=True) +class ProjectContext: + """Resolved paths the CLI needs to operate on the user's repo.""" + + repo_root: Path + backend_dir: Path + + @property + def env_file(self) -> Path: + return self.backend_dir / ".env" + + @property + def env_example(self) -> Path: + return self.backend_dir / ".env.example" + + @property + def compose_file(self) -> Path: + return self.repo_root / "docker-compose.yml" + + +def discover_project(start: Path | None = None) -> ProjectContext: + """Walk up from ``start`` looking for a ``backend/pyproject.toml`` marker. + + Falls back to the current working directory if no marker is found — + the caller is responsible for deciding whether that's acceptable. 
+ """ + current = (start or Path.cwd()).resolve() + for candidate in [current, *current.parents]: + if (candidate / "backend" / "pyproject.toml").is_file(): + return ProjectContext(repo_root=candidate, backend_dir=candidate / "backend") + if (candidate / "pyproject.toml").is_file() and candidate.name == "backend": + return ProjectContext(repo_root=candidate.parent, backend_dir=candidate) + return ProjectContext(repo_root=current, backend_dir=current / "backend") + + +def read_env_value(env_path: Path, key: str) -> str | None: + """Read a single value from a ``.env``-style file. + + Returns ``None`` if the file is missing or the key isn't set. Quotes + around the value (single or double) are stripped. Lines beginning + with ``#`` are skipped. + """ + if not env_path.is_file(): + return None + for raw in env_path.read_text(encoding="utf-8").splitlines(): + line = raw.strip() + if not line or line.startswith("#") or "=" not in line: + continue + name, _, value = line.partition("=") + if name.strip() != key: + continue + value = value.strip() + if len(value) >= 2 and value[0] == value[-1] and value[0] in {'"', "'"}: + value = value[1:-1] + return value + return None diff --git a/cli/src/cli/lib/prompts.py b/cli/src/cli/lib/prompts.py new file mode 100644 index 00000000..49cf3b9c --- /dev/null +++ b/cli/src/cli/lib/prompts.py @@ -0,0 +1,32 @@ +"""Consistent prompt helpers used across the CLI. + +Centralizes ``--yes`` (assume-yes) and ``--quiet`` handling so individual +commands don't each invent their own. +""" + +from __future__ import annotations + +import typer + + +def confirm_overwrite(path: str, *, assume_yes: bool) -> bool: + """Confirm overwriting an existing file. Returns True to proceed.""" + if assume_yes: + return True + return typer.confirm(f"{path} already exists. 
Overwrite?", default=False) + + +def info(message: str) -> None: + typer.echo(message) + + +def success(message: str) -> None: + typer.secho(message, fg=typer.colors.GREEN) + + +def warn(message: str) -> None: + typer.secho(message, fg=typer.colors.YELLOW) + + +def error(message: str) -> None: + typer.secho(message, fg=typer.colors.RED, err=True) diff --git a/cli/src/cli/lib/render.py b/cli/src/cli/lib/render.py new file mode 100644 index 00000000..f43b0ac3 --- /dev/null +++ b/cli/src/cli/lib/render.py @@ -0,0 +1,33 @@ +"""Jinja-based template rendering for in-tree features and plugins. + +A feature points at a directory containing Jinja templates. ``Renderer`` +loads them with a sandboxed environment, applies the supplied context, +and writes the result to disk. +""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from jinja2 import Environment, FileSystemLoader, StrictUndefined + + +class Renderer: + """Render Jinja templates from a feature's templates directory.""" + + def __init__(self, templates_root: Path) -> None: + if not templates_root.is_dir(): + raise FileNotFoundError(f"Templates directory does not exist: {templates_root}") + self.templates_root = templates_root + self.env = Environment( + loader=FileSystemLoader(str(templates_root)), + keep_trailing_newline=True, + undefined=StrictUndefined, + autoescape=False, + ) + + def render(self, template_path: str, context: dict[str, Any]) -> str: + """Render ``template_path`` (relative to ``templates_root``) with ``context``.""" + template = self.env.get_template(template_path) + return template.render(**context) diff --git a/cli/src/cli/plugins.py b/cli/src/cli/plugins.py new file mode 100644 index 00000000..7326b5f2 --- /dev/null +++ b/cli/src/cli/plugins.py @@ -0,0 +1,109 @@ +"""Plugin discovery for the bp CLI. + +Two extension points, kept deliberately separate: + +- ``bp.commands`` — a Typer sub-app mounted under the root, e.g. + ``bp aws deploy``. 
Plugin packages declare entry points whose values + resolve to a ``typer.Typer`` instance. + +- ``bp.features`` — a ``Feature`` instance that ``bp feature`` can list + and apply. Plugin packages declare entry points whose values resolve + to ``Feature`` instances (or callables that return one). + +A broken plugin must not break the CLI. Discovery wraps each load in a +broad except and surfaces a warning so the user can still operate the +working subset. +""" + +from __future__ import annotations + +import sys +import warnings +from importlib.metadata import EntryPoint, entry_points + +import typer + +from .features.base import Feature + +COMMANDS_GROUP = "bp.commands" +FEATURES_GROUP = "bp.features" + + +def _safe_load(ep: EntryPoint) -> object | None: + try: + loaded: object = ep.load() + except Exception as exc: + warnings.warn( + f"Failed to load plugin {ep.name!r} from {ep.value!r}: {exc!s}", + RuntimeWarning, + stacklevel=2, + ) + return None + return loaded + + +def discover_command_plugins() -> dict[str, typer.Typer]: + """Return ``{name: typer_app}`` for every healthy ``bp.commands`` entry point.""" + found: dict[str, typer.Typer] = {} + for ep in entry_points(group=COMMANDS_GROUP): + loaded = _safe_load(ep) + if loaded is None: + continue + if isinstance(loaded, typer.Typer): + found[ep.name] = loaded + else: + warnings.warn( + f"Plugin {ep.name!r} resolved to {type(loaded).__name__}, expected typer.Typer; ignoring.", + RuntimeWarning, + stacklevel=2, + ) + return found + + +def discover_feature_plugins() -> dict[str, Feature]: + """Return ``{name: feature}`` for every healthy ``bp.features`` entry point. + + Entry-point values may be either a ``Feature`` instance or a callable + that returns one (helpful when constructing features needs the project + context). 
+ """ + found: dict[str, Feature] = {} + for ep in entry_points(group=FEATURES_GROUP): + loaded = _safe_load(ep) + if loaded is None: + continue + candidate = loaded() if callable(loaded) and not isinstance(loaded, Feature) else loaded + if isinstance(candidate, Feature): + found[ep.name] = candidate + else: + warnings.warn( + f"Feature plugin {ep.name!r} resolved to {type(candidate).__name__}, expected Feature; ignoring.", + RuntimeWarning, + stacklevel=2, + ) + return found + + +def emit_plugin_warnings_to_stderr() -> None: + """Route plugin-load warnings to stderr so CLI output stays clean.""" + warnings.filterwarnings("default", category=RuntimeWarning, module=__name__) + if not any(isinstance(h, _StderrWarningHandler) for h in _installed_handlers()): + warnings.showwarning = _StderrWarningHandler() + + +class _StderrWarningHandler: + def __call__( + self, + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: object = None, + line: str | None = None, + ) -> None: + print(f"warning: {message}", file=sys.stderr) + + +def _installed_handlers() -> list[object]: + handler = warnings.showwarning + return [handler] if handler is not None else [] diff --git a/cli/src/cli/py.typed b/cli/src/cli/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/default.conf b/default.conf deleted file mode 100644 index a763f9fa..00000000 --- a/default.conf +++ /dev/null @@ -1,32 +0,0 @@ -# ---------------- Running With One Server ---------------- -server { - listen 80; - - location / { - proxy_pass http://web:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} - - -# # ---------------- To Run with Multiple Servers, Uncomment below ---------------- -# upstream fastapi_app { -# server fastapi1:8000; # Replace with actual server names or IP addresses -# server fastapi2:8000; -# # Add more 
servers as needed -# } - -# server { -# listen 80; - -# location / { -# proxy_pass http://fastapi_app; -# proxy_set_header Host $host; -# proxy_set_header X-Real-IP $remote_addr; -# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; -# proxy_set_header X-Forwarded-Proto $scheme; -# } -# } diff --git a/docs/cli/commands.md b/docs/cli/commands.md new file mode 100644 index 00000000..aa4ec9c2 --- /dev/null +++ b/docs/cli/commands.md @@ -0,0 +1,177 @@ +# Commands + +Complete reference for the in-tree `bp` commands. Plugin commands have their own help text — see [Plugins](plugins.md). + +## `bp deploy` + +Generate deployment artifacts. Today's only sub-command is `generate`. + +### `bp deploy generate ` + +Render `docker-compose.yml` (and `nginx/default.conf` for `nginx` mode) for the chosen deployment shape. + +```text +Usage: bp deploy generate [OPTIONS] MODE + +Arguments: + MODE {local|prod|nginx} [required] + +Options: + -o, --output-dir DIRECTORY Where to write the compose file (default: repo root). + --api-port INTEGER Host port to publish the API on. [default: 8000] + --workers INTEGER Number of API workers (prod / nginx only). [default: 4] + -f, --force Overwrite existing files without asking. + -y, --yes Assume yes for all prompts. + --dry-run Show what would be written, don't touch disk. +``` + +#### Modes + +| Mode | Stack | Use for | +|---------|--------------------------------------------------------------------------|----------------------------------------| +| `local` | API (target `dev`) + worker + Postgres + Redis. Source mounted, hot-reload. | Local development | +| `prod` | API (target `prod`, `WORKERS` env) + worker + Postgres + Redis + migrate. | Single-host production, no proxy | +| `nginx` | `prod` + nginx reverse proxy on port 80. API exposed on internal network. | Production behind a reverse proxy | + +All modes target the same multi-stage `backend/Dockerfile` — no per-mode Dockerfile is generated. 
+ +#### Examples + +```bash +# Generate the dev stack at the repo root +uv run bp deploy generate local + +# Override workers and port +uv run bp deploy generate prod --workers 8 --api-port 9000 + +# Dry run — print what would be written, don't touch disk +uv run bp deploy generate nginx --output-dir /tmp/scratch --dry-run + +# Generate into a separate directory (for staging configs in CI, etc.) +uv run bp deploy generate prod --output-dir ./deploy/prod --yes +``` + +#### What it writes + +| Mode | Files | +|---------|------------------------------------------------| +| `local` | `/docker-compose.yml` | +| `prod` | `/docker-compose.yml` | +| `nginx` | `/docker-compose.yml`
`/nginx/default.conf` | + +Existing files are protected: if a target already exists you'll be prompted to confirm overwrite. Use `--force` (or `--yes`) for non-interactive runs. + +#### Generated compose conventions + +All three modes use the same service names and networking: + +- **`api`** — the FastAPI application +- **`worker`** — Taskiq worker, running `taskiq worker infrastructure.taskiq.worker:default_broker` +- **`postgres`** — Postgres 16 (alpine), with health check +- **`redis`** — Redis 7 (alpine), with health check +- **`migrate`** (prod & nginx) — runs `alembic upgrade head` once, with `CONFIRM_PRODUCTION_MIGRATION=yes`. The `api` and `worker` services depend on it via `service_completed_successfully`. +- **`nginx`** (nginx mode only) — Nginx 1.27 alpine, mounting the generated `nginx/default.conf` read-only + +The compose file references `./backend/.env` for env vars. Make sure that file exists (`cp backend/.env.example backend/.env`) before `docker compose up`. + +## `bp env` + +Inspect and prepare the runtime environment. Two sub-commands today. + +### `bp env gen-secret` + +Generate a high-entropy hex secret suitable for `SECRET_KEY`. + +```text +Usage: bp env gen-secret [OPTIONS] + +Options: + --bytes INTEGER RANGE [16 ≤ x ≤ 128] Number of random bytes (hex output is 2x). [default: 32] +``` + +```bash +$ uv run bp env gen-secret +af97045f600bf988041ec4b6fd891763d8f79f01f0a0a4a7ed2022e57f771a9e + +$ uv run bp env gen-secret --bytes 16 +c042a8aa0d678a9c73dc371e3e0d6a5e +``` + +The default produces 64 hex characters (256 bits of entropy) — enough for any of the boilerplate's secret slots (`SECRET_KEY`, signed-cookie secrets, etc.). Pipe directly into your secrets manager: + +```bash +uv run bp env gen-secret | gh secret set SECRET_KEY --repo my-org/my-app +``` + +### `bp env validate` + +Run the production security validator against your current settings, regardless of `ENVIRONMENT`. 
+ +```text +Usage: bp env validate +``` + +```bash +$ uv run bp env validate +Critical (2): + • SECRET_KEY is using default or insecure value. ... + • Database is using default credentials (POSTGRES_PASSWORD='postgres'). ... +``` + +The command: + +1. Imports the same `ProductionSecurityValidator` the application uses at startup +2. Forces `_is_production() = True` so the checks run regardless of `ENVIRONMENT` +3. Captures critical errors and warnings, prints them grouped, and exits non-zero if any critical issues exist + +Exit codes: + +| Code | Meaning | +|------|------------------------------------------------------| +| 0 | No critical issues. Warnings (if any) are advisory. | +| 1 | One or more critical issues found. | + +Useful in CI to gate deployments: + +```yaml +# .github/workflows/deploy.yml (excerpt) +- run: uv run bp env validate + env: + SECRET_KEY: ${{ secrets.SECRET_KEY }} + POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }} + # ... +``` + +The validator's specific checks are documented in [Production → The Production Validator](../user-guide/production.md#the-production-validator). 
+ +## Global Options + +Typer-provided options work on any sub-command: + +| Option | Effect | +|-------------------------|-----------------------------------------------------| +| `--help` | Show command help and exit | +| `--install-completion` | Install shell completion (bash/zsh/fish) | +| `--show-completion` | Print completion script for the current shell | + +## Exit Codes + +| Code | Meaning | +|------|------------------------------------------------------| +| 0 | Success | +| 1 | Validation or operation failed | +| 2 | Argument parsing error (bad flag, missing argument) | + +## Discoverability + +Every command has `--help`: + +```bash +uv run bp --help +uv run bp deploy --help +uv run bp deploy generate --help +uv run bp env --help +uv run bp env validate --help +``` + +The root `bp --help` lists every mounted command group, including those contributed by plugins. If a plugin you installed isn't showing up, see [Plugins → Troubleshooting](plugins.md#troubleshooting). diff --git a/docs/cli/index.md b/docs/cli/index.md new file mode 100644 index 00000000..5ac3210d --- /dev/null +++ b/docs/cli/index.md @@ -0,0 +1,109 @@ +# bp — the FastAPI-boilerplate CLI + +`bp` is the developer/operator tool that ships alongside the boilerplate. It generates deployment artifacts, helps prepare the runtime environment, and serves as the host for plugin commands and feature generators. + +It is **not** part of the deployable backend. The backend image stays lean — `bp` lives in its own workspace package (`cli/`) and is only present in development environments and on machines where you've installed it as a tool. 
+ +## What's in this section + +- **[Commands](commands.md)** — complete reference for the in-tree `deploy` and `env` sub-apps +- **[Plugins](plugins.md)** — extension points (`bp.commands` and `bp.features`) and authoring guide + +## Repository Layout + +```text +fastapi-boilerplate/ +├── pyproject.toml # workspace root (uv workspace metadata) +├── backend/ # deployable application — never ships bp +│ └── src/... +├── cli/ # the bp package +│ ├── pyproject.toml # typer + jinja2 + workspace dep on backend +│ └── src/cli/ +│ ├── app.py # Typer root + plugin discovery +│ ├── commands/ # in-tree command sub-apps +│ ├── features/ # feature framework + in-tree features +│ └── plugins.py # entry-point loaders +└── docs/ +``` + +The two-package split is deliberate: `cli/` depends on `backend/` (for things like `bp env validate`), but `backend/` never depends on `cli/`. Production images ship `backend/` only. + +## Install + +### In-repo (most common) + +```bash +git clone https://github.com/benavlabs/fastapi-boilerplate +cd fastapi-boilerplate +uv sync # syncs the workspace; installs backend + cli into one venv +uv run bp --help # works from anywhere in the repo +``` + +The workspace shares a single `.venv/` at the repo root. You can run `uv run bp` from any subdirectory — uv walks up to find the workspace root. + +### Machine-wide (optional) + +If you want `bp` on `PATH` outside the repo: + +```bash +uv tool install --editable ./cli +bp --help +``` + +`--editable` means edits to `cli/src/cli/` show up immediately without reinstall. Re-run `uv tool install --editable ./cli` only when `cli/pyproject.toml` changes (deps, entry points). 
To uninstall: + +```bash +uv tool uninstall fastapi-boilerplate-cli +``` + +## Quick Tour + +### Generate a Compose File + +```bash +uv run bp deploy generate local # hot-reload dev stack +uv run bp deploy generate prod --workers 8 # production stack +uv run bp deploy generate nginx # production behind nginx +``` + +Each command writes `docker-compose.yml` (and `nginx/default.conf` for the nginx mode) to the repo root by default. Use `--output-dir` to target somewhere else. + +### Generate a Secret + +```bash +uv run bp env gen-secret +# → 64-char hex suitable for SECRET_KEY +``` + +### Audit Production Settings + +```bash +uv run bp env validate +# Forces the production security validator regardless of ENVIRONMENT. +# Exits 1 if any critical issues are found. +``` + +## Command Tree + +```text +bp +├── deploy +│ └── generate [options] # mode ∈ {local, prod, nginx} +└── env + ├── gen-secret [--bytes N] + └── validate +``` + +Plugin sub-apps mount as siblings of `deploy` and `env`. See [Plugins](plugins.md) for details. + +## Design Principles + +- **No surprises in production.** `bp` never runs against production at runtime — it's a developer/operator tool. Production images don't even include the `cli` package. +- **Two extension points, kept separate.** Command plugins (Typer sub-apps) and feature plugins (code generators with manifests) have different lifecycles and shouldn't share machinery. +- **Templates as data.** Built-in features render Jinja templates. Plugin features do the same. The installer is the only thing that needs to know how to execute the plan. +- **Idempotent and dry-runnable.** Every operation that mutates the user's repo prompts before overwriting and supports `--dry-run`. 
+ +## Next Steps + +- **[Commands](commands.md)** — full reference for the shipped commands +- **[Plugins](plugins.md)** — write your own commands or features diff --git a/docs/cli/plugins.md b/docs/cli/plugins.md new file mode 100644 index 00000000..1ed3de0c --- /dev/null +++ b/docs/cli/plugins.md @@ -0,0 +1,332 @@ +# Plugins + +`bp` has two extension points, kept deliberately separate. They have different lifecycles and shouldn't share machinery: + +- **Command plugins** (`bp.commands`) — third-party Typer sub-apps mounted under the root, e.g. `bp aws deploy`. Pure CLI surface; the plugin doesn't touch the user's repo. +- **Feature plugins** (`bp.features`) — code generators with a manifest that `bp feature` can list, install, and remove. Plugins of this kind mutate the user's repo (write files, edit settings, run hooks). + +Both are discovered via [Python entry points](https://packaging.python.org/en/latest/specifications/entry-points/), so a plugin is just a normal Python package on the user's machine. + +## How Discovery Works + +When `bp` starts, the root app: + +1. Mounts in-tree command sub-apps (`deploy`, `env`) +2. Calls `discover_command_plugins()` (from `cli.plugins`) which walks `entry_points(group="bp.commands")` and mounts each healthy result +3. Builds the feature registry on demand: in-tree features merged with `discover_feature_plugins()` from `bp.features` + +A broken plugin never breaks `bp`. Discovery wraps each load in `try/except`, surfaces a `RuntimeWarning`, and continues with the working subset. + +```python +# cli/src/cli/plugins.py (abridged) +def _safe_load(ep: EntryPoint) -> object | None: + try: + return ep.load() + except Exception as exc: + warnings.warn(f"Failed to load plugin {ep.name!r}: {exc}", RuntimeWarning) + return None +``` + +In-tree commands and features take precedence on name collisions — a plugin can't silently shadow a built-in. 
+ +## Command Plugins + +Use a command plugin when you want to add new top-level verbs to `bp` without modifying the boilerplate. Examples: `bp fly deploy`, `bp pulumi up`, `bp lint custom-rules`. + +### Authoring a Command Plugin + +Create a Python package with a `typer.Typer` instance and register it under the `bp.commands` group. + +#### 1. Package layout + +```text +bp-deploy-fly/ +├── pyproject.toml +└── src/ + └── bp_deploy_fly/ + ├── __init__.py + └── cli.py +``` + +#### 2. `cli.py` — define the sub-app + +```python +import typer + +app = typer.Typer( + name="fly", + help="Deploy this app to Fly.io.", + no_args_is_help=True, +) + + +@app.command("deploy") +def deploy( + region: str = typer.Option("ord", help="Fly region to target."), + yes: bool = typer.Option(False, "--yes", "-y"), +) -> None: + """Build and deploy the current branch to Fly.io.""" + typer.echo(f"deploying to fly region={region}") + # ... real work here ... +``` + +#### 3. `pyproject.toml` — declare the entry point + +```toml +[project] +name = "bp-deploy-fly" +version = "0.1.0" +requires-python = ">=3.11" +dependencies = ["typer>=0.12"] + +[project.entry-points."bp.commands"] +fly = "bp_deploy_fly.cli:app" + +[tool.setuptools.packages.find] +where = ["src"] +``` + +The entry-point key (`fly`) becomes the sub-command name. The value (`bp_deploy_fly.cli:app`) is the import path to your `Typer` instance. + +#### 4. Install and use + +```bash +uv pip install bp-deploy-fly +uv run bp --help +# ╭─ Commands ────────────────────────────────────────────────╮ +# │ deploy Generate deployment artifacts ... │ +# │ env Inspect and prepare the runtime environment. │ +# │ fly Deploy this app to Fly.io. ← new │ +# ╰───────────────────────────────────────────────────────────╯ + +uv run bp fly deploy --region ord --yes +``` + +### Best Practices for Command Plugins + +- **Pick a unique name.** `bp` warns if you collide with a built-in (`deploy`, `env`, `feature`) and ignores you. 
Pick something specific (`fly`, `aws`, `pulumi`) rather than generic (`cloud`, `tools`). +- **Keep your sub-app self-contained.** Don't import from the host CLI's internals — use only the public Typer interface. The `cli.app` module is not a public API. +- **Honor the global UX patterns.** Support `--yes` for non-interactive runs and `--dry-run` where it makes sense. Match the style of the in-tree commands. +- **Don't block on broken external dependencies.** If your plugin needs `flyctl` or `kubectl`, check for it gracefully and surface a useful error — not a stack trace from `subprocess`. + +## Feature Plugins + +Use a feature plugin when you want to **mutate the user's repo** — drop in a new module, add OAuth providers, wire up a payments integration, write migrations. Features have a manifest, plan, and installer lifecycle so they can apply, list, and (eventually) revert cleanly. + +!!! note "Feature plugins are designed for, but not yet wired to, `bp feature`" + The feature framework (`Feature`, `FeatureManifest`, `FeaturePlan`, `FeatureInstaller`) is fully implemented and the registry already discovers plugin features. The user-facing `bp feature` sub-app is not yet shipped — features are reachable today only via in-tree commands like `bp deploy generate`. The contract below is stable; once `bp feature {list,add,remove,info}` lands, your plugins will work without changes. 
+
+### The Feature Contract
+
+A feature is a class that subclasses `cli.features.base.Feature`:
+
+```python
+from pathlib import Path
+from cli.features.base import Feature, FeatureManifest, FeaturePlan, FileOp
+from cli.lib.project import ProjectContext
+
+class MyFeature(Feature):
+    def manifest(self) -> FeatureManifest:
+        return FeatureManifest(
+            name="my-feature",
+            version="1.0",
+            summary="One-line description shown by `bp feature list`.",
+        )
+
+    def plan(self, params: dict, project: ProjectContext) -> FeaturePlan:
+        return FeaturePlan(
+            manifest=self.manifest(),
+            templates_root=Path(__file__).parent / "templates",
+            template_context={"project_name": project.repo_root.name, **params},
+            files=(
+                FileOp(template="my_module.py.j2",
+                       target=project.backend_dir / "src/modules/my_feature/__init__.py"),
+                # ... more files ...
+            ),
+        )
+```
+
+Two methods, both required:
+
+- **`manifest()`** — static metadata. Called by registry / list operations without executing anything.
+- **`plan(params, project)`** — given runtime parameters and a `ProjectContext`, return a concrete `FeaturePlan`.
+
+### The Plan Schema
+
+```python
+@dataclass(frozen=True)
+class FeaturePlan:
+    manifest: FeatureManifest
+    templates_root: Path | None
+    template_context: dict[str, Any] = field(default_factory=dict)
+    files: tuple[FileOp, ...] = ()
+    codemods: tuple[Codemod, ...] = ()  # reserved for v2 — installer raises NotImplementedError
+    hooks: tuple[Hook, ...] = ()        # reserved for v2 — installer raises NotImplementedError
+```
+
+The schema covers today's "render Jinja templates to disk" and tomorrow's "mutate code + run a migration". Defining `Codemod` and `Hook` types now means feature plugins can declare them without breaking the contract when the installer learns to execute them. 
+ +#### `FileOp` + +```python +@dataclass(frozen=True) +class FileOp: + template: str # path relative to templates_root + target: Path # absolute path on disk + overwrite: bool = False + skip_if_exists: bool = False + executable: bool = False +``` + +The installer renders `template` with the plan's `template_context` and writes the result to `target`. If the target exists: + +| Flags | Behavior | +|--------------------------|-------------------------------------------------------------| +| Default | Prompt the user before overwriting | +| `overwrite=True` | Overwrite without asking | +| `skip_if_exists=True` | Skip silently (preserves user customizations) | + +`executable=True` chmods the result with `+x` for owner/group/world. + +### Authoring a Feature Plugin + +#### 1. Package layout + +```text +bp-feature-microsoft-oauth/ +├── pyproject.toml +└── src/ + └── bp_feature_microsoft_oauth/ + ├── __init__.py + ├── feature.py + └── templates/ + └── microsoft_provider.py.j2 +``` + +#### 2. `feature.py` — define the feature + +```python +from pathlib import Path + +from cli.features.base import Feature, FeatureManifest, FeaturePlan, FileOp +from cli.lib.project import ProjectContext + + +class MicrosoftOAuthFeature(Feature): + def manifest(self) -> FeatureManifest: + return FeatureManifest( + name="microsoft-oauth", + version="1.0", + summary="Wire up Microsoft (Entra ID) as an OAuth provider.", + ) + + def plan(self, params: dict, project: ProjectContext) -> FeaturePlan: + templates_root = Path(__file__).parent / "templates" + provider_path = ( + project.backend_dir + / "src/infrastructure/auth/oauth/providers/microsoft.py" + ) + return FeaturePlan( + manifest=self.manifest(), + templates_root=templates_root, + template_context={ + "tenant_id": params.get("tenant_id", "common"), + }, + files=( + FileOp(template="microsoft_provider.py.j2", target=provider_path), + ), + ) + + +# Entry-point target — can be a Feature instance or a callable that returns one. 
+feature = MicrosoftOAuthFeature()
+```
+
+#### 3. `pyproject.toml` — declare the entry point
+
+```toml
+[project]
+name = "bp-feature-microsoft-oauth"
+version = "0.1.0"
+requires-python = ">=3.11"
+dependencies = ["fastapi-boilerplate-cli"]
+
+[project.entry-points."bp.features"]
+microsoft-oauth = "bp_feature_microsoft_oauth.feature:feature"
+
+[tool.setuptools]
+include-package-data = true
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.package-data]
+"*" = ["*.j2"]
+```
+
+Note `include-package-data = true` and the `package-data` glob — without these, your Jinja templates won't ship with the wheel.
+
+#### 4. Install
+
+```bash
+uv pip install bp-feature-microsoft-oauth
+# Once `bp feature` ships:
+# uv run bp feature add microsoft-oauth --tenant-id <your-tenant-id>
+```
+
+### Best Practices for Feature Plugins
+
+- **Be idempotent.** A user running your feature twice should be safe. Use `skip_if_exists=True` for files that the user might customize after first install.
+- **Don't fight the user's structure.** Use `ProjectContext.backend_dir` and `ProjectContext.repo_root` rather than hardcoded paths — different projects may move things.
+- **Declare every file.** Don't write files outside the plan. The installer's behavior (dry-run, idempotency, overwrite prompts) only works for files declared in the plan.
+- **Put templates next to the feature.** `Path(__file__).parent / "templates"` is the canonical pattern. Keeps the plugin self-contained.
+- **Use Jinja's `StrictUndefined`.** The bundled `Renderer` does this for you — missing context variables fail loudly rather than rendering empty strings.
+- **Document required `params`.** Once `bp feature` lands, your manifest's `summary` is the first thing users see; document params clearly there. 
+ +## Discovery & Diagnosis + +### Listing Plugins + +```bash +# In-tree + plugin commands +uv run bp --help + +# (Once shipped) — list features: +# uv run bp feature list +``` + +### Inspecting Entry Points + +```bash +uv run python -c " +from importlib.metadata import entry_points +for ep in entry_points(group='bp.commands'): + print(f'command {ep.name:>15s} -> {ep.value}') +for ep in entry_points(group='bp.features'): + print(f'feature {ep.name:>15s} -> {ep.value}') +" +``` + +### Troubleshooting + +#### My plugin doesn't appear in `bp --help` + +1. **Confirm it's installed in the right venv.** Run the entry-point inspection script above. If your plugin isn't listed, the package isn't installed where `bp` runs. +2. **Check for load warnings.** `bp` prints `warning: Failed to load plugin ...` to stderr if your plugin's import fails. Run `bp --help 2>&1` to make sure stderr isn't being suppressed. +3. **Confirm your entry-point shape.** For commands, the value must resolve to a `typer.Typer` instance. For features, a `Feature` instance or a callable returning one. + +#### My plugin shadows a built-in + +`bp` warns and ignores plugins that try to register a name already in use by a built-in (`deploy`, `env`, `feature`). Rename your plugin to a unique key. + +#### My templates aren't shipping in the wheel + +Set `include-package-data = true` in `pyproject.toml` and add a `package-data` glob like `"*" = ["*.j2"]`. Without these, Jinja templates next to your code aren't included in the built wheel. + +## Roadmap + +- **`bp feature {list,add,remove,info}`** — user-facing surface for the feature framework. Currently the registry exists; the commands don't. +- **Codemod execution** — apply AST-aware edits declared in `FeaturePlan.codemods`. Useful for "drop a module + register its router in `interfaces/main.py`" kinds of features. +- **Post-install hooks** — execute commands declared in `FeaturePlan.hooks`. 
Useful for "after writing the migration, run `alembic upgrade head`" kinds of features. + +The contracts (`Codemod`, `Hook`) are already in `cli.features.base` so plugin authors can declare them today without breaking the schema later. diff --git a/docs/getting-started/configuration.md b/docs/getting-started/configuration.md index 2ad3f6d7..09597b06 100644 --- a/docs/getting-started/configuration.md +++ b/docs/getting-started/configuration.md @@ -1,179 +1,225 @@ # Configuration -This guide covers the essential configuration steps to get your FastAPI application running quickly. +This guide covers the essential configuration steps to get your FastAPI application running. ## Quick Setup -The fastest way to get started is to copy the example environment file and modify just a few values: +Copy the example environment file and edit a few values: ```bash -cp src/.env.example src/.env +cd backend +cp .env.example .env ``` +The full set of variables lives in `backend/.env.example`. The sections below cover the ones you'll most likely want to change. + ## Essential Configuration -Open `src/.env` and set these required values: +Open `backend/.env` and set these required values. 
### Application Settings ```env -# App Settings -APP_NAME="Your app name here" -APP_DESCRIPTION="Your app description here" -APP_VERSION="0.1" -CONTACT_NAME="Your name" -CONTACT_EMAIL="Your email" -LICENSE_NAME="The license you picked" +APP_NAME=Your app name here +APP_DESCRIPTION=Your app description here +VERSION=0.1.0 +CONTACT_NAME=Your name +CONTACT_EMAIL=your@email.com +LICENSE_NAME=The license you picked ``` -### Database Connection +### Environment Type ```env -# Database -POSTGRES_USER="your_postgres_user" -POSTGRES_PASSWORD="your_password" -POSTGRES_SERVER="localhost" # Use "db" for Docker Compose -POSTGRES_PORT=5432 # Use 5432 for Docker Compose -POSTGRES_DB="your_database_name" +# Options: development, staging, production, local +ENVIRONMENT=development ``` -### PGAdmin (Optional) +- **development**: API docs at `/docs`, `/redoc`, verbose logging +- **staging**: Structured logs, file output enabled +- **production**: JSON logs, security validation, docs gated by `ENABLE_DOCS_IN_PRODUCTION` +- **local**: Same defaults as development; useful for tests -For database administration: +### Database ```env -# PGAdmin -PGADMIN_DEFAULT_EMAIL="your_email_address" -PGADMIN_DEFAULT_PASSWORD="your_password" -PGADMIN_LISTEN_PORT=80 +POSTGRES_USER=postgres +POSTGRES_PASSWORD=changeme +POSTGRES_DB=postgres +POSTGRES_SERVER=db # use "localhost" without Docker +POSTGRES_PORT=5432 +CREATE_TABLES_ON_STARTUP=true ``` -**To connect to database in PGAdmin:** - -1. Login with `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD` -1. Click "Add Server" -1. 
Use these connection settings: - - **Hostname/address**: `db` (if using containers) or `localhost` - - **Port**: Value from `POSTGRES_PORT` - - **Database**: `postgres` (leave as default) - - **Username**: Value from `POSTGRES_USER` - - **Password**: Value from `POSTGRES_PASSWORD` - ### Security -Generate a secret key and set it: +Generate a strong `SECRET_KEY`: ```bash -# Generate a secure secret key -openssl rand -hex 32 +python -c "import secrets; print(secrets.token_urlsafe(64))" ``` ```env -# Cryptography -SECRET_KEY="your-generated-secret-key-here" # Result of openssl rand -hex 32 -ALGORITHM="HS256" # Default: HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=30 # Default: 30 -REFRESH_TOKEN_EXPIRE_DAYS=7 # Default: 7 +SECRET_KEY=your-generated-secret-key-here + +# Production security validation (enabled by default in production) +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=false ``` -### First Admin User +### Sessions ```env -# Admin User -ADMIN_NAME="your_name" -ADMIN_EMAIL="your_email" -ADMIN_USERNAME="your_username" -ADMIN_PASSWORD="your_password" +SESSION_TIMEOUT_MINUTES=30 +SESSION_CLEANUP_INTERVAL_MINUTES=15 +MAX_SESSIONS_PER_USER=5 +SESSION_SECURE_COOKIES=true +SESSION_BACKEND=redis +SESSION_COOKIE_MAX_AGE=86400 + +# CSRF protection (set false to disable in dev/test) +CSRF_ENABLED=true + +# Login rate limiting +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 ``` -### Redis Configuration +### First Admin User + +The `setup_initial_data` script reads these on first run: ```env -# Redis Cache -REDIS_CACHE_HOST="localhost" # Use "redis" for Docker Compose -REDIS_CACHE_PORT=6379 - -# Client-side Cache -CLIENT_CACHE_MAX_AGE=30 # Default: 30 seconds +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password +``` -# Redis Job Queue -REDIS_QUEUE_HOST="localhost" # Use "redis" for Docker Compose -REDIS_QUEUE_PORT=6379 +Then run: -# Redis Rate Limiting -REDIS_RATE_LIMIT_HOST="localhost" # Use 
"redis" for Docker Compose -REDIS_RATE_LIMIT_PORT=6379 +```bash +uv run python -m scripts.setup_initial_data ``` -!!! warning "Redis in Production" -You may use the same Redis instance for caching and queues while developing, but use separate containers in production. +### Cache (Redis or Memcached) -### Rate Limiting Defaults +```env +CACHE_ENABLED=true +CACHE_BACKEND=redis # or "memcached" +DEFAULT_CACHE_EXPIRATION=3600 + +# Client-side cache (Cache-Control headers) +CLIENT_CACHE_ENABLED=true +CLIENT_CACHE_MAX_AGE=60 + +# Redis settings +CACHE_REDIS_HOST=redis # use "localhost" without Docker +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +CACHE_REDIS_PASSWORD= +``` + +### Rate Limiting ```env -# Default Rate Limits -DEFAULT_RATE_LIMIT_LIMIT=10 # Default: 10 requests -DEFAULT_RATE_LIMIT_PERIOD=3600 # Default: 3600 seconds (1 hour) +RATE_LIMITER_ENABLED=true +RATE_LIMITER_BACKEND=redis # or "memcached" +RATE_LIMITER_FAIL_OPEN=true +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 + +# Redis (uses DB 1 by default to separate from cache) +RATE_LIMITER_REDIS_HOST=redis # use "localhost" without Docker +RATE_LIMITER_REDIS_PORT=6379 +RATE_LIMITER_REDIS_DB=1 +RATE_LIMITER_REDIS_PASSWORD= ``` -### CORS Configuration +### Background Tasks (Taskiq) + +```env +TASKIQ_ENABLED=true +TASKIQ_BROKER_TYPE=redis # or "rabbitmq" + +# Redis broker (uses DB 3 by default) +TASKIQ_REDIS_HOST=redis # use "localhost" without Docker +TASKIQ_REDIS_PORT=6379 +TASKIQ_REDIS_DB=3 +TASKIQ_REDIS_PASSWORD= + +TASKIQ_WORKER_CONCURRENCY=2 +TASKIQ_MAX_TASKS_PER_WORKER=1000 +``` -Configure Cross-Origin Resource Sharing for your frontend: +### CORS ```env -# CORS Settings -CORS_ORIGINS=["*"] # Comma-separated origins (use specific domains in production) -CORS_METHODS=["*"] # Comma-separated HTTP methods or "*" for all -CORS_HEADERS=["*"] # Comma-separated headers or "*" for all +CORS_ENABLED=true +CORS_ORIGINS=* # comma-separated origins +CORS_ALLOW_CREDENTIALS=true +CORS_ALLOW_METHODS=* 
+CORS_ALLOW_HEADERS=* ``` !!! warning "CORS in Production" -Never use `"*"` for CORS_ORIGINS in production. Specify exact domains: -`env CORS_ORIGINS=["https://yourapp.com","https://www.yourapp.com"] CORS_METHODS=["GET","POST","PUT","DELETE","PATCH"] CORS_HEADERS=["Authorization","Content-Type"] ` + Never use `*` for `CORS_ORIGINS` in production. Specify exact domains and explicit methods/headers: -### First Tier + ```env + CORS_ORIGINS=https://yourapp.com,https://www.yourapp.com + CORS_ALLOW_METHODS=GET,POST,PUT,DELETE,PATCH + CORS_ALLOW_HEADERS=Authorization,Content-Type + ``` + +### OAuth (Optional) + +For Google / GitHub sign-in: ```env -# Default Tier -TIER_NAME="free" +OAUTH_REDIRECT_BASE_URL=http://localhost:8000 + +# Google OAuth +OAUTH_GOOGLE_CLIENT_ID= +OAUTH_GOOGLE_CLIENT_SECRET= + +# GitHub OAuth +OAUTH_GITHUB_CLIENT_ID= +OAUTH_GITHUB_CLIENT_SECRET= ``` -## Environment Types +Leave the credentials empty to disable a provider. See [Authentication](../user-guide/authentication/index.md) for the OAuth setup walkthrough. -Set your environment type: +### Admin Interface ```env -ENVIRONMENT="local" # local, staging, or production +ADMIN_ENABLED=true # enables SQLAdmin at /admin ``` -- **local**: API docs available at `/docs`, `/redoc`, and `/openapi.json` -- **staging**: API docs available to superusers only -- **production**: API docs completely disabled - ## Docker Compose Settings -If using Docker Compose, use these values instead: +When running with Docker Compose, services reach each other by service name. Use these hosts in `.env`: ```env -# Docker Compose values -POSTGRES_SERVER="db" -REDIS_CACHE_HOST="redis" -REDIS_QUEUE_HOST="redis" -REDIS_RATE_LIMIT_HOST="redis" +POSTGRES_SERVER=db +CACHE_REDIS_HOST=redis +RATE_LIMITER_REDIS_HOST=redis +TASKIQ_REDIS_HOST=redis ``` -## Optional Services +## That's It -The boilerplate includes Redis for caching, job queues, and rate limiting. 
If running locally without Docker, either: +With these settings, start the app: -1. **Install Redis** and keep the default settings -1. **Disable Redis services** (see [User Guide - Configuration](../user-guide/configuration/index.md) for details) +=== "Local with uv" -## That's It! + ```bash + uv run fastapi dev src/interfaces/main.py + ``` -With these basic settings configured, you can start the application: +=== "Docker Compose" -- **Docker Compose**: `docker compose up` -- **Manual**: `uv run uvicorn src.app.main:app --reload` + ```bash + docker compose up + ``` -For detailed configuration options, advanced settings, and production deployment, see the [User Guide - Configuration](../user-guide/configuration/index.md). +For the full reference and advanced settings, see [User Guide → Configuration](../user-guide/configuration/index.md). diff --git a/docs/getting-started/first-run.md b/docs/getting-started/first-run.md index e12fceb7..0d6b6801 100644 --- a/docs/getting-started/first-run.md +++ b/docs/getting-started/first-run.md @@ -1,614 +1,280 @@ # First Run Guide -Congratulations on setting up the FastAPI Boilerplate! This guide will walk you through testing your installation, understanding the basics, and making your first customizations. +This guide walks you through verifying your installation, creating the admin user, and testing the main features. ## Verification Checklist -Before diving deeper, let's verify everything is working correctly. +Before diving deeper, verify everything is working. -### 1. Check All Services +### 1. 
Check Services -Ensure all services are running: +=== "Docker Compose" -```bash -# For Docker Compose users -docker compose ps - -# Expected output: -# NAME COMMAND SERVICE STATUS -# fastapi-boilerplate-web-1 "uvicorn app.main:app…" web running -# fastapi-boilerplate-db-1 "docker-entrypoint.s…" db running -# fastapi-boilerplate-redis-1 "docker-entrypoint.s…" redis running -# fastapi-boilerplate-worker-1 "arq src.app.core.wo…" worker running -``` + ```bash + docker compose ps + ``` -### 2. Test API Endpoints + You should see `web`, `db`, and `redis` services in `running` state. -Visit these URLs to confirm your API is working: +=== "Local with uv" -**API Documentation:** -- **Swagger UI**: [http://localhost:8000/docs](http://localhost:8000/docs) -- **ReDoc**: [http://localhost:8000/redoc](http://localhost:8000/redoc) + Verify Postgres and Redis are reachable: -**Health Check:** -```bash -curl http://localhost:8000/api/v1/health -``` + ```bash + pg_isready -h localhost -p 5432 + redis-cli ping # should print PONG + ``` -Expected response: -```json -{ - "status":"healthy", - "environment":"local", - "version":"0.1.0", - "timestamp":"2025-10-21T14:40:14+00:00" -} -``` +### 2. Test API Documentation + +Open these in a browser: + +- **Swagger UI**: +- **ReDoc**: + +### 3. Health Check -**Ready Check:** ```bash -curl http://localhost:8000/api/v1/ready +curl http://localhost:8000/health ``` Expected response: + ```json { - "status":"healthy", - "environment":"local", - "version":"0.1.0", - "app":"healthy", - "database":"healthy", - "redis":"healthy", - "timestamp":"2025-10-21T14:40:47+00:00" + "status": "healthy" } ``` -### 3. Database Connection +### 4. 
Database Tables -Check if the database tables were created: +Check that tables were created: -```bash -# For Docker Compose -docker compose exec db psql -U postgres -d myapp -c "\dt" - -# You should see tables like: -# public | users | table | postgres -# public | posts | table | postgres -# public | tiers | table | postgres -# public | rate_limits | table | postgres -``` +=== "Docker Compose" -### 4. Redis Connection + ```bash + docker compose exec db psql -U postgres -d postgres -c "\dt" + ``` -Test Redis connectivity: +=== "Local with uv" -```bash -# For Docker Compose -docker compose exec redis redis-cli ping + ```bash + psql -h localhost -U postgres -d postgres -c "\dt" + ``` -# Expected response: PONG -``` +You should see tables like `user`, `tiers`, `rate_limits`, `api_keys`, `key_usage`, `key_permissions`. ## Initial Setup -Before testing features, you need to create the first superuser and tier. - -### Creating the First Superuser +Create the first admin user and the default tier. !!! warning "Prerequisites" - Make sure the database and tables are created before running create_superuser. The database should be running and the API should have started at least once. - -#### Using Docker Compose - -If using Docker Compose, uncomment this section in your `docker-compose.yml`: - -```yaml -#-------- uncomment to create first superuser -------- -create_superuser: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - command: python -m src.scripts.create_first_superuser - volumes: - - ./src:/code/src -``` - -Then run: + Make sure the database tables are created before running this. With `CREATE_TABLES_ON_STARTUP=true` (default), this happens automatically the first time the app boots. 
-```bash -# Start services and run create_superuser automatically -docker compose up -d +### Create Admin User and Default Tier -# Or run it manually -docker compose run --rm create_superuser +The admin credentials come from `ADMIN_NAME`, `ADMIN_EMAIL`, `ADMIN_USERNAME`, and `ADMIN_PASSWORD` in `backend/.env`. -# Stop the create_superuser service when done -docker compose stop create_superuser -``` +=== "Docker Compose" -#### From Scratch + ```bash + docker compose exec web python -m scripts.setup_initial_data + ``` -If running manually, use: +=== "Local with uv" -```bash -# Make sure you're in the root folder -uv run python -m src.scripts.create_first_superuser -``` + ```bash + cd backend + uv run python -m scripts.setup_initial_data + ``` -### Creating the First Tier +This creates: -!!! warning "Prerequisites" - Make sure the database and tables are created before running create_tier. +- A default tier (used as the fallback for new users) +- The admin user (with `is_superuser=true`) -#### Using Docker Compose +## Testing Core Features -Uncomment the `create_tier` service in `docker-compose.yml` and run: +### Authentication Flow (Sessions) -```bash -docker compose run --rm create_tier -``` +This boilerplate uses **server-side sessions** with HTTP-only cookies — no JWT. -#### From Scratch +#### 1. Log In ```bash -# Make sure you're in the root folder -uv run python -m src.scripts.create_first_tier +curl -X POST "http://localhost:8000/api/v1/auth/login" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=admin&password=your_admin_password" \ + -c cookies.txt ``` -## Testing Core Features - -Let's test the main features of your API. +Response sets an HTTP-only `session_id` cookie and returns a CSRF token: -### Authentication Flow +```json +{ "csrf_token": "..." } +``` -#### 1. Login with Admin User +`cookies.txt` now holds your session — pass it back with `-b cookies.txt` on subsequent requests. 
-Use the admin credentials you set in your `.env` file: +#### 2. Get the Current User ```bash -curl -X POST "http://localhost:8000/api/v1/login" \ - -H "Content-Type: application/x-www-form-urlencoded" \ - -d "username=admin&password=your_admin_password" -``` - -You should receive a response like: -```json -{ - "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...", - "token_type": "bearer", - "refresh_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9..." -} +curl http://localhost:8000/api/v1/users/me -b cookies.txt ``` -#### 2. Create a New User +#### 3. Create a New User ```bash -curl -X POST "http://localhost:8000/api/v1/users" \ +curl -X POST "http://localhost:8000/api/v1/users/" \ -H "Content-Type: application/json" \ -d '{ "name": "John Doe", - "username": "johndoe", + "username": "johndoe", "email": "john@example.com", "password": "securepassword123" }' ``` -#### 3. Test Protected Endpoint +(User creation is open — no auth required.) -Use the access token from step 1: +#### 4. Check Auth Status ```bash -curl -X GET "http://localhost:8000/api/v1/users/me" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" +curl http://localhost:8000/api/v1/auth/check-auth -b cookies.txt ``` -### CRUD Operations +Returns `{"authenticated": true, "user": {...}, "session": {...}}` when logged in. -#### 1. Create a Post +#### 5. Log Out ```bash -curl -X POST "http://localhost:8000/api/v1/posts" \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" \ - -d '{ - "title": "My First Post", - "content": "This is the content of my first post!" - }' +curl -X POST "http://localhost:8000/api/v1/auth/logout" -b cookies.txt -c cookies.txt ``` -#### 2. Get All Posts - -```bash -curl -X GET "http://localhost:8000/api/v1/posts" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" -``` +### API Keys -#### 3. 
Get Posts with Pagination +For programmatic access (machine-to-machine clients), create an API key while logged in: ```bash -curl -X GET "http://localhost:8000/api/v1/posts?page=1&items_per_page=5" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" +curl -X POST "http://localhost:8000/api/v1/api-keys/" \ + -H "Content-Type: application/json" \ + -b cookies.txt \ + -d '{ + "name": "My Integration Key", + "permissions": {}, + "usage_limits": {} + }' ``` -### Background Tasks +⚠️ **The full API key is shown once in the response.** Store it securely. -Test the job queue system: - -#### 1. Submit a Background Task +List your keys: ```bash -curl -X POST "http://localhost:8000/api/v1/tasks/task?message=hello" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" -``` - -Response: -```json -{ - "id": "550e8400-e29b-41d4-a716-446655440000" -} +curl http://localhost:8000/api/v1/api-keys/ -b cookies.txt ``` -#### 2. Check Task Status +### Tiers and Rate Limits ```bash -curl -X GET "http://localhost:8000/api/v1/tasks/task/550e8400-e29b-41d4-a716-446655440000" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" -``` +# List tiers +curl http://localhost:8000/api/v1/tiers/ -### Caching +# Get a tier by name +curl http://localhost:8000/api/v1/tiers/free -Test the caching system: - -#### 1. Make a Cached Request - -```bash -# First request (cache miss) -curl -X GET "http://localhost:8000/api/v1/users/johndoe" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" \ - -w "Time: %{time_total}s\n" - -# Second request (cache hit - should be faster) -curl -X GET "http://localhost:8000/api/v1/users/johndoe" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" \ - -w "Time: %{time_total}s\n" +# List rate limits +curl http://localhost:8000/api/v1/rate-limits/ ``` -## Your First Customization - -Let's create a simple custom endpoint to see how easy it is to extend the boilerplate. - -### 1. 
Create a Simple Model - -Create `src/app/models/item.py`: - -```python -from sqlalchemy import String -from sqlalchemy.orm import Mapped, mapped_column - -from app.core.db.database import Base - - -class Item(Base): - __tablename__ = "items" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - name: Mapped[str] = mapped_column(String(100)) - description: Mapped[str] = mapped_column(String(500), default="") -``` - -### 2. Create Pydantic Schemas - -Create `src/app/schemas/item.py`: - -```python -from pydantic import BaseModel, Field - - -class ItemBase(BaseModel): - name: str = Field(..., min_length=1, max_length=100) - description: str = Field("", max_length=500) - - -class ItemCreate(ItemBase): - pass - - -class ItemCreateInternal(ItemCreate): - pass - - -class ItemRead(ItemBase): - id: int - - -class ItemUpdate(BaseModel): - name: str | None = None - description: str | None = None - - -class ItemUpdateInternal(ItemUpdate): - pass - - -class ItemDelete(BaseModel): - is_deleted: bool = True -``` - -### 3. Create CRUD Operations - -Create `src/app/crud/crud_items.py`: - -```python -from fastcrud import FastCRUD - -from app.models.item import Item -from app.schemas.item import ItemCreateInternal, ItemUpdate, ItemUpdateInternal, ItemDelete - -CRUDItem = FastCRUD[Item, ItemCreateInternal, ItemUpdate, ItemUpdateInternal, ItemDelete] -crud_items = CRUDItem(Item) -``` +### Caching -### 4. 
Create API Endpoints - -Create `src/app/api/v1/items.py`: - -```python -from typing import Annotated - -from fastapi import APIRouter, Depends, HTTPException -from sqlalchemy.ext.asyncio import AsyncSession - -from app.api.dependencies import get_current_user -from app.core.db.database import async_get_db -from app.crud.crud_items import crud_items -from app.schemas.item import ItemCreate, ItemRead, ItemUpdate -from app.schemas.user import UserRead - -router = APIRouter(tags=["items"]) - - -@router.post("/", response_model=ItemRead, status_code=201) -async def create_item( - item: ItemCreate, - db: Annotated[AsyncSession, Depends(async_get_db)], - current_user: Annotated[UserRead, Depends(get_current_user)] -): - """Create a new item.""" - db_item = await crud_items.create(db=db, object=item) - return db_item - - -@router.get("/{item_id}", response_model=ItemRead) -async def get_item( - item_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Get an item by ID.""" - db_item = await crud_items.get(db=db, id=item_id) - if not db_item: - raise HTTPException(status_code=404, detail="Item not found") - return db_item - - -@router.get("/", response_model=list[ItemRead]) -async def get_items( - db: Annotated[AsyncSession, Depends(async_get_db)], - skip: int = 0, - limit: int = 100 -): - """Get all items.""" - items = await crud_items.get_multi(db=db, offset=skip, limit=limit) - return items["data"] - - -@router.patch("/{item_id}", response_model=ItemRead) -async def update_item( - item_id: int, - item_update: ItemUpdate, - db: Annotated[AsyncSession, Depends(async_get_db)], - current_user: Annotated[UserRead, Depends(get_current_user)] -): - """Update an item.""" - db_item = await crud_items.get(db=db, id=item_id) - if not db_item: - raise HTTPException(status_code=404, detail="Item not found") - - updated_item = await crud_items.update(db=db, object=item_update, id=item_id) - return updated_item - - -@router.delete("/{item_id}") -async def delete_item( 
- item_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)], - current_user: Annotated[UserRead, Depends(get_current_user)] -): - """Delete an item.""" - db_item = await crud_items.get(db=db, id=item_id) - if not db_item: - raise HTTPException(status_code=404, detail="Item not found") - - await crud_items.delete(db=db, id=item_id) - return {"message": "Item deleted successfully"} -``` +Repeat a request twice and watch the timing — the second one should hit Redis: -### 5. Register the Router - -Add your new router to `src/app/api/v1/__init__.py`: - -```python -from fastapi import APIRouter - -from app.api.v1.login import router as login_router -from app.api.v1.logout import router as logout_router -from app.api.v1.posts import router as posts_router -from app.api.v1.rate_limits import router as rate_limits_router -from app.api.v1.tasks import router as tasks_router -from app.api.v1.tiers import router as tiers_router -from app.api.v1.users import router as users_router -from app.api.v1.items import router as items_router # Add this line - -router = APIRouter(prefix="/v1") -router.include_router(login_router, prefix="/login") -router.include_router(logout_router, prefix="/logout") -router.include_router(users_router, prefix="/users") -router.include_router(posts_router, prefix="/posts") -router.include_router(tasks_router, prefix="/tasks") -router.include_router(tiers_router, prefix="/tiers") -router.include_router(rate_limits_router, prefix="/rate_limits") -router.include_router(items_router, prefix="/items") # Add this line +```bash +curl http://localhost:8000/api/v1/users/johndoe -b cookies.txt -w "\nTime: %{time_total}s\n" +curl http://localhost:8000/api/v1/users/johndoe -b cookies.txt -w "\nTime: %{time_total}s\n" ``` -### 6. 
Create and Run Migration - -Import your new model in `src/app/models/__init__.py`: +### Background Tasks (Taskiq) -```python -from .user import User -from .post import Post -from .tier import Tier -from .rate_limit import RateLimit -from .item import Item # Add this line -``` +Background processing is enabled out of the box but no example endpoint ships with the starter. To register and dispatch your own task, see [Background Tasks](../user-guide/background-tasks/index.md). -Create and run the migration: +To start a worker locally: ```bash -# For Docker Compose -docker compose exec web alembic revision --autogenerate -m "Add items table" -docker compose exec web alembic upgrade head - -# For manual installation -cd src -uv run alembic revision --autogenerate -m "Add items table" -uv run alembic upgrade head +cd backend +uv run taskiq worker infrastructure.taskiq.worker:default_broker ``` -### 7. Test Your New Endpoint - -Restart your application and test the new endpoints: +## Adding Your First Feature Module -```bash -# Create an item -curl -X POST "http://localhost:8000/api/v1/items/" \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" \ - -d '{ - "name": "My First Item", - "description": "This is a test item" - }' +The codebase uses **vertical-slice modules** — each feature owns its models, schemas, CRUD, service, and routes in one folder under `backend/src/modules/`. -# Get all items -curl -X GET "http://localhost:8000/api/v1/items/" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" -``` +For a step-by-step walkthrough of adding a new module, see the [Development Guide](../user-guide/development.md). 
## Debugging Common Issues -### Logs and Monitoring - -#### Check Application Logs +### Application Logs -```bash -# For Docker Compose -docker compose logs web +=== "Docker Compose" -# For manual installation -tail -f src/app/logs/app.log -``` + ```bash + docker compose logs -f web + ``` -#### Check Database Logs +=== "Local with uv" -```bash -# For Docker Compose -docker compose logs db -``` + Logs are printed to stdout where `fastapi dev` is running. -#### Check Worker Logs +### Database Logs ```bash -# For Docker Compose -docker compose logs worker +docker compose logs -f db ``` -### Performance Testing +### Run Migrations Manually -#### Test API Response Times +If you need to re-run migrations: ```bash -# Test endpoint performance -curl -w "Time: %{time_total}s\n" \ - -H "Authorization: Bearer YOUR_ACCESS_TOKEN_HERE" \ - http://localhost:8000/api/v1/users/me -``` - -#### Test Database Performance - -```bash -# Check active connections -docker compose exec db psql -U postgres -d myapp -c "SELECT count(*) FROM pg_stat_activity;" -``` - -## Monitoring Dashboard - -### Redis Monitor - -```bash -# Monitor Redis operations -docker compose exec redis redis-cli monitor +cd backend +uv run alembic upgrade head ``` -### Database Activity +### Reset Everything (Docker) ```bash -# Check database activity -docker compose exec db psql -U postgres -d myapp -c "SELECT * FROM pg_stat_activity;" +cd backend +docker compose down -v # ⚠️ wipes the database volume +docker compose up ``` ## Next Steps -Now that you've verified everything works and created your first custom endpoint, you're ready to dive deeper: +You've verified your install and tested the main features. Now: -### Essential Learning +### Essential Reading -1. **[Project Structure](../user-guide/project-structure.md)** - Understand how the code is organized -2. **[Database Guide](../user-guide/database/index.md)** - Learn about models, schemas, and CRUD operations -3. 
**[Authentication](../user-guide/authentication/index.md)** - Deep dive into JWT and user management +1. **[Project Structure](../user-guide/project-structure.md)** - How the code is organized +2. **[Database Guide](../user-guide/database/index.md)** - Models, schemas, CRUD +3. **[Authentication](../user-guide/authentication/index.md)** - Sessions, OAuth, API keys ### Advanced Features -1. **[Caching](../user-guide/caching/index.md)** - Speed up your API with Redis caching -2. **[Background Tasks](../user-guide/background-tasks/index.md)** - Process long-running tasks asynchronously -3. **[Rate Limiting](../user-guide/rate-limiting/index.md)** - Protect your API from abuse +1. **[Caching](../user-guide/caching/index.md)** - Redis-backed cache +2. **[Background Tasks](../user-guide/background-tasks/index.md)** - Async jobs with Taskiq +3. **[Rate Limiting](../user-guide/rate-limiting/index.md)** - Per-tier rate limits ### Development Workflow -1. **[Development Guide](../user-guide/development.md)** - Best practices for extending the boilerplate -2. **[Testing](../user-guide/testing.md)** - Write tests for your new features -3. **[Production](../user-guide/production.md)** - Deploy your API to production +1. **[Development Guide](../user-guide/development.md)** - Extend the boilerplate +2. **[Testing](../user-guide/testing.md)** - Test your features +3. **[Production](../user-guide/production.md)** - Deploy ## Getting Help -If you encounter any issues: - -1. **Check the logs** for error messages -2. **Verify your configuration** in the `.env` file -3. **Review the [GitHub Issues](https://github.com/benavlabs/fastapi-boilerplate/issues)** for common solutions -4. **Search [existing issues](https://github.com/benavlabs/fastapi-boilerplate/issues)** on GitHub -5. **Create a [new issue](https://github.com/benavlabs/fastapi-boilerplate/issues/new)** with detailed information - -## Congratulations! 
- -You've successfully: - -- Verified your FastAPI Boilerplate installation -- Tested core API functionality -- Created your first custom endpoint -- Run database migrations -- Tested authentication and CRUD operations - -You're now ready to build amazing APIs with FastAPI! \ No newline at end of file +- **Check the logs** for error messages +- **Verify your `backend/.env`** has the right values +- **Search [GitHub Issues](https://github.com/benavlabs/fastapi-boilerplate/issues)** for similar problems +- **Open a [new issue](https://github.com/benavlabs/fastapi-boilerplate/issues/new)** with details diff --git a/docs/getting-started/index.md b/docs/getting-started/index.md index b5356ee9..83b0044f 100644 --- a/docs/getting-started/index.md +++ b/docs/getting-started/index.md @@ -1,140 +1,116 @@ # Getting Started -Welcome to the FastAPI Boilerplate! This guide will have you up and running with a production-ready API in just a few minutes. +Welcome to the FastAPI Boilerplate! This guide will have you up and running in just a few minutes. ## Quick Start (5 minutes) -The fastest way to get started is using Docker Compose. This will set up everything you need including PostgreSQL, Redis, and the API server. +Pick whichever workflow fits you: -### Prerequisites +=== "Local with uv" -Make sure you have installed: + ### Prerequisites -- [Docker](https://docs.docker.com/get-docker/) (20.10+) -- [Docker Compose](https://docs.docker.com/compose/install/) (1.29+) + - [uv](https://docs.astral.sh/uv/getting-started/installation/) (0.4+) + - PostgreSQL and Redis running locally (or use the Docker tab) -### 1. Get the Template + ### 1. Get the Template -Start by using this template for your new project: + 1. Click **"Use this template"** on the [GitHub repository](https://github.com/benavlabs/fastapi-boilerplate) + 2. Create a new repository with your project name + 3. Clone your new repository and `cd` into the backend: -1. 
Click **"Use this template"** on the [GitHub repository](https://github.com/benavlabs/fastapi-boilerplate) -2. Create a new repository with your project name -3. Clone your new repository: + ```bash + git clone https://github.com/yourusername/your-project-name + cd your-project-name/backend + ``` -```bash -git clone https://github.com/yourusername/your-project-name -cd your-project-name -``` + ### 2. Install Dependencies -### 2. Environment Setup + ```bash + uv sync --extra dev + ``` -Create your environment configuration: + ### 3. Environment Setup -```bash -# Create the environment file -touch src/.env -``` + ```bash + cp .env.example .env + # then edit .env to set your database creds, SECRET_KEY, etc. + ``` -Add the following basic configuration to `src/.env`: - -```env -# Application -APP_NAME="My FastAPI App" -APP_DESCRIPTION="My awesome API" -APP_VERSION="0.1.0" - -# Database -POSTGRES_USER="postgres" -POSTGRES_PASSWORD="changethis" -POSTGRES_SERVER="db" -POSTGRES_PORT=5432 -POSTGRES_DB="myapp" - -# Security -SECRET_KEY="your-secret-key-here" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 - -# Redis -REDIS_CACHE_HOST="redis" -REDIS_CACHE_PORT=6379 -REDIS_QUEUE_HOST="redis" -REDIS_QUEUE_PORT=6379 - -# Admin User -ADMIN_NAME="Admin" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="changethis" - -# Environment -ENVIRONMENT="local" -``` + !!! warning "SECRET_KEY" + Generate a secure key with `python -c "import secrets; print(secrets.token_urlsafe(64))"` and replace the default value in `.env`. -!!! warning "Security Note" - Generate a secure secret key using: `openssl rand -hex 32` + ### 4. Run the Server -### 3. 
Start the Application + ```bash + uv run fastapi dev src/interfaces/main.py + ``` -Launch all services with a single command: +=== "Docker Compose" -```bash -docker compose up -``` + ### Prerequisites -This will start: -- **FastAPI server** on port 8000 -- **PostgreSQL database** -- **Redis** for caching and job queues -- **Worker** for background tasks + - [Docker](https://docs.docker.com/get-docker/) (20.10+) + - [Docker Compose](https://docs.docker.com/compose/install/) (v2+) -### 4. Verify Installation + ### 1. Get the Template -Once the containers are running, you should see output like: + ```bash + git clone https://github.com/yourusername/your-project-name + cd your-project-name/backend + ``` -``` -fastapi-boilerplate-web-1 | INFO: Application startup complete. -fastapi-boilerplate-db-1 | database system is ready to accept connections -fastapi-boilerplate-worker-1 | redis_version=7.x.x mem_usage=1MB clients_connected=1 -``` + ### 2. Environment Setup + + ```bash + cp .env.example .env + # then edit .env to set your DB password, SECRET_KEY, etc. + ``` + + ### 3. Start Everything + + ```bash + docker compose up + ``` + + This brings up: + + - **FastAPI app** on port 8000 + - **PostgreSQL** database + - **Redis** for cache, rate limiting, and sessions + +## Verify It's Running -Visit these URLs to confirm everything is working: +Visit: -- **API Documentation**: [http://localhost:8000/docs](http://localhost:8000/docs) -- **Alternative Docs**: [http://localhost:8000/redoc](http://localhost:8000/redoc) -- **Health Check**: [http://localhost:8000/api/v1/health](http://localhost:8000/api/v1/health) -- **Ready Check**: [http://localhost:8000/api/v1/ready](http://localhost:8000/api/v1/ready) +- **Swagger UI**: [http://localhost:8000/docs](http://localhost:8000/docs) +- **ReDoc**: [http://localhost:8000/redoc](http://localhost:8000/redoc) +- **Health Check**: [http://localhost:8000/health](http://localhost:8000/health) ## You're Ready! -Congratulations! 
You now have a fully functional FastAPI application with: +You now have a working FastAPI app with: -- REST API with automatic documentation -- PostgreSQL database with migrations -- Redis caching and job queues -- JWT authentication system -- Background task processing -- Rate limiting -- Admin user created +- REST API with automatic OpenAPI docs +- PostgreSQL database with Alembic migrations +- Redis-backed cache and rate limiting +- Session-based authentication with optional OAuth (Google, GitHub) +- API keys with per-key permissions +- SQLAdmin admin interface at `/admin` +- Async background task support via Taskiq ## Test Your API -Try these quick tests to see your API in action: - ### 1. Health Check -```bash -curl http://localhost:8000/api/v1/health -``` -### 2. Ready Check ```bash -curl http://localhost:8000/api/v1/ready +curl http://localhost:8000/health ``` -### 3. Create a User +### 2. Create a User + ```bash -curl -X POST "http://localhost:8000/api/v1/users" \ +curl -X POST "http://localhost:8000/api/v1/users/" \ -H "Content-Type: application/json" \ -d '{ "name": "John Doe", @@ -144,38 +120,47 @@ curl -X POST "http://localhost:8000/api/v1/users" \ }' ``` -### 4. Login +### 3. Log In (Session Cookie) + ```bash -curl -X POST "http://localhost:8000/api/v1/login" \ +curl -X POST "http://localhost:8000/api/v1/auth/login" \ -H "Content-Type: application/x-www-form-urlencoded" \ - -d "username=johndoe&password=securepassword" + -d "username=johndoe&password=securepassword" \ + -c cookies.txt ``` -## Next Steps +The response sets an HTTP-only session cookie and returns a CSRF token. Use `-b cookies.txt` on subsequent requests to send the session along. -Now that you have the basics running, explore these guides to learn more: +### 4. 
Get the Current User + +```bash +curl http://localhost:8000/api/v1/users/me -b cookies.txt +``` + +## Next Steps ### Essential Reading -- **[Configuration Guide](configuration.md)** - Understand all configuration options -- **[Project Structure](../user-guide/project-structure.md)** - Learn how the code is organized -- **[Authentication](../user-guide/authentication/index.md)** - Set up user management + +- **[Configuration Guide](configuration.md)** - Environment variables and settings +- **[Project Structure](../user-guide/project-structure.md)** - How the code is organized +- **[Authentication](../user-guide/authentication/index.md)** - Sessions, OAuth, and API keys ### Popular Features -- **[Database Operations](../user-guide/database/index.md)** - Working with models and CRUD -- **[Caching](../user-guide/caching/index.md)** - Speed up your API with Redis caching -- **[Background Tasks](../user-guide/background-tasks/index.md)** - Process jobs asynchronously + +- **[Database Operations](../user-guide/database/index.md)** - Models, schemas, and CRUD +- **[Caching](../user-guide/caching/index.md)** - Redis-backed caching +- **[Background Tasks](../user-guide/background-tasks/index.md)** - Async jobs with Taskiq - **[Rate Limiting](../user-guide/rate-limiting/index.md)** - Protect your API from abuse ### Development & Deployment -- **[Development Guide](../user-guide/development.md)** - Extend and customize the boilerplate + +- **[Development Guide](../user-guide/development.md)** - Extend and customize - **[Testing](../user-guide/testing.md)** - Write tests for your API - **[Production Deployment](../user-guide/production.md)** - Deploy to production ## Alternative Setup Methods -Not using Docker? No problem! - -- **[Manual Installation](installation.md)** - Install dependencies manually +- **[Manual Installation](installation.md)** - Step-by-step setup details ## Need Help? @@ -184,4 +169,4 @@ Not using Docker? No problem! 
--- -**Ready to dive deeper?** Continue with the [detailed installation guide](installation.md) or explore the [user guide](../user-guide/index.md). \ No newline at end of file +**Ready to dive deeper?** Continue with the [installation guide](installation.md) or jump to the [user guide](../user-guide/index.md). diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index f040f3dd..87441f47 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -1,416 +1,301 @@ # Installation Guide -This guide covers different ways to install and set up the FastAPI Boilerplate depending on your needs and environment. +This guide covers different ways to install the FastAPI Boilerplate depending on your needs. ## System Requirements -Before you begin, ensure your system meets these requirements: - - **Python**: 3.11 or higher -- **Operating System**: Linux, macOS, or Windows (with WSL2 recommended) -- **Memory**: Minimum 4GB RAM (8GB recommended) -- **Disk Space**: At least 2GB free space +- **Operating System**: Linux, macOS, or Windows (WSL2 recommended) +- **Memory**: 4GB RAM minimum (8GB recommended) +- **Disk Space**: ~2GB free ## Method 1: Docker Compose (Recommended) -Docker Compose is the easiest way to get started. It handles all dependencies and services automatically. +Docker Compose handles every service for you (Postgres + Redis + the app). ### Prerequisites -Install these tools on your system: - -- [Docker](https://docs.docker.com/get-docker/) (version 20.10+) -- [Docker Compose](https://docs.docker.com/compose/install/) (version 1.29+) - -### Installation Steps - -1. **Get the template**: +- [Docker](https://docs.docker.com/get-docker/) (20.10+) +- [Docker Compose](https://docs.docker.com/compose/install/) (v2+) - ```bash - git clone https://github.com/benavlabs/fastapi-boilerplate - cd fastapi-boilerplate - ``` +### Steps -1. 
**Quick setup** (recommended): - - ```bash - # Interactive setup - choose your deployment type - ./setup.py - - # Or specify directly: ./setup.py local, ./setup.py staging, ./setup.py production - ``` - - This automatically copies the correct `Dockerfile`, `docker-compose.yml`, and `.env` files for your chosen deployment scenario. +1. **Clone the repository**: -1. **Start services**: + ```bash + git clone https://github.com/benavlabs/fastapi-boilerplate + cd fastapi-boilerplate/backend + ``` - ```bash - docker compose up -d - ``` +2. **Set up the environment file**: -#### Manual Setup Alternative + ```bash + cp .env.example .env + # Edit .env: set SECRET_KEY, change default DB password, etc. + ``` -If you prefer to set up manually: +3. **Bring up the stack**: -```bash -# Copy configuration files for local development -cp scripts/local_with_uvicorn/Dockerfile Dockerfile -cp scripts/local_with_uvicorn/docker-compose.yml docker-compose.yml -cp scripts/local_with_uvicorn/.env.example src/.env -# Edit src/.env with your configuration if needed -``` + ```bash + docker compose up + ``` -1. **Verify installation**: +4. **Verify it's running**: - ```bash - curl http://localhost:8000/docs - ``` + ```bash + curl http://localhost:8000/health + ``` ### What Gets Installed -Docker Compose sets up these services: +- **App** (FastAPI + Uvicorn) on port 8000 +- **PostgreSQL** on port 5432 +- **Redis** on port 6379 -- **Web server** (FastAPI + Uvicorn) on port 8000 -- **PostgreSQL** database on port 5432 (internal) -- **Redis** server on port 6379 (internal) -- **ARQ Worker** for background tasks -- **NGINX** (optional, for production) +## Method 2: Manual Installation (Local with uv) -## Method 2: Manual Installation - -For more control or development purposes, you can install everything manually. +For development on your host machine. You provide PostgreSQL and Redis yourself. ### Prerequisites -1. 
**Install Python 3.11+**: - - ```bash - # On Ubuntu/Debian - sudo apt update - sudo apt install python3.11 python3.11-pip +1. **Python 3.11+**: - # On macOS (with Homebrew) - brew install python@3.11 + ```bash + # Ubuntu/Debian + sudo apt update && sudo apt install python3.11 - # On Windows - # Download from python.org - ``` + # macOS + brew install python@3.11 -1. **Install uv** (Python package manager): + # Windows: download from python.org + ``` - ```bash - pip install uv - ``` +2. **uv** (fast Python package manager): -1. **Install PostgreSQL**: + ```bash + # macOS / Linux + curl -LsSf https://astral.sh/uv/install.sh | sh - ```bash - # On Ubuntu/Debian - sudo apt install postgresql postgresql-contrib + # or via pip + pip install uv + ``` - # On macOS - brew install postgresql +3. **PostgreSQL**: - # On Windows - # Download from postgresql.org - ``` + ```bash + # Ubuntu/Debian + sudo apt install postgresql postgresql-contrib -1. **Install Redis**: + # macOS + brew install postgresql + ``` - ```bash - # On Ubuntu/Debian - sudo apt install redis-server +4. **Redis**: - # On macOS - brew install redis + ```bash + # Ubuntu/Debian + sudo apt install redis-server - # On Windows - # Download from redis.io - ``` + # macOS + brew install redis + ``` -### Installation Steps +### Steps 1. **Clone the repository**: - ```bash - git clone https://github.com/benavlabs/fastapi-boilerplate - cd fastapi-boilerplate - ``` - -1. **Install Python dependencies**: - - ```bash - uv sync - ``` - -1. **Set up environment variables**: + ```bash + git clone https://github.com/benavlabs/fastapi-boilerplate + cd fastapi-boilerplate + ``` - ```bash - cp src/.env.example src/.env - # Edit src/.env with your local database/Redis settings - ``` + The repo is a [uv workspace](https://docs.astral.sh/uv/concepts/projects/workspaces/) — the deployable application lives in `backend/` and the developer CLI (`bp`) lives in `cli/`. Both share a single virtual environment at the repo root. -1. 
**Set up PostgreSQL**: +2. **Install Python dependencies**: - ```bash - # Create database and user - sudo -u postgres psql - CREATE DATABASE myapp; - CREATE USER myuser WITH PASSWORD 'mypassword'; - GRANT ALL PRIVILEGES ON DATABASE myapp TO myuser; - \q - ``` + ```bash + uv sync --all-packages --all-extras + ``` -1. **Run database migrations**: + This syncs the whole workspace — backend, CLI, and dev tools — into one `.venv/` at the repo root. From here on, `uv run ` works from any subdirectory. - ```bash - cd src - uv run alembic upgrade head - ``` +3. **Set up environment variables**: -1. **Create admin user**: + ```bash + cp backend/.env.example backend/.env + # Edit backend/.env: point POSTGRES_SERVER and CACHE_REDIS_HOST at localhost, + # set a strong SECRET_KEY (or run `uv run bp env gen-secret`), etc. + ``` - ```bash - uv run python -m src.scripts.create_first_superuser - ``` + The CLI ships a few helpers for this step — see [CLI → Commands](../cli/commands.md): -1. **Start the application**: + ```bash + uv run bp env gen-secret # print a fresh SECRET_KEY + uv run bp env validate # audit the .env against the production validator + ``` - ```bash - uv run uvicorn src.app.main:app --reload --host 0.0.0.0 --port 8000 - ``` +4. **Set up PostgreSQL** (if not already configured): -1. **Start the worker** (in another terminal): + ```bash + sudo -u postgres psql + CREATE DATABASE myapp; + CREATE USER myuser WITH PASSWORD 'mypassword'; + GRANT ALL PRIVILEGES ON DATABASE myapp TO myuser; + \q + ``` - ```bash - uv run arq src.app.core.worker.settings.WorkerSettings - ``` +5. **Run database migrations** (from `backend/` — alembic looks for `alembic.ini` in cwd): -## Method 3: Development Setup - -For contributors and advanced users who want to modify the boilerplate. - -### Additional Prerequisites - -- **Git** for version control - -### Installation Steps - -1. 
**Fork and clone**: + ```bash + cd backend + uv run alembic upgrade head + ``` - ```bash - # Fork the repository on GitHub first - git clone https://github.com/yourusername/fastapi-boilerplate - cd fastapi-boilerplate - ``` +6. **Create the initial admin user and tier**: -1. **Install development dependencies**: + ```bash + uv run python -m scripts.setup_initial_data + ``` - ```bash - uv sync --group dev - ``` +7. **Start the app**: -1. **Set up pre-commit hooks**: + ```bash + uv run fastapi dev src/interfaces/main.py + ``` - ```bash - uv run pre-commit install - ``` +8. **(Optional) Start a Taskiq worker** (in a second terminal): -1. **Set up development environment**: + ```bash + uv run taskiq worker infrastructure.taskiq.worker:default_broker + ``` - ```bash - cp src/.env.example src/.env - # Configure for development - ``` +## Method 3: Development Setup -1. **Run tests to verify setup**: +For contributors and anyone modifying the boilerplate itself. - ```bash - uv run pytest - ``` +### Steps -## Docker Services Breakdown +1. **Fork and clone**: -Understanding what each Docker service does: + ```bash + # Fork on GitHub first + git clone https://github.com/yourusername/fastapi-boilerplate + cd fastapi-boilerplate + ``` -### Web Service +2. **Install dev dependencies**: -```yaml -web: - build: . - ports: - - "8000:8000" - depends_on: - - db - - redis -``` + ```bash + uv sync --all-packages --all-extras + ``` -- Runs the FastAPI application -- Handles HTTP requests -- Auto-reloads on code changes (development) +3. **Set up pre-commit hooks** (from the repo root): -### Database Service + ```bash + uv run --project backend pre-commit install + ``` -```yaml -db: - image: postgres:13 - environment: - POSTGRES_DB: myapp - POSTGRES_USER: postgres - POSTGRES_PASSWORD: changethis -``` +4. 
**Set up the environment**: -- PostgreSQL database server -- Persistent data storage -- Automatic initialization - -### Redis Service - -```yaml -redis: - image: redis:alpine - command: redis-server --appendonly yes -``` + ```bash + cp backend/.env.example backend/.env + ``` -- In-memory data store -- Used for caching and job queues -- Persistent storage with AOF +5. **Run the test suite to verify your setup**: -### Worker Service - -```yaml -worker: - build: . - command: arq src.app.core.worker.settings.WorkerSettings - depends_on: - - redis -``` - -- Background task processor -- Handles async jobs -- Scales independently + ```bash + uv run pytest + ``` ## Configuration ### Environment Variables -The application uses environment variables for configuration. Key variables: +The app reads its configuration from `backend/.env` (see `backend/.env.example` for the full reference). Common variables: ```env +# Environment +ENVIRONMENT=development + # Database POSTGRES_USER=postgres -POSTGRES_PASSWORD=changethis -POSTGRES_SERVER=localhost # or "db" for Docker +POSTGRES_PASSWORD=postgres +POSTGRES_DB=postgres +POSTGRES_SERVER=db # use "localhost" without Docker POSTGRES_PORT=5432 -POSTGRES_DB=myapp -# Redis -REDIS_CACHE_HOST=localhost # or "redis" for Docker -REDIS_CACHE_PORT=6379 +# Cache (Redis) +CACHE_BACKEND=redis +CACHE_REDIS_HOST=redis # use "localhost" without Docker +CACHE_REDIS_PORT=6379 # Security -SECRET_KEY=your-secret-key-here -ALGORITHM=HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=30 -``` - -### Database Connection - -For manual installation, update your database settings: +SECRET_KEY=insecure-change-this-in-production -```env -# Local PostgreSQL -POSTGRES_SERVER=localhost -POSTGRES_PORT=5432 - -# Docker PostgreSQL -POSTGRES_SERVER=db -POSTGRES_PORT=5432 +# Initial admin +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password ``` +See [Environment Variables](../user-guide/configuration/environment-variables.md) 
for the complete reference. + ## Verification -After installation, verify everything works: +After installing, verify everything works: -1. **API Documentation**: http://localhost:8000/docs -1. **Health Check**: http://localhost:8000/api/v1/health -1. **Ready Check**: http://localhost:8000/api/v1/ready -1. **Database Connection**: Check logs for successful connection -1. **Redis Connection**: Test caching functionality -1. **Background Tasks**: Submit a test job +1. **API Docs**: <http://localhost:8000/docs> +2. **Health Check**: `curl http://localhost:8000/health` +3. **Admin Panel**: <http://localhost:8000/admin> +4. **Database**: check the app log for "Database connected" / no errors at startup ## Troubleshooting -### Common Issues - -**Port Already in Use**: +### Port Already in Use ```bash -# Check what's using port 8000 +# Find what's on port 8000 lsof -i :8000 - -# Kill the process kill -9 <PID> ``` -**Database Connection Error**: +### Database Connection Error ```bash -# Check PostgreSQL status +# Linux sudo systemctl status postgresql - -# Restart PostgreSQL sudo systemctl restart postgresql + +# macOS (Homebrew) +brew services restart postgresql ``` -**Redis Connection Error**: +### Redis Connection Error ```bash -# Check Redis status -redis-cli ping - -# Start Redis +redis-cli ping # should print PONG +# If not, start it: redis-server ``` -**Permission Errors**: +### Docker — Clean Reset ```bash -# Fix Docker permissions -sudo usermod -aG docker $USER -# Log out and back in -``` - -### Docker Issues - -**Clean Reset**: - -```bash -# Stop all containers -docker compose down - -# Remove volumes (⚠️ deletes data) -docker compose down -v - -# Rebuild images +cd backend +docker compose down # stop containers +docker compose down -v # also delete volumes (⚠️ wipes data) docker compose build --no-cache - -# Start fresh docker compose up ``` ## Next Steps -After successful installation: - 1. **[Configuration Guide](configuration.md)** - Set up your environment -1. **[First Run](first-run.md)** - Test your installation -1. 
**[Project Structure](../user-guide/project-structure.md)** - Understand the codebase +2. **[First Run](first-run.md)** - Test your installation +3. **[Project Structure](../user-guide/project-structure.md)** - Understand the codebase ## Need Help? -If you encounter issues: - -- Check the [GitHub Issues](https://github.com/benavlabs/fastapi-boilerplate/issues) for common problems -- Search [existing issues](https://github.com/benavlabs/fastapi-boilerplate/issues) -- Create a [new issue](https://github.com/benavlabs/fastapi-boilerplate/issues/new) with details +- Check the [GitHub Issues](https://github.com/benavlabs/fastapi-boilerplate/issues) for known problems +- Open a [new issue](https://github.com/benavlabs/fastapi-boilerplate/issues/new) with details diff --git a/docs/index.md b/docs/index.md index a0c624c8..d3c5e984 100644 --- a/docs/index.md +++ b/docs/index.md @@ -44,9 +44,8 @@ This boilerplate leverages cutting-edge Python technologies: - **[SQLAlchemy 2.0](https://docs.sqlalchemy.org/en/20/)** - Python SQL toolkit and Object Relational Mapper - **[PostgreSQL](https://www.postgresql.org)** - Advanced open source relational database - **[Redis](https://redis.io)** - In-memory data store for caching and message brokering -- **[ARQ](https://arq-docs.helpmanual.io)** - Job queues and RPC with asyncio and Redis +- **[Taskiq](https://taskiq-python.github.io/)** - Async-first task queue with Redis/RabbitMQ brokers - **[Docker](https://docs.docker.com/compose/)** - Containerization for easy deployment -- **[NGINX](https://nginx.org/en/)** - High-performance web server for reverse proxy and load balancing ## Key Features @@ -54,14 +53,14 @@ This boilerplate leverages cutting-edge Python technologies: - Fully async architecture - Pydantic V2 for ultra-fast data validation - SQLAlchemy 2.0 with efficient query patterns -- Built-in caching with Redis -- Horizontal scaling with NGINX load balancing +- Built-in caching with Redis or Memcached ### Security & 
Authentication -- JWT-based authentication with refresh tokens -- Cookie-based secure token storage +- Server-side session authentication with secure HTTP-only cookies +- OAuth 2.0 sign-in (Google, with GitHub provider scaffolded) using PKCE +- API keys with per-key permissions and usage tracking +- CSRF protection and login rate limiting - Role-based access control with user tiers -- Rate limiting to prevent abuse - Production-ready security configurations ### Developer Experience @@ -76,24 +75,32 @@ This boilerplate leverages cutting-edge Python technologies: - Environment-based configuration - Structured logging - Health checks and monitoring -- NGINX reverse proxy setup -- Gunicorn with Uvicorn workers - Database connection pooling +- Async task processing with Taskiq workers ## Quick Start -Get up and running in less than 5 minutes: +Get up and running in less than 5 minutes. Pick whichever fits your workflow: -```bash -# Clone the repository -git clone https://github.com/benavlabs/fastapi-boilerplate -cd fastapi-boilerplate +=== "Local with uv" -# Start with Docker Compose -docker compose up -``` + ```bash + git clone https://github.com/benavlabs/fastapi-boilerplate + cd fastapi-boilerplate/backend + uv sync --extra dev + cp .env.example .env # then edit values as needed + uv run fastapi dev src/interfaces/main.py + ``` -That's it! 
Your API will be available at `http://localhost:8000/docs` +=== "Docker Compose" + + ```bash + git clone https://github.com/benavlabs/fastapi-boilerplate + cd fastapi-boilerplate/backend + docker compose up + ``` + +Your API will be available at `http://localhost:8000/docs` **[Continue with the Getting Started Guide →](getting-started/index.md)** diff --git a/docs/user-guide/admin-panel/adding-models.md b/docs/user-guide/admin-panel/adding-models.md index fd368523..dd2bc63f 100644 --- a/docs/user-guide/admin-panel/adding-models.md +++ b/docs/user-guide/admin-panel/adding-models.md @@ -1,480 +1,366 @@ -# Adding Models +# Adding Models to the Admin -Learn how to extend the admin interface with your new models by following the patterns established in the FastAPI boilerplate. The boilerplate already includes User, Tier, and Post models - we'll show you how to add your own models using these working examples. +Adding your own models to the admin is straightforward, but there's one quirk to know upfront: the boilerplate's models use SQLAlchemy's `MappedAsDataclass`, which requires a special mixin to play nicely with SQLAdmin. -> **CRUDAdmin Features**: This guide shows boilerplate-specific patterns. For advanced model configuration options and features, see the [CRUDAdmin documentation](https://benavlabs.github.io/crudadmin/). +For the full range of options, see the [SQLAdmin documentation](https://aminalaee.dev/sqladmin/). -## Understanding the Existing Setup +## The DataclassModelMixin -The boilerplate comes with three models already registered in the admin interface. Understanding how they're implemented will help you add your own models successfully. +SQLAdmin's default insert flow creates an empty model instance, then sets attributes one by one. That breaks dataclass models with required fields that have no defaults. 
-### Current Model Registration - -The admin interface is configured in `src/app/admin/views.py`: +The boilerplate solves this with `DataclassModelMixin` (`backend/src/interfaces/admin/mixins.py`) — it constructs the model with all the form data at once. ```python -def register_admin_views(admin: CRUDAdmin) -> None: - """Register all models and their schemas with the admin interface.""" - - # User model with password handling - password_transformer = PasswordTransformer( - password_field="password", - hashed_field="hashed_password", - hash_function=get_password_hash, - required_fields=["name", "username", "email"], - ) - - admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - allowed_actions={"view", "create", "update"}, - password_transformer=password_transformer, - ) +from ..mixins import DataclassModelMixin - admin.add_view( - model=Tier, - create_schema=TierCreate, - update_schema=TierUpdate, - allowed_actions={"view", "create", "update", "delete"} - ) +class MyModelAdmin(DataclassModelMixin, ModelView, model=MyModel): + ... +``` - admin.add_view( - model=Post, - create_schema=PostCreateAdmin, # Special admin-only schema - update_schema=PostUpdate, - allowed_actions={"view", "create", "update", "delete"} - ) +**Every admin view in the codebase uses this mixin.** If you forget it, you'll get an `AttributeError` (or worse, a silent NULL) when creating records. + +## Adding a New Model View + +### 1. 
Create the View File + +```python +# backend/src/interfaces/admin/views/widgets.py +from sqladmin import ModelView + +from ....modules.widgets.models import Widget +from ....modules.widgets.schemas import WidgetCreate, WidgetUpdate +from ..mixins import DataclassModelMixin + + +class WidgetAdmin(DataclassModelMixin, ModelView, model=Widget): + name = "Widget" + name_plural = "Widgets" + icon = "fa-solid fa-cube" + category = "Inventory" + + # List view + column_list = [Widget.id, Widget.name, Widget.owner_id, Widget.created_at] + column_searchable_list = [Widget.name] + column_sortable_list = [Widget.id, Widget.name, Widget.created_at] + column_default_sort = [(Widget.id, True)] # True = descending + + # Detail view + column_details_list = "__all__" + + # Forms — derived from your Pydantic schemas + form_create_rules = list(WidgetCreate.model_fields.keys()) + form_edit_rules = list(WidgetUpdate.model_fields.keys()) + + # Permissions + can_create = True + can_edit = True + can_delete = True + can_view_details = True + can_export = True ``` -Each model registration follows the same pattern: specify the SQLAlchemy model, appropriate Pydantic schemas for create/update operations, and define which actions are allowed. +### 2. Register It + +```python +# backend/src/interfaces/admin/views/__init__.py +from sqladmin import Admin + +from .tiers import TierAdmin +from .users import UserAdmin +from .widgets import WidgetAdmin # new + +__all__ = [ + "UserAdmin", + "TierAdmin", + "WidgetAdmin", # new + "register_admin_views", +] -## Step-by-Step Model Addition -Let's walk through adding a new model to your admin interface using a product catalog example. +def register_admin_views(admin: Admin) -> None: + admin.add_view(UserAdmin) + admin.add_view(TierAdmin) + admin.add_view(WidgetAdmin) # new +``` + +That's it — restart the app and Widgets show up in the sidebar under the "Inventory" category. 
-### Step 1: Create Your Model +## Configuration Options -First, create your SQLAlchemy model following the boilerplate's patterns: +### Column Display ```python -# src/app/models/product.py -from decimal import Decimal -from sqlalchemy.orm import Mapped, mapped_column -from sqlalchemy import String, Numeric, ForeignKey, Text, Boolean -from sqlalchemy.types import DateTime -from datetime import datetime - -from ..core.db.database import Base - -class Product(Base): - __tablename__ = "products" - - id: Mapped[int] = mapped_column(primary_key=True) - name: Mapped[str] = mapped_column(String(100), nullable=False) - description: Mapped[str | None] = mapped_column(Text, nullable=True) - price: Mapped[Decimal] = mapped_column(Numeric(10, 2), nullable=False) - is_active: Mapped[bool] = mapped_column(Boolean, default=True) - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) - - # Foreign key relationship (similar to Post.created_by_user_id) - category_id: Mapped[int] = mapped_column(ForeignKey("categories.id")) +column_list = [MyModel.id, MyModel.name, MyModel.status] +column_labels = { + "hashed_password": "Password", # rename a column header +} ``` -### Step 2: Create Pydantic Schemas +The boilerplate's `UserAdmin` uses `column_labels` to render `hashed_password` as just "Password" — the actual hashing happens in `on_model_change`. 
-Create schemas for the admin interface following the boilerplate's pattern: +### Search and Sort ```python -# src/app/schemas/product.py -from decimal import Decimal -from pydantic import BaseModel, Field -from typing import Annotated - -class ProductCreate(BaseModel): - name: Annotated[str, Field(min_length=2, max_length=100)] - description: Annotated[str | None, Field(max_length=1000, default=None)] - price: Annotated[Decimal, Field(gt=0, le=999999.99)] - is_active: Annotated[bool, Field(default=True)] - category_id: Annotated[int, Field(gt=0)] - -class ProductUpdate(BaseModel): - name: Annotated[str | None, Field(min_length=2, max_length=100, default=None)] - description: Annotated[str | None, Field(max_length=1000, default=None)] - price: Annotated[Decimal | None, Field(gt=0, le=999999.99, default=None)] - is_active: Annotated[bool | None, Field(default=None)] - category_id: Annotated[int | None, Field(gt=0, default=None)] +column_searchable_list = [MyModel.name, MyModel.email] +column_sortable_list = [MyModel.id, MyModel.created_at] +column_default_sort = [(MyModel.created_at, True)] # True = descending ``` -### Step 3: Register with Admin Interface +### Form Rules -Add your model to `src/app/admin/views.py`: +Use your Pydantic schemas to drive form fields — keeps the admin forms aligned with your API validation: ```python -# Add import at the top -from ..models.product import Product -from ..schemas.product import ProductCreate, ProductUpdate - -def register_admin_views(admin: CRUDAdmin) -> None: - """Register all models and their schemas with the admin interface.""" - - # ... existing model registrations ... 
- - # Add your new model - admin.add_view( - model=Product, - create_schema=ProductCreate, - update_schema=ProductUpdate, - allowed_actions={"view", "create", "update", "delete"} - ) +form_create_rules = list(MyModelCreate.model_fields.keys()) +form_edit_rules = list(MyModelUpdate.model_fields.keys()) ``` -### Step 4: Create and Run Migration +You can also write the list explicitly if you want a different order or to include FK columns (more on that below). -Generate the database migration for your new model: +## Foreign Keys and Relationships -```bash -# Generate migration -uv run alembic revision --autogenerate -m "Add product model" +The boilerplate's models use a **dual pattern**: foreign-key columns for database operations and relationships for SQLAdmin display. -# Apply migration -uv run alembic upgrade head -``` +### The Model Pattern -### Step 5: Test Your New Model +Every model that has a foreign key also defines the corresponding relationship: -Start your application and test the new model in the admin interface: +```python +# modules/widgets/models.py -```bash -# Start the application -uv run fastapi dev +from typing import TYPE_CHECKING +from sqlalchemy import ForeignKey, Integer +from sqlalchemy.orm import Mapped, mapped_column, relationship -# Visit http://localhost:8000/admin -# Login with your admin credentials -# You should see "Products" in the admin navigation -``` +from ...infrastructure.database.session import Base -## Learning from Existing Models +if TYPE_CHECKING: + from ..user.models import User -Each model in the boilerplate demonstrates different admin interface patterns you can follow. -### User Model - Password Handling +class Widget(Base, ...): + __tablename__ = "widgets" + ... 
-The User model shows how to handle sensitive fields like passwords: + # Foreign-key column — used by FastCRUD and DB constraints + owner_id: Mapped[int] = mapped_column( + Integer, ForeignKey("user.id"), index=True, + ) -```python -# Password transformer for secure password handling -password_transformer = PasswordTransformer( - password_field="password", # Field in the schema - hashed_field="hashed_password", # Field in the database model - hash_function=get_password_hash, # Your app's hash function - required_fields=["name", "username", "email"], # Fields required for user creation -) - -admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - allowed_actions={"view", "create", "update"}, # No delete for users - password_transformer=password_transformer, -) + # Relationship — used by SQLAdmin for display and form dropdowns. + # Required: lazy="selectin" (async) and init=False (excluded from dataclass __init__) + owner: Mapped["User"] = relationship( + "User", lazy="selectin", init=False, + ) ``` -**When to use this pattern:** - -- Models with password fields -- Any field that needs transformation before storage -- Fields requiring special security handling +### Why Both? -### Tier Model - Simple CRUD +- **FastCRUD** works with FK columns directly and returns dicts: `widget["owner_id"]` +- **SQLAdmin** uses the relationship to render a friendly dropdown showing the related object's `__repr__` instead of a raw integer -The Tier model demonstrates straightforward CRUD operations: +### `column_list` Uses the Relationship ```python -admin.add_view( - model=Tier, - create_schema=TierCreate, - update_schema=TierUpdate, - allowed_actions={"view", "create", "update", "delete"} # Full CRUD -) +class WidgetAdmin(DataclassModelMixin, ModelView, model=Widget): + # Use Widget.owner (relationship), not Widget.owner_id (FK column). + # This shows "user@example.com" instead of just an integer. 
+ column_list = [Widget.id, Widget.name, Widget.owner, Widget.created_at] ``` -**When to use this pattern:** +The boilerplate's `UserAdmin` does this for tier: + +```python +column_list = [User.id, User.name, User.username, User.email, User.is_superuser, User.tier] +``` -- Reference data (categories, types, statuses) -- Configuration models -- Simple data without complex relationships +`User.tier` is the relationship, not `User.tier_id`. -### Post Model - Admin-Specific Schemas +### Form Rules Use FK Column Names -The Post model shows how to create admin-specific schemas when the regular API schemas don't work for admin purposes: +For forms, include the **FK column name** (the underscore-id one) in your rules. SQLAdmin auto-generates a searchable dropdown: ```python -# Special admin schema (different from regular PostCreate) -class PostCreateAdmin(BaseModel): - title: Annotated[str, Field(min_length=2, max_length=30)] - text: Annotated[str, Field(min_length=1, max_length=63206)] - created_by_user_id: int # Required in admin, but not in API - media_url: Annotated[str | None, Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", default=None)] - -admin.add_view( - model=Post, - create_schema=PostCreateAdmin, # Admin-specific schema - update_schema=PostUpdate, # Regular update schema works fine - allowed_actions={"view", "create", "update", "delete"} -) +form_create_rules = [*WidgetCreate.model_fields.keys(), "owner_id"] ``` -**When to use this pattern:** - -- Models where admins need to set fields that users can't -- Models requiring additional validation for admin operations -- Cases where API schemas are too restrictive or too permissive for admin use +### `lazy="selectin"` Is Required -## Advanced Model Configuration +SQLAdmin runs in async context, so relationships must use `lazy="selectin"` to avoid lazy-loading errors. Symptom of forgetting: `MissingGreenlet` or `greenlet_spawn has not been called`. 
Both User and Tier models in the boilerplate already use this pattern. -### Customizing Field Display +### Don't Set `default=None` on Relationships -You can control how fields appear in the admin interface by modifying your schemas: +For nullable foreign keys, never set `default=None` on the relationship: ```python -class ProductCreateAdmin(BaseModel): - name: Annotated[str, Field( - min_length=2, - max_length=100, - description="Product name as shown to customers" - )] - description: Annotated[str | None, Field( - max_length=1000, - description="Detailed product description (supports HTML)" - )] - price: Annotated[Decimal, Field( - gt=0, - le=999999.99, - description="Price in USD (up to 2 decimal places)" - )] - category_id: Annotated[int, Field( - gt=0, - description="Product category (creates dropdown automatically)" - )] +# WRONG — SQLAlchemy clears the FK during commit +tier: Mapped["Tier | None"] = relationship("Tier", default=None, init=False) + +# CORRECT — relationship returns None naturally when FK is null +tier: Mapped["Tier | None"] = relationship("Tier", init=False) ``` -### Restricting Actions +The User model demonstrates the correct pattern. -Control what operations are available for each model: +`DataclassModelMixin` automatically filters out relationship objects before constructing the dataclass — so the form data containing `owner_id=42` works, but a stray `owner=` would be ignored. -```python -# Read-only model (reports, logs, etc.) 
-admin.add_view( - model=AuditLog, - create_schema=None, # No creation allowed - update_schema=None, # No updates allowed - allowed_actions={"view"} # Only viewing -) - -# No deletion allowed (users, critical data) -admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - allowed_actions={"view", "create", "update"} # No delete -) -``` +## Data Transformation Hooks -### Handling Complex Fields +### `on_model_change` — Transform Before Save -Some models may have fields that don't work well in the admin interface. Use select schemas to exclude problematic fields: +Runs before insert and update. Use it to hash passwords, normalize fields, etc. ```python -from pydantic import BaseModel - -# Create a simplified view schema -class ProductAdminView(BaseModel): - id: int - name: str - price: Decimal - is_active: bool - # Exclude complex fields like large text or binary data - -admin.add_view( - model=Product, - create_schema=ProductCreate, - update_schema=ProductUpdate, - select_schema=ProductAdminView, # Controls what's shown in lists - allowed_actions={"view", "create", "update", "delete"} -) +from typing import Any +from starlette.requests import Request + + +class UserAdmin(DataclassModelMixin, ModelView, model=User): + async def on_model_change( + self, + data: dict[str, Any], + model: Any, + is_created: bool, + request: Request, + ) -> None: + if is_created and data.get("hashed_password"): + # Form's "Password" field maps to hashed_password column; + # hash the plaintext before the row is created + data["hashed_password"] = get_password_hash(data["hashed_password"]) ``` -## Common Model Patterns +`is_created` distinguishes create from update. For new records, `model` is `None`. -### Reference Data Models +### `after_model_change` — Side Effects After Save -For categories, types, and other reference data: +Runs after the record is committed. Useful for sending welcome emails, dispatching webhooks, etc. 
```python -# Simple reference model -class Category(Base): - __tablename__ = "categories" - id: Mapped[int] = mapped_column(primary_key=True) - name: Mapped[str] = mapped_column(String(50), unique=True) - description: Mapped[str | None] = mapped_column(Text) - -# Simple schemas -class CategoryCreate(BaseModel): - name: str = Field(..., min_length=2, max_length=50) - description: str | None = None - -# Registration -admin.add_view( - model=Category, - create_schema=CategoryCreate, - update_schema=CategoryCreate, # Same schema for create and update - allowed_actions={"view", "create", "update", "delete"} -) +async def after_model_change( + self, + data: dict[str, Any], + model: Any, + is_created: bool, + request: Request, +) -> None: + if is_created: + await notify_new_user(model) ``` -### User-Generated Content +### `delete_model` — Custom Delete Behavior -For content models with user associations: +Override when delete needs to do more than `DELETE FROM`. The boilerplate's `TierAdmin` uses this to call the tier service's `permanent_delete`, which validates that no users or rate limits still reference the tier: ```python -class BlogPost(Base): - __tablename__ = "blog_posts" - id: Mapped[int] = mapped_column(primary_key=True) - title: Mapped[str] = mapped_column(String(200)) - content: Mapped[str] = mapped_column(Text) - author_id: Mapped[int] = mapped_column(ForeignKey("users.id")) - published_at: Mapped[datetime | None] = mapped_column(DateTime) - -# Admin schema with required author -class BlogPostCreateAdmin(BaseModel): - title: str = Field(..., min_length=5, max_length=200) - content: str = Field(..., min_length=10) - author_id: int = Field(..., gt=0) # Admin must specify author - published_at: datetime | None = None - -admin.add_view( - model=BlogPost, - create_schema=BlogPostCreateAdmin, - update_schema=BlogPostUpdate, - allowed_actions={"view", "create", "update", "delete"} -) -``` +async def delete_model(self, request: Request, pk: str) -> None: + from 
....modules.tier.crud import crud_tiers -### Configuration Models + async with local_session() as db: + tier_service = TierService() -For application settings and configuration: + tier = await crud_tiers.get(db=db, id=int(pk)) + if not tier: + raise ValueError(f"Tier with ID {pk} not found") -```python -class SystemSetting(Base): - __tablename__ = "system_settings" - id: Mapped[int] = mapped_column(primary_key=True) - key: Mapped[str] = mapped_column(String(100), unique=True) - value: Mapped[str] = mapped_column(Text) - description: Mapped[str | None] = mapped_column(Text) - -# Restricted actions - settings shouldn't be deleted -admin.add_view( - model=SystemSetting, - create_schema=SystemSettingCreate, - update_schema=SystemSettingUpdate, - allowed_actions={"view", "create", "update"} # No delete -) + await tier_service.permanent_delete(tier["name"], db) ``` -## Testing Your Models +## Bulk Actions -After adding models to the admin interface, test them thoroughly: +Bulk actions let admins select multiple records and operate on them at once. Use the `@action` decorator: -### Manual Testing - -1. **Access**: Navigate to `/admin` and log in -2. **Create**: Try creating new records with valid and invalid data -3. **Edit**: Test updating existing records -4. **Validation**: Verify that your schema validation works correctly -5. 
**Relationships**: Test foreign key relationships (dropdowns should populate) +```python +from sqladmin import action +from starlette.requests import Request +from starlette.responses import RedirectResponse -### Development Testing -```python -# Test your admin configuration -# src/scripts/test_admin.py -from app.admin.initialize import create_admin_interface - -def test_admin_setup(): - admin = create_admin_interface() - if admin: - print("Admin interface created successfully") - print(f"Models registered: {len(admin._views)}") - for model_name in admin._views: - print(f" - {model_name}") - else: - print("Admin interface disabled") - -if __name__ == "__main__": - test_admin_setup() +class WidgetAdmin(DataclassModelMixin, ModelView, model=Widget): + @action( + name="deactivate", + label="Deactivate Selected", + confirmation_message="Deactivate these widgets?", + add_in_list=True, + ) + async def action_deactivate(self, request: Request) -> RedirectResponse: + pks = request.query_params.get("pks", "").split(",") + if pks and pks[0]: + ids = [int(pk) for pk in pks] + async with local_session() as db: + await crud_widgets.update( + db=db, + object={"is_active": False}, + allow_multiple=True, + id__in=ids, + ) + await db.commit() + + referer = request.headers.get("Referer") + return RedirectResponse(referer or request.url_for("admin:list", identity=self.identity)) ``` -```bash -# Run the test -uv run python src/scripts/test_admin.py -``` +Notes: -## Updating Model Registration +- Selected IDs come from `request.query_params["pks"]` as a comma-separated string +- `local_session()` is the boilerplate's session-maker — import it from `infrastructure/database/session.py` +- Always commit before redirecting, otherwise the change reverts when the request ends -When you need to modify how existing models appear in the admin interface: +## Icons -### Adding Actions +SQLAdmin uses [Font Awesome](https://fontawesome.com/icons) icons. 
Set them with `icon`: ```python -# Enable deletion for a model that previously didn't allow it -admin.add_view( - model=Product, - create_schema=ProductCreate, - update_schema=ProductUpdate, - allowed_actions={"view", "create", "update", "delete"} # Added delete -) +icon = "fa-solid fa-user" # users +icon = "fa-solid fa-layer-group" # tiers / categories +icon = "fa-solid fa-key" # api keys +icon = "fa-solid fa-gauge-high" # rate limits +icon = "fa-solid fa-cube" # generic ``` -### Changing Schemas +## Categories + +Group related views together with `category`: ```python -# Switch to admin-specific schemas -admin.add_view( - model=User, - create_schema=UserCreateAdmin, # New admin schema - update_schema=UserUpdateAdmin, # New admin schema - allowed_actions={"view", "create", "update"}, - password_transformer=password_transformer, -) +class WidgetAdmin(...): + category = "Inventory" ``` -### Performance Optimization +Views with the same category appear under the same sidebar header. The boilerplate's existing views use `"Users & Access"`. -For models with many records, consider using select schemas to limit data: +## Soft Delete vs Hard Delete + +Models that mix in `SoftDeleteMixin` have `is_deleted` and `deleted_at` columns. 
SQLAdmin's default delete is a hard `DELETE FROM` — if you want soft-deletion behavior, override `delete_model`: ```python -# Only show essential fields in lists -class UserListView(BaseModel): - id: int - username: str - email: str - is_active: bool - -admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - select_schema=UserListView, # Faster list loading - allowed_actions={"view", "create", "update"}, - password_transformer=password_transformer, -) +async def delete_model(self, request: Request, pk: str) -> None: + async with local_session() as db: + await crud_widgets.delete(db=db, id=int(pk)) # FastCRUD soft-deletes via the mixin + await db.commit() ``` -## What's Next +Most of the time you actually want a hard delete here (the admin is editing the canonical row, not making a user-visible deletion), but be deliberate about which behavior you want. + +## Real Examples in the Codebase + +The boilerplate ships two admin views — read them as reference implementations: + +| File | What it shows | +|------|---------------| +| `backend/src/interfaces/admin/views/users.py` | `on_model_change` for password hashing, OAuth-provider select field, relationship in `column_list`, custom `column_labels` | +| `backend/src/interfaces/admin/views/tiers.py` | `delete_model` override that calls a service method, schema-driven form rules | + +## Key Files -With your models successfully added to the admin interface, you're ready to: +| Component | Location | +|-----------|----------| +| Dataclass mixin | `backend/src/interfaces/admin/mixins.py` | +| View registry | `backend/src/interfaces/admin/views/__init__.py` | +| Example views | `backend/src/interfaces/admin/views/*.py` | +| Auth backend | `backend/src/interfaces/admin/auth.py` | -1. 
**[User Management](user-management.md)** - Learn how to manage admin users and implement security best practices +## Next Steps -Your models are now fully integrated into the admin interface and ready for production use. The admin panel will automatically handle form generation, validation, and database operations based on your model and schema definitions. \ No newline at end of file +- **[User Management](user-management.md)** — Hardening admin authentication +- **[Models](../database/models.md)** — Defining the SQLAlchemy models that admin views render +- **[Schemas](../database/schemas.md)** — Pydantic schemas used for form rules diff --git a/docs/user-guide/admin-panel/configuration.md b/docs/user-guide/admin-panel/configuration.md index 32ca406b..22d26a16 100644 --- a/docs/user-guide/admin-panel/configuration.md +++ b/docs/user-guide/admin-panel/configuration.md @@ -1,378 +1,187 @@ -# Configuration +# Admin Panel Configuration -Learn how to configure the admin panel (powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin)) using the FastAPI boilerplate's built-in environment variable system. The admin panel is fully integrated with your application's configuration and requires no additional setup files or complex initialization. +The admin panel has a deliberately small surface area: it's a [SQLAdmin](https://aminalaee.dev/sqladmin/) instance gated by a username/password from environment variables. Configuration boils down to a handful of `.env` values. -> **About CRUDAdmin**: For complete configuration options and advanced features, see the [CRUDAdmin documentation](https://benavlabs.github.io/crudadmin/). +## Environment Variables -## Environment-Based Configuration +```env +# Toggle the admin panel (default: true) +ADMIN_ENABLED=true -The FastAPI boilerplate handles all admin panel configuration through environment variables defined in your `.env` file. 
This approach provides consistent configuration across development, staging, and production environments. +# Admin login credentials +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password -```bash -# Basic admin panel configuration in .env -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="SecurePassword123!" -CRUD_ADMIN_MOUNT_PATH="/admin" +# Used for admin session encryption (same SECRET_KEY as the rest of the app) +SECRET_KEY= ``` -The configuration system automatically: - -- Validates all environment variables at startup -- Provides sensible defaults for optional settings -- Adapts security settings based on your environment (local/staging/production) -- Integrates with your application's existing security and database systems +That's the whole admin-specific config. Everything else (engine, models, mount path) is hardcoded in `src/interfaces/admin/initialize.py` for simplicity. -## Core Configuration Settings +### Backing Settings Classes -### Enable/Disable Admin Panel +The variables map to two settings classes in `src/infrastructure/config/settings.py`: -Control whether the admin panel is available: +- **`AdminSettings`** — `ADMIN_NAME`, `ADMIN_EMAIL`, `ADMIN_USERNAME`, `ADMIN_PASSWORD`, `DEFAULT_TIER_NAME`. Used by both the admin panel login *and* `scripts/setup_initial_data.py` to bootstrap the first superuser. +- **`SQLAdminSettings`** — `ADMIN_ENABLED`. Single toggle for the admin panel. -```bash -# Enable admin panel (default: true) -CRUD_ADMIN_ENABLED=true - -# Disable admin panel completely -CRUD_ADMIN_ENABLED=false -``` +## What Happens at Startup -When disabled, the admin interface is not mounted and consumes no resources. +1. `infrastructure/main.py` calls `create_admin_interface(app)` from `interfaces/admin/initialize.py` +2. If `ADMIN_ENABLED=false`, the function returns `None` and the admin panel is **not mounted** +3. Otherwise, an `AdminAuth` backend is constructed using `SECRET_KEY` +4. 
A SQLAdmin `Admin` instance is created against the app's existing database `engine` +5. `register_admin_views(admin)` adds `UserAdmin` and `TierAdmin` (from `views/`) +6. The admin app is mounted at `/admin` -### Admin Access Credentials +## Login Authentication -Configure the initial admin user that's created automatically: - -```bash -# Required: Admin user credentials -ADMIN_USERNAME="your-admin-username" # Admin login username -ADMIN_PASSWORD="YourSecurePassword123!" # Admin login password - -# Optional: Additional admin user details (uses existing settings) -ADMIN_NAME="Administrator" # Display name (from FirstUserSettings) -ADMIN_EMAIL="admin@yourcompany.com" # Admin email (from FirstUserSettings) -``` +Login flow (in `interfaces/admin/auth.py`): -**How this works:** - -- The admin user is created automatically when the application starts -- Only created if no admin users exist (safe for restarts) -- Uses your application's existing password hashing system -- Credentials are validated according to CRUDAdmin requirements - -### Interface Configuration - -Customize where and how the admin panel appears: +```python +class AdminAuth(AuthenticationBackend): + async def login(self, request: Request) -> bool: + form = await request.form() + username = form.get("username") + password = form.get("password") -```bash -# Admin panel URL path (default: "/admin") -CRUD_ADMIN_MOUNT_PATH="/admin" # Access at http://localhost:8000/admin -CRUD_ADMIN_MOUNT_PATH="/management" # Access at http://localhost:8000/management -CRUD_ADMIN_MOUNT_PATH="/internal" # Access at http://localhost:8000/internal + settings = get_settings() + if username == settings.ADMIN_USERNAME and password == settings.ADMIN_PASSWORD: + request.session.update({"admin_authenticated": True}) + return True + return False ``` -The admin panel is mounted as a sub-application at your specified path. +Notes: -## Session Management Configuration +- Credentials come from environment variables, **not the database**. 
Restart the app to change them. +- Only one admin login is supported. There's no multi-admin user table. +- The session is encrypted with `SECRET_KEY` via Starlette's `SessionMiddleware`. +- Logout clears the session: `request.session.clear()`. -Control how admin users stay logged in and how sessions are managed. +If you need multiple admin operators, see [User Management](user-management.md) for ways to extend this. -### Basic Session Settings +## Mount Path -```bash -# Session limits and timeouts -CRUD_ADMIN_MAX_SESSIONS=10 # Max concurrent sessions per user -CRUD_ADMIN_SESSION_TIMEOUT=1440 # Session timeout in minutes (24 hours) +The admin panel is hardcoded at `/admin` (defined when `Admin(...)` is instantiated). To change the path, edit `src/interfaces/admin/initialize.py`: -# Cookie security -SESSION_SECURE_COOKIES=true # Require HTTPS for cookies (production) +```python +admin = Admin( + app=app, + engine=engine, + authentication_backend=authentication_backend, + title="Admin", + base_url="/management", # add this to change the mount path +) ``` -**Session behavior:** +If you change it, also update any internal links in your frontend or operational docs. -- Each admin login creates a new session -- Sessions expire after the timeout period of inactivity -- When max sessions are exceeded, oldest sessions are removed -- Session cookies are HTTP-only and secure (when HTTPS is enabled) +## Database Connection -### Memory Sessions (Development) +SQLAdmin reuses the **same SQLAlchemy engine** the rest of the app uses (imported from `infrastructure/database/session.py`). There's no separate admin database connection or pool to configure. 
-For local development, sessions are stored in memory by default: +## Session Cookies -```bash -# Development configuration -ENVIRONMENT="local" # Enables memory sessions -CRUD_ADMIN_REDIS_ENABLED=false # Explicitly disable Redis (default) -``` - -**Memory session characteristics:** - -- Fast performance with no external dependencies -- Sessions lost when application restarts -- Suitable for single-developer environments -- Not suitable for load-balanced deployments +The admin login uses Starlette's `SessionMiddleware`, which is added to the FastAPI app in `src/interfaces/main.py`: -### Redis Sessions (Production) - -For production deployments, enable Redis session storage: - -```bash -# Enable Redis sessions -CRUD_ADMIN_REDIS_ENABLED=true - -# Redis connection settings -CRUD_ADMIN_REDIS_HOST="localhost" # Redis server hostname -CRUD_ADMIN_REDIS_PORT=6379 # Redis server port -CRUD_ADMIN_REDIS_DB=0 # Redis database number -CRUD_ADMIN_REDIS_PASSWORD="secure-pass" # Redis authentication -CRUD_ADMIN_REDIS_SSL=false # Enable SSL/TLS connection +```python +app.add_middleware(SessionMiddleware, secret_key=settings.SECRET_KEY) ``` -**Redis session benefits:** +Cookie behavior: -- Sessions persist across application restarts -- Supports multiple application instances (load balancing) -- Configurable expiration and cleanup -- Production-ready scalability +- HTTP-only by default +- Encrypted/signed with `SECRET_KEY` +- Same-site `lax` +- **Not** marked `Secure` automatically — if you serve the app over HTTPS, set `SESSION_SECURE_COOKIES=true` and adjust the middleware as needed (the Starlette `SessionMiddleware` doesn't have a built-in production-secure flag the way our session backend does) -**Redis URL construction:** +For production behind HTTPS, you'll typically want to: -The boilerplate automatically constructs the Redis URL from your environment variables: +1. Terminate TLS at the proxy / load balancer +2. 
Strip `/admin` from public-facing routing entirely (see [Production Hardening](#production-hardening) below) -```python -# Automatic URL generation in src/app/admin/initialize.py -redis_url = f"redis{'s' if settings.CRUD_ADMIN_REDIS_SSL else ''}://" -if settings.CRUD_ADMIN_REDIS_PASSWORD: - redis_url += f":{settings.CRUD_ADMIN_REDIS_PASSWORD}@" -redis_url += f"{settings.CRUD_ADMIN_REDIS_HOST}:{settings.CRUD_ADMIN_REDIS_PORT}/{settings.CRUD_ADMIN_REDIS_DB}" -``` +## Development vs Production -## Security Configuration +### Development -The admin panel automatically adapts its security settings based on your deployment environment. +The default `.env.example` is already development-ready: -### Environment-Based Security - -```bash -# Environment setting affects security behavior -ENVIRONMENT="local" # Development mode -ENVIRONMENT="staging" # Staging mode -ENVIRONMENT="production" # Production mode with enhanced security +```env +ENVIRONMENT=development +ADMIN_ENABLED=true +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password +SECRET_KEY=insecure-secret-key-change-this-in-production ``` -**Security changes by environment:** +Open , log in, and you have access to Users and Tiers. -| Setting | Local | Staging | Production | -|---------|-------|---------|------------| -| **HTTPS Enforcement** | Disabled | Optional | Enabled | -| **Secure Cookies** | Optional | Recommended | Required | -| **Session Tracking** | Optional | Recommended | Enabled | -| **Event Logging** | Optional | Recommended | Enabled | +### Production Hardening -### Audit and Tracking +Three options, ordered by aggressiveness: -Enable comprehensive logging for compliance and security monitoring: +1. **Disable entirely** + ```env + ADMIN_ENABLED=false + ``` + Simplest. The admin panel never mounts. Run admin tasks via scripts (`uv run python -m scripts.setup_initial_data`, custom one-offs) or temporary overrides. 
-```bash -# Event and session tracking -CRUD_ADMIN_TRACK_EVENTS=true # Log all admin actions -CRUD_ADMIN_TRACK_SESSIONS=true # Track session lifecycle - -# Available in admin interface -# - View all admin actions with timestamps -# - Monitor active sessions -# - Track user activity patterns -``` +2. **Restrict at the proxy/load balancer** + Keep `ADMIN_ENABLED=true` but only allow the `/admin` path from your VPN's CIDR range or a specific IP allowlist. The app stays the same; the network blocks public access. -### Access Restrictions +3. **Use a strong unique password** + If you can't restrict at the network layer, treat `ADMIN_PASSWORD` like a production secret: + - Pull from a secrets manager at deploy time, never commit + - Rotate periodically + - Use a long, high-entropy password (the production security validator will refuse to start the app if `SECRET_KEY` is the placeholder, but it doesn't validate `ADMIN_PASSWORD`) -The boilerplate supports IP and network-based access restrictions (configured in code): +The Production Security Validator (`infrastructure/security/`) checks several things at startup when `ENVIRONMENT=production`, but admin credentials aren't currently in the validation list. Be deliberate about what you set. -```python -# In src/app/admin/initialize.py - customize as needed -admin = CRUDAdmin( - # ... other settings ... - allowed_ips=settings.CRUD_ADMIN_ALLOWED_IPS_LIST, # Specific IP addresses - allowed_networks=settings.CRUD_ADMIN_ALLOWED_NETWORKS_LIST, # CIDR network ranges -) -``` +## Environment Detection -To implement IP restrictions, extend the `CRUDAdminSettings` class in `src/app/core/config.py`. +The admin panel itself doesn't change behavior between `local` / `development` / `staging` / `production` — it's the same SQLAdmin app. 
What changes is the surrounding environment: -## Integration with Application Settings +- **Cookie security**: derived from your reverse proxy / TLS setup, not from the `ENVIRONMENT` setting +- **Logging**: admin actions go through the same logger configured by `infrastructure/logging/` +- **Session backend**: Starlette's `SessionMiddleware` is in-memory + cookie-based, not the same as the API's `SESSION_BACKEND` (Redis/memcached/memory). Restart-resilience for the *admin* login isn't relevant — admins re-log-in fine. -The admin panel leverages your existing application configuration for seamless integration. +## Troubleshooting -### Shared Security Settings +### `/admin` returns 404 +Check `ADMIN_ENABLED`. If it's `false` (or unset and Pydantic resolves to a falsy value), the admin app isn't mounted. Verify with: ```bash -# Uses your application's main secret key -SECRET_KEY="your-application-secret-key" # Shared with admin panel - -# Inherits database settings -POSTGRES_USER="dbuser" # Admin uses same database -POSTGRES_PASSWORD="dbpass" -POSTGRES_SERVER="localhost" -POSTGRES_DB="yourapp" +cd backend +uv run python -c "from src.infrastructure.config.settings import get_settings; print(get_settings().ADMIN_ENABLED)" ``` -### Automatic Configuration Loading +### Login form keeps rejecting credentials +- Confirm `ADMIN_USERNAME` and `ADMIN_PASSWORD` in `backend/.env` match what you're typing +- Restart the app after changing env vars (settings are read at startup) +- If running in Docker, confirm the env vars are actually reaching the container (`docker compose exec app env | grep ADMIN_`) -The admin panel automatically inherits settings from your application: +### Admin session keeps logging out +The Starlette `SessionMiddleware` cookie's lifetime is controlled by the browser (it's a session cookie). 
For longer-lived admin sessions, edit the middleware setup in `src/interfaces/main.py` to pass `max_age=...`: ```python -# In src/app/admin/initialize.py -admin = CRUDAdmin( - session=async_get_db, # Your app's database session - SECRET_KEY=settings.SECRET_KEY.get_secret_value(), # Your app's secret key - enforce_https=settings.ENVIRONMENT == EnvironmentOption.PRODUCTION, - # ... other settings from your app configuration +app.add_middleware( + SessionMiddleware, + secret_key=settings.SECRET_KEY, + max_age=60 * 60 * 8, # 8 hours ) ``` -## Deployment Examples - -### Development Environment - -Perfect for local development with minimal setup: - -```bash -# .env.development -ENVIRONMENT="local" -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="dev-admin" -ADMIN_PASSWORD="dev123" -CRUD_ADMIN_MOUNT_PATH="/admin" - -# Memory sessions - no external dependencies -CRUD_ADMIN_REDIS_ENABLED=false - -# Optional tracking for testing -CRUD_ADMIN_TRACK_EVENTS=false -CRUD_ADMIN_TRACK_SESSIONS=false -``` - -### Staging Environment - -Staging environment with Redis but relaxed security: - -```bash -# .env.staging -ENVIRONMENT="staging" -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="staging-admin" -ADMIN_PASSWORD="StagingPassword123!" - -# Redis sessions for testing production behavior -CRUD_ADMIN_REDIS_ENABLED=true -CRUD_ADMIN_REDIS_HOST="staging-redis.example.com" -CRUD_ADMIN_REDIS_PASSWORD="staging-redis-pass" - -# Enable tracking for testing -CRUD_ADMIN_TRACK_EVENTS=true -CRUD_ADMIN_TRACK_SESSIONS=true -SESSION_SECURE_COOKIES=true -``` - -### Production Environment - -Production-ready configuration with full security: - -```bash -# .env.production -ENVIRONMENT="production" -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="prod-admin" -ADMIN_PASSWORD="VerySecureProductionPassword123!" 
- -# Redis sessions for scalability -CRUD_ADMIN_REDIS_ENABLED=true -CRUD_ADMIN_REDIS_HOST="redis.internal.company.com" -CRUD_ADMIN_REDIS_PORT=6379 -CRUD_ADMIN_REDIS_PASSWORD="ultra-secure-redis-password" -CRUD_ADMIN_REDIS_SSL=true - -# Full security and tracking -SESSION_SECURE_COOKIES=true -CRUD_ADMIN_TRACK_EVENTS=true -CRUD_ADMIN_TRACK_SESSIONS=true -CRUD_ADMIN_MAX_SESSIONS=5 -CRUD_ADMIN_SESSION_TIMEOUT=480 # 8 hours for security -``` - -### Docker Deployment - -Configure for containerized deployments: - -```yaml -# docker-compose.yml -version: '3.8' -services: - web: - build: . - environment: - - ENVIRONMENT=production - - ADMIN_USERNAME=${ADMIN_USERNAME} - - ADMIN_PASSWORD=${ADMIN_PASSWORD} - - # Redis connection - - CRUD_ADMIN_REDIS_ENABLED=true - - CRUD_ADMIN_REDIS_HOST=redis - - CRUD_ADMIN_REDIS_PORT=6379 - - CRUD_ADMIN_REDIS_PASSWORD=${REDIS_PASSWORD} - - depends_on: - - redis - - postgres - - redis: - image: redis:7-alpine - command: redis-server --requirepass ${REDIS_PASSWORD} - volumes: - - redis_data:/data -``` - -```bash -# .env file for Docker -ADMIN_USERNAME="docker-admin" -ADMIN_PASSWORD="DockerSecurePassword123!" -REDIS_PASSWORD="docker-redis-password" -``` - -## Configuration Validation - -The boilerplate automatically validates your configuration at startup and provides helpful error messages. - -### Common Configuration Issues - -**Missing Required Variables:** -```bash -# Error: Admin credentials not provided -# Solution: Add to .env -ADMIN_USERNAME="your-admin" -ADMIN_PASSWORD="your-password" -``` - -**Invalid Redis Configuration:** -```bash -# Error: Redis connection failed -# Check Redis server and credentials -CRUD_ADMIN_REDIS_HOST="correct-redis-host" -CRUD_ADMIN_REDIS_PASSWORD="correct-password" -``` +### Wrong `engine` connection / "no such table" +The admin uses the same engine as the API, which means it requires `CREATE_TABLES_ON_STARTUP=true` (default) or applied Alembic migrations. 
If `/admin` shows views but they're empty / error, check: -**Security Warnings:** ```bash -# Warning: Weak admin password -# Use stronger password with mixed case, numbers, symbols -ADMIN_PASSWORD="StrongerPassword123!" +cd backend +uv run alembic current ``` -## What's Next - -With your admin panel configured, you're ready to: - -1. **[Adding Models](adding-models.md)** - Register your application models with the admin interface -2. **[User Management](user-management.md)** - Manage admin users and implement security best practices +## Next Steps -The configuration system provides flexibility for any deployment scenario while maintaining consistency across environments. \ No newline at end of file +- **[Adding Models](adding-models.md)** — Register your own models with the admin +- **[User Management](user-management.md)** — Extending admin authentication +- **[Production](../production.md)** — Production hardening checklist diff --git a/docs/user-guide/admin-panel/index.md b/docs/user-guide/admin-panel/index.md index 39a64423..25bb4cd5 100644 --- a/docs/user-guide/admin-panel/index.md +++ b/docs/user-guide/admin-panel/index.md @@ -1,295 +1,122 @@ # Admin Panel -The FastAPI boilerplate comes with a pre-configured web-based admin interface powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin) that provides instant database management capabilities. Learn how to access, configure, and customize the admin panel for your development and production needs. +The boilerplate ships a built-in admin panel powered by [SQLAdmin](https://aminalaee.dev/sqladmin/). It gives you a web interface for browsing and editing the database without writing custom CRUD endpoints. -> **Powered by CRUDAdmin**: This admin panel is built with [CRUDAdmin](https://github.com/benavlabs/crudadmin), a modern admin interface generator for FastAPI applications. 
-> -> - **📚 CRUDAdmin Documentation**: [benavlabs.github.io/crudadmin](https://benavlabs.github.io/crudadmin/) -> - **💻 CRUDAdmin GitHub**: [github.com/benavlabs/crudadmin](https://github.com/benavlabs/crudadmin) +## Accessing the Admin Panel -## What You'll Learn - -- **[Configuration](configuration.md)** - Environment variables and deployment settings -- **[Adding Models](adding-models.md)** - Register your new models with the admin interface -- **[User Management](user-management.md)** - Manage admin users and security - -## Admin Panel Overview - -Your FastAPI boilerplate includes a fully configured admin interface that's ready to use out of the box. The admin panel automatically provides web-based management for your database models without requiring any additional setup. - -**What's Already Configured:** - -- Complete admin interface mounted at `/admin` -- User, Tier, and Post models already registered -- Automatic form generation and validation -- Session management with configurable backends -- Security features and access controls +The admin panel is mounted at `/admin`. It's enabled by default — toggle it with: -**Accessing the Admin Panel:** - -1. Start your application: `uv run fastapi dev` -2. Navigate to: `http://localhost:8000/admin` -3. 
Login with default credentials (configured via environment variables) - -## Pre-Registered Models - -The boilerplate comes with three models already set up in the admin interface: - -### User Management -```python -# Already registered in your admin -admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - allowed_actions={"view", "create", "update"}, - password_transformer=password_transformer, # Automatic password hashing -) +```env +ADMIN_ENABLED=true # set to false to disable entirely ``` -**Features:** - -- Create and manage application users -- Automatic password hashing with bcrypt -- User profile management (name, username, email) -- Tier assignment for subscription management - -### Tier Management -```python -# Subscription tiers for your application -admin.add_view( - model=Tier, - create_schema=TierCreate, - update_schema=TierUpdate, - allowed_actions={"view", "create", "update", "delete"} -) -``` - -**Features:** - -- Manage subscription tiers and pricing -- Configure rate limits per tier -- Full CRUD operations available - -### Content Management -```python -# Post/content management -admin.add_view( - model=Post, - create_schema=PostCreateAdmin, # Special admin schema - update_schema=PostUpdate, - allowed_actions={"view", "create", "update", "delete"} -) -``` - -**Features:** - -- Manage user-generated content -- Handle media URLs and content validation -- Associate posts with users - -## Quick Start - -### 1. Set Up Admin Credentials - -Configure your admin login in your `.env` file: - -```bash -# Admin Panel Access -ADMIN_USERNAME="your-admin-username" -ADMIN_PASSWORD="YourSecurePassword123!" +Authentication is **separate from your app's session auth**. Admin login uses simple username/password credentials read from environment variables: -# Basic Configuration -CRUD_ADMIN_ENABLED=true -CRUD_ADMIN_MOUNT_PATH="/admin" +```env +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password +SECRET_KEY= ``` -### 2. 
Start the Application +Visit , enter those credentials, and you're in. -```bash -# Development -uv run fastapi dev +## What You'll Learn -# The admin panel will be available at: -# http://localhost:8000/admin -``` +- **[Configuration](configuration.md)** - Environment variables and deployment settings +- **[Adding Models](adding-models.md)** - Register your own models with the admin interface +- **[User Management](user-management.md)** - Admin authentication and security -### 3. Login and Explore +## What's Included -1. **Access**: Navigate to `/admin` in your browser -2. **Login**: Use the credentials from your environment variables -3. **Explore**: Browse the pre-configured models (Users, Tiers, Posts) +The boilerplate registers two model views out of the box (in `src/interfaces/admin/views/`): -## Environment Configuration +| View | Source | Notes | +|------|--------|-------| +| **Users** | `views/users.py` | Create / edit / delete users; password hashing applied automatically; soft-delete-aware | +| **Tiers** | `views/tiers.py` | Manage subscription tiers; uses `TierService.permanent_delete` to prevent orphaning users / rate limits | -The admin panel is configured entirely through environment variables, making it easy to adapt for different deployment environments. +Both are categorized under "Users & Access" and provide search, sort, filter, and CSV export. -### Basic Settings +If you want admin views for `RateLimit`, `APIKey`, etc., follow the [Adding Models](adding-models.md) guide. -```bash -# Enable/disable admin panel -CRUD_ADMIN_ENABLED=true # Set to false to disable completely +## Common Operations -# Admin interface path -CRUD_ADMIN_MOUNT_PATH="/admin" # Change the URL path +### Creating a User -# Admin user credentials (created automatically) -ADMIN_USERNAME="admin" # Your admin username -ADMIN_PASSWORD="SecurePassword123!" # Your admin password -``` +Navigate to **Users → Create**. Fill the form. 
The `Password` field accepts plaintext — `UserAdmin.on_model_change` runs `get_password_hash()` before saving so the database only ever sees the hash. -### Session Management +### Editing a User -```bash -# Session configuration -CRUD_ADMIN_MAX_SESSIONS=10 # Max concurrent sessions per user -CRUD_ADMIN_SESSION_TIMEOUT=1440 # Session timeout (24 hours) -SESSION_SECURE_COOKIES=true # HTTPS-only cookies -``` +Click any user row → **Edit**. You can change the tier, toggle `is_superuser`, update OAuth fields, etc. The hashed password field is shown but you only need to fill it if you want to reset the password. -### Production Security +### Deleting a Tier -```bash -# Security settings for production -ENVIRONMENT="production" # Enables HTTPS enforcement -CRUD_ADMIN_TRACK_EVENTS=true # Log admin actions -CRUD_ADMIN_TRACK_SESSIONS=true # Track session activity -``` +The Tier delete button calls `TierService.permanent_delete`, which **fails** if any users or rate limits still reference the tier. This prevents dangling foreign keys. Reassign or remove the dependents first. -### Redis Session Storage +## How Authentication Works -For production deployments with multiple server instances: +The admin panel uses session-based auth via `SessionMiddleware` (Starlette), separate from the API's session system. When you submit the login form: -```bash -# Enable Redis sessions -CRUD_ADMIN_REDIS_ENABLED=true -CRUD_ADMIN_REDIS_HOST="localhost" -CRUD_ADMIN_REDIS_PORT=6379 -CRUD_ADMIN_REDIS_DB=0 -CRUD_ADMIN_REDIS_PASSWORD="your-redis-password" -CRUD_ADMIN_REDIS_SSL=false -``` +1. `AdminAuth.login` validates the credentials against `ADMIN_USERNAME` / `ADMIN_PASSWORD` +2. On success, sets `request.session["admin_authenticated"] = True` +3. Subsequent requests check that flag -## How It Works +This is intentionally simpler than the main app's session system — the admin panel is for a small number of trusted operators, not end users. The session is encrypted with `SECRET_KEY`. 
-The admin panel integrates seamlessly with your FastAPI application through several key components: +## How It's Wired -### Automatic Initialization +The admin app is created in `src/interfaces/admin/initialize.py` and mounted in `src/interfaces/main.py` at startup: ```python -# In src/app/main.py - already configured -admin = create_admin_interface() - -@asynccontextmanager -async def lifespan_with_admin(app: FastAPI): - async with default_lifespan(app): - if admin: - await admin.initialize() # Sets up admin database - yield - -# Admin is mounted automatically at your configured path -if admin: - app.mount(settings.CRUD_ADMIN_MOUNT_PATH, admin.app) -``` +# interfaces/admin/initialize.py +from sqladmin import Admin -### Configuration Integration +from ...infrastructure.config.settings import get_settings +from ...infrastructure.database.session import engine +from .auth import AdminAuth +from .views import register_admin_views -```python -# In src/app/admin/initialize.py - uses your existing settings -admin = CRUDAdmin( - session=async_get_db, # Your database session - SECRET_KEY=settings.SECRET_KEY, # Your app's secret key - mount_path=settings.CRUD_ADMIN_MOUNT_PATH, # Configurable path - secure_cookies=settings.SESSION_SECURE_COOKIES, - enforce_https=settings.ENVIRONMENT == EnvironmentOption.PRODUCTION, - # ... 
all configured via environment variables -) -``` -### Model Registration +def create_admin_interface(app) -> Admin | None: + settings = get_settings() + if not settings.ADMIN_ENABLED: + return None -```python -# In src/app/admin/views.py - pre-configured models -def register_admin_views(admin: CRUDAdmin): - # Password handling for User model - password_transformer = PasswordTransformer( - password_field="password", - hashed_field="hashed_password", - hash_function=get_password_hash, # Uses your app's password hashing + admin = Admin( + app=app, + engine=engine, + authentication_backend=AdminAuth(secret_key=settings.SECRET_KEY), + title="Admin", ) - - # Register your models with appropriate schemas - admin.add_view(model=User, create_schema=UserCreate, ...) - admin.add_view(model=Tier, create_schema=TierCreate, ...) - admin.add_view(model=Post, create_schema=PostCreateAdmin, ...) + register_admin_views(admin) + return admin ``` -## Development vs Production +Calling `create_admin_interface(app)` from `main.py` mounts everything at `/admin`. If `ADMIN_ENABLED=false`, the function returns `None` and nothing is mounted. -### Development Setup +## Disabling in Production -For local development, minimal configuration is needed: +If you don't want the admin panel reachable in production, set: -```bash -# .env for development -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="admin123" -ENVIRONMENT="local" - -# Uses memory sessions (fast, no external dependencies) -CRUD_ADMIN_REDIS_ENABLED=false +```env +ADMIN_ENABLED=false ``` -### Production Setup - -For production deployments, enable additional security features: - -```bash -# .env for production -CRUD_ADMIN_ENABLED=true -ADMIN_USERNAME="production-admin" -ADMIN_PASSWORD="VerySecureProductionPassword123!" 
-ENVIRONMENT="production" - -# Redis sessions for scalability -CRUD_ADMIN_REDIS_ENABLED=true -CRUD_ADMIN_REDIS_HOST="your-redis-host" -CRUD_ADMIN_REDIS_PASSWORD="secure-redis-password" -CRUD_ADMIN_REDIS_SSL=true - -# Enhanced security -SESSION_SECURE_COOKIES=true -CRUD_ADMIN_TRACK_EVENTS=true -CRUD_ADMIN_TRACK_SESSIONS=true -``` - -## Getting Started Guide - -### 1. **[Configuration](configuration.md)** - Environment Setup - -Learn about all available environment variables and how to configure the admin panel for different deployment scenarios. Understand session backends and security settings. - -Perfect for setting up development environments and preparing for production deployment. - -### 2. **[Adding Models](adding-models.md)** - Extend the Admin Interface - -Discover how to register your new models with the admin interface. Learn from the existing User, Tier, and Post implementations to add your own models. - -Essential when you create new database models and want them managed through the admin interface. - -### 3. **[User Management](user-management.md)** - Admin Security - -Understand how admin authentication works, how to create additional admin users, and implement security best practices for production environments. - -Critical for production deployments where multiple team members need admin access. +Or keep it enabled but restrict network access at the load balancer / proxy level (e.g. only allow `/admin` from your VPN's CIDR). -## What's Next +## Key Files -Ready to start using your admin panel? 
Follow this path:
+| Component | Location |
+|-----------|----------|
+| Admin app factory | `backend/src/interfaces/admin/initialize.py` |
+| Authentication backend | `backend/src/interfaces/admin/auth.py` |
+| Dataclass-model mixin | `backend/src/interfaces/admin/mixins.py` |
+| User view | `backend/src/interfaces/admin/views/users.py` |
+| Tier view | `backend/src/interfaces/admin/views/tiers.py` |
+| View registry | `backend/src/interfaces/admin/views/__init__.py` |
 
-1. **[Configuration](configuration.md)** - Set up your environment variables and understand deployment options
-2. **[Adding Models](adding-models.md)** - Add your new models to the admin interface
-3. **[User Management](user-management.md)** - Implement secure admin authentication
+## Next Steps
 
-The admin panel is ready to use immediately with sensible defaults, and each guide shows you how to customize it for your specific needs.
\ No newline at end of file
+1. **[Configuration](configuration.md)** — Environment variables and deployment options
+2. **[Adding Models](adding-models.md)** — Walkthrough for registering your own model views
+3. **[User Management](user-management.md)** — Hardening the admin login for production
diff --git a/docs/user-guide/admin-panel/user-management.md b/docs/user-guide/admin-panel/user-management.md
index 53d84f9d..4d555348 100644
--- a/docs/user-guide/admin-panel/user-management.md
+++ b/docs/user-guide/admin-panel/user-management.md
@@ -1,213 +1,179 @@
-# User Management
+# Admin User Management
 
-Learn how to manage admin users in your FastAPI boilerplate's admin panel. The boilerplate automatically creates admin users from environment variables and provides a separate authentication system (powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin)) from your application users.
+Admin authentication in this boilerplate is intentionally simple: a single admin user defined by environment variables, gated by `SECRET_KEY`-signed Starlette sessions.
There's no admin user table, no multi-operator flow out of the box.
 
-> **CRUDAdmin Authentication**: For advanced authentication features and session management, see the [CRUDAdmin documentation](https://benavlabs.github.io/crudadmin/).
+This page covers the trade-offs, hardening options, and what to do if you need something more sophisticated.
 
-## Initial Admin Setup
+## How It Works
 
-### Configure Admin Credentials
+The admin login (`interfaces/admin/auth.py`) compares submitted credentials against `ADMIN_USERNAME` and `ADMIN_PASSWORD`:
 
-Set your admin credentials in your `.env` file:
-
-```bash
-# Required admin credentials
-ADMIN_USERNAME="admin"
-ADMIN_PASSWORD="SecurePassword123!"
-
-# Optional details
-ADMIN_NAME="Administrator"
-ADMIN_EMAIL="admin@yourcompany.com"
+```python
+class AdminAuth(AuthenticationBackend):
+    async def login(self, request: Request) -> bool:
+        form = await request.form()
+        username = form.get("username")
+        password = form.get("password")
+
+        settings = get_settings()
+        if username == settings.ADMIN_USERNAME and password == settings.ADMIN_PASSWORD:
+            request.session.update({"admin_authenticated": True})
+            return True
+        return False
 ```
-### Access the Admin Panel
+On success, `admin_authenticated=True` is stored in a `SECRET_KEY`-signed Starlette session cookie. Subsequent requests check that flag.
+
+There is no admin user model, no admin password hashing, no admin user table — credentials live in the environment.
 
-Start your application and access the admin panel:
+## Initial Setup
 
-```bash
-# Start application
-uv run fastapi dev
+In `backend/.env`:
 
-# Visit: http://localhost:8000/admin
-# Login with your ADMIN_USERNAME and ADMIN_PASSWORD
+```env
+ADMIN_USERNAME=admin
+ADMIN_PASSWORD=your-secure-password
+SECRET_KEY=<your-generated-secret-key>
 ```
-The boilerplate automatically creates the initial admin user from your environment variables when the application starts.
+Restart the app, navigate to <http://localhost:8000/admin>, and log in.
-## Managing Admin Users +The same `ADMIN_*` env vars are also read by `scripts/setup_initial_data.py` to bootstrap the **first application superuser**, but the two systems are otherwise unrelated. See the next section for the distinction. -### Creating Additional Admin Users +## Two Separate User Systems -Once logged in, you can create more admin users through the admin interface: +The boilerplate maintains two completely separate concepts of "user". Don't confuse them. -1. Navigate to the admin users section in the admin panel -2. Click "Create" or "Add New" -3. Fill in the required fields: - - Username (must be unique) - - Password (will be hashed automatically) - - Email (optional) +| | Admin login | Application users | +|---|---|---| +| **Identifies** | An operator of the SQLAdmin panel | Your app's end users | +| **Storage** | Environment variables | Database (`user` table) | +| **Auth method** | Plaintext compare against `ADMIN_PASSWORD` | bcrypt-hashed password verified by sessions | +| **Multiple accounts?** | No (single `ADMIN_USERNAME`) | Yes (one row per user) | +| **Used by** | `/admin` only | `/api/v1/*` and `/admin` (the User model itself) | +| **Login URL** | `/admin/login` | `/api/v1/auth/login` | -### Admin User Requirements +A user with `is_superuser=true` in the application database can call superuser-only API endpoints (e.g. `DELETE /api/v1/users/db/{username}`). They **cannot** log into the admin panel unless their credentials happen to match `ADMIN_USERNAME` / `ADMIN_PASSWORD`. The two systems don't share state. 
-- **Username**: 3-50 characters, letters/numbers/underscores/hyphens -- **Password**: Minimum 8 characters with mixed case, numbers, and symbols -- **Email**: Valid email format (optional) +## Managing Application Users via the Admin -### Updating and Removing Users +Once logged in to `/admin`: -- **Update**: Find the user in the admin panel and click "Edit" -- **Remove**: Click "Delete" (ensure you have alternative admin access first) +- **Users** view: create / edit / delete application users (goes against the `user` table) +- **Tiers** view: assign tiers, edit names and descriptions +- Password fields go through `on_model_change` for automatic hashing +- Toggle `is_superuser` directly in the edit form -## Security Configuration +The `/admin` panel is the easiest way to grant superuser status to an existing application user. -### Environment-Specific Settings +## Hardening for Production -Configure different security levels for each environment: +### Option 1: Disable in Production -```bash -# Development -ADMIN_USERNAME="dev-admin" -ADMIN_PASSWORD="DevPass123!" -ENVIRONMENT="local" +The simplest move: don't expose the admin panel at all in production. -# Production -ADMIN_USERNAME="prod-admin" -ADMIN_PASSWORD="VerySecurePassword123!" -ENVIRONMENT="production" -CRUD_ADMIN_TRACK_EVENTS=true -CRUD_ADMIN_TRACK_SESSIONS=true -SESSION_SECURE_COOKIES=true +```env +ADMIN_ENABLED=false ``` -### Session Management +`create_admin_interface()` short-circuits when this is false, and nothing is mounted. Run admin tasks via scripts or DB tools instead. 
-Control admin sessions with these settings: +### Option 2: Network-Restrict the Path -```bash -# Session limits and timeouts -CRUD_ADMIN_MAX_SESSIONS=10 # Max concurrent sessions per user -CRUD_ADMIN_SESSION_TIMEOUT=1440 # Timeout in minutes (24 hours) -SESSION_SECURE_COOKIES=true # HTTPS-only cookies -``` - -### Enable Tracking +Keep `ADMIN_ENABLED=true` but allow `/admin/*` only from your VPN or office IP range at the load balancer / reverse proxy. The app stays the same; the network blocks public access. -Monitor admin activity by enabling event tracking: +This is usually the right call when you need occasional access without baking new admin code paths. -```bash -# Track admin actions and sessions -CRUD_ADMIN_TRACK_EVENTS=true # Log all admin actions -CRUD_ADMIN_TRACK_SESSIONS=true # Track session lifecycle -``` +### Option 3: Strong Credentials + TLS -## Production Deployment +If you need `/admin` reachable from the internet: -### Secure Credential Management +- Generate a long, high-entropy `ADMIN_PASSWORD` and pull it from a secrets manager at deploy time +- Use HTTPS (terminate at your proxy) +- Enable secure cookies if you serve over HTTPS — see [Configuration](configuration.md#session-cookies) +- Rotate the password periodically (requires a deploy) -For production, use Docker secrets or Kubernetes secrets instead of plain text: +The production security validator (`infrastructure/security/`) does **not** check admin credentials specifically — it only catches the placeholder `SECRET_KEY`, `DEBUG=true`, and `CORS_ORIGINS=*`. You're responsible for the strength of `ADMIN_PASSWORD`. 
-```yaml -# docker-compose.yml -services: - web: - secrets: - - admin_username - - admin_password - environment: - - ADMIN_USERNAME_FILE=/run/secrets/admin_username - - ADMIN_PASSWORD_FILE=/run/secrets/admin_password +## Recovering from a Lost Admin Password -secrets: - admin_username: - file: ./secrets/admin_username.txt - admin_password: - file: ./secrets/admin_password.txt -``` +Since admin credentials are env vars, recovery is mechanical: -### Production Security Settings - -```bash -# Production .env -ENVIRONMENT="production" -ADMIN_USERNAME="prod-admin" -ADMIN_PASSWORD="UltraSecurePassword123!" - -# Enhanced security -CRUD_ADMIN_REDIS_ENABLED=true -CRUD_ADMIN_REDIS_HOST="redis.internal.company.com" -CRUD_ADMIN_REDIS_PASSWORD="secure-redis-password" -CRUD_ADMIN_REDIS_SSL=true - -# Monitoring -CRUD_ADMIN_TRACK_EVENTS=true -CRUD_ADMIN_TRACK_SESSIONS=true -SESSION_SECURE_COOKIES=true -CRUD_ADMIN_MAX_SESSIONS=5 -CRUD_ADMIN_SESSION_TIMEOUT=480 # 8 hours -``` +1. Edit `backend/.env` (or your secrets manager / orchestrator config) to set new `ADMIN_USERNAME` / `ADMIN_PASSWORD` +2. Restart the app -## Application User Management +There's no database row to fix. There's no email-based reset flow either — these credentials aren't meant to be self-service. -### Admin vs Application Users +## When You Need Multiple Admins -Your boilerplate maintains two separate user systems: +The single-credential design works for small teams or solo deployments. If you need real multi-operator admin auth, you have a few options: -- **Admin Users**: Access the admin panel (stored by CRUDAdmin) -- **Application Users**: Use your application (stored in your User model) +### Option A: Use Application Superusers + a Dedicated Admin Route -### Managing Application Users +Skip the SQLAdmin login entirely. 
Restrict `/admin` access to authenticated app users with `is_superuser=true` by writing a custom `AuthenticationBackend`: -Through the admin panel, you can manage your application's users: +```python +# interfaces/admin/auth.py +from sqladmin.authentication import AuthenticationBackend +from starlette.requests import Request + +# Pseudocode — wire to your existing session backend +class AdminAuth(AuthenticationBackend): + async def login(self, request: Request) -> bool: + # Reuse your /api/v1/auth/login flow: + # validate credentials, look up the user, check is_superuser + ... + + async def authenticate(self, request: Request) -> bool: + # Read the app's session_id cookie, validate it, confirm is_superuser + ... +``` -1. Navigate to "Users" section (your application users) -2. View, create, update user profiles -3. Manage user tiers and subscriptions -4. View user-generated content (posts) +Trade-offs: now any app superuser can log in. You also need to think about CSRF (the app uses double-submit; SQLAdmin posts forms separately). -The User model is already registered with password hashing and proper permissions. +### Option B: Add an `AdminUser` Model -## Emergency Recovery +Build a small model (`AdminUser` with `username`, `hashed_password`, `is_active`) and override `AdminAuth.login` to query it. Add a one-off script to seed admin users. -### Lost Admin Password +### Option C: External Auth (OIDC / SAML) -If you lose admin access, update your environment variables: +For larger orgs, mount the admin behind an SSO proxy (Authelia, Pomerium, AWS ALB with Cognito). The admin app trusts the proxy's authentication header and grants access on its presence. -```bash -# Update .env file -ADMIN_USERNAME="emergency-admin" -ADMIN_PASSWORD="EmergencyPassword123!" +None of these are wired up in the boilerplate — pick the one that fits your environment and implement it. 
The SQLAdmin docs cover [Authentication](https://aminalaee.dev/sqladmin/authentication/) extensions in detail. -# Restart application -uv run fastapi dev -``` +## Auditing Admin Activity -### Database Recovery (Advanced) +The admin panel doesn't log every action by default. If you need an audit trail: -For direct database password reset: +- The boilerplate's logging infrastructure (`infrastructure/logging/`) gives you correlation IDs out of the box. SQLAdmin requests pass through it like any other. +- Override `on_model_change` / `after_model_change` / `delete_model` in your views to log explicitly: ```python -# Generate bcrypt hash -import bcrypt -password = "NewPassword123!" -hashed = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()) -print(hashed.decode('utf-8')) -``` +from src.infrastructure.logging import get_logger + +logger = get_logger() -```sql --- Update in database -UPDATE admin_users -SET password_hash = '' -WHERE username = 'admin'; + +class UserAdmin(DataclassModelMixin, ModelView, model=User): + async def on_model_change(self, data, model, is_created, request): + action = "created" if is_created else "updated" + logger.info(f"Admin {action} user", extra={"user_id": data.get("id"), "actor": "admin"}) ``` -## What's Next +For richer auditing, write to a dedicated log stream or push events to a SIEM. 
+ +## Key Files -Your admin user management is now configured with: +| Component | Location | +|-----------|----------| +| Admin auth backend | `backend/src/interfaces/admin/auth.py` | +| Admin app factory | `backend/src/interfaces/admin/initialize.py` | +| Settings classes | `backend/src/infrastructure/config/settings.py` (`AdminSettings`, `SQLAdminSettings`) | +| Initial data script | `backend/scripts/setup_initial_data.py` | -- Automatic admin user creation from environment variables -- Secure authentication separate from application users -- Environment-specific security settings -- Production-ready credential management -- Emergency recovery procedures +## Next Steps -You can now securely manage both admin users and your application users through the admin panel. +- **[Configuration](configuration.md)** — Environment variables and cookie behavior +- **[Adding Models](adding-models.md)** — Register your own admin views +- **[Permissions](../authentication/permissions.md)** — Application-level superuser checks +- **[Production](../production.md)** — Production hardening checklist diff --git a/docs/user-guide/api/endpoints.md b/docs/user-guide/api/endpoints.md index ff270b7a..9562a251 100644 --- a/docs/user-guide/api/endpoints.md +++ b/docs/user-guide/api/endpoints.md @@ -1,327 +1,441 @@ # API Endpoints -This guide shows you how to create API endpoints using the boilerplate's established patterns. You'll learn the common patterns you need for building CRUD APIs. +This guide shows the patterns the boilerplate uses for endpoints, so adding new ones stays consistent with the existing modules. 
## Quick Start -Here's how to create a typical endpoint using the boilerplate's patterns: +A typical endpoint lives in `modules//routes.py` and delegates work to a service: ```python -from fastapi import APIRouter, Depends, HTTPException -from typing import Annotated +# backend/src/modules/widgets/routes.py +from typing import Annotated, Any + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.auth.http_exceptions import HTTPException +from ...infrastructure.auth.session.dependencies import get_current_user +from ...infrastructure.database.session import async_session +from ..common.utils.error_handler import handle_exception +from .schemas import WidgetCreate, WidgetRead +from .service import WidgetService + +router = APIRouter(tags=["Widgets"]) + + +def get_widget_service() -> WidgetService: + """Per-module service factory used by Depends().""" + return WidgetService() + + +@router.get("/{widget_id}", response_model=WidgetRead) +async def get_widget( + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + """Get a widget by id.""" + try: + widget = await widget_service.get_by_id(widget_id, db) + if widget is None: + raise HTTPException(status_code=404, detail=f"Widget {widget_id} not found") + return widget + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") +``` -from app.core.db.database import async_get_db -from app.crud.crud_users import crud_users -from app.schemas.user import UserRead, UserCreate -from app.api.dependencies import get_current_user +Register the router in `interfaces/api/v1/__init__.py`: -router = APIRouter(prefix="/users", tags=["users"]) +```python +from ....modules.widgets.routes import router as widgets_router -@router.get("/{user_id}", 
response_model=UserRead) -async def get_user( - user_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Get a user by ID.""" - user = await crud_users.get(db=db, id=user_id, schema_to_select=UserRead) - if not user: - raise HTTPException(status_code=404, detail="User not found") - return user +router.include_router(widgets_router, prefix="/widgets") ``` -That's it! The boilerplate handles the rest. +The endpoint is now live at `GET /api/v1/widgets/{widget_id}`. + +## Common Patterns + +The pattern across every module is the same: -## Common Endpoint Patterns +1. **Routes** define HTTP shape and delegate to a service +2. **Service** holds business logic (permission checks, multi-step orchestration) +3. **CRUD** does the database I/O -### 1. Get Single Item +Below are the canonical patterns. They mirror what's already in `modules/user/routes.py`, `modules/tier/routes.py`, etc. + +### Get a Single Item ```python -@router.get("/{user_id}", response_model=UserRead) -async def get_user( - user_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - user = await crud_users.get(db=db, id=user_id, schema_to_select=UserRead) - if not user: - raise HTTPException(status_code=404, detail="User not found") - return user +@router.get("/{widget_id}", response_model=WidgetRead) +async def get_widget( + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + try: + widget = await widget_service.get_by_id(widget_id, db) + if widget is None: + raise HTTPException(status_code=404, detail=f"Widget {widget_id} not found") + return widget + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -### 2. 
Get Multiple Items (with Pagination) +### Get Multiple Items (Paginated) ```python -from fastcrud import PaginatedListResponse, paginated_response +from fastcrud import PaginatedListResponse, compute_offset, paginated_response -@router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users( + +@router.get("/", response_model=PaginatedListResponse[WidgetRead]) +async def list_widgets( + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], page: int = 1, items_per_page: int = 10, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * items_per_page, +) -> dict[str, Any]: + result = await widget_service.get_paginated( + skip=compute_offset(page, items_per_page), limit=items_per_page, - schema_to_select=UserRead, - return_as_model=True, - return_total_count=True - ) - return paginated_response( - crud_data=users, - page=page, - items_per_page=items_per_page + db=db, ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) ``` -### 3. Create Item +See [Pagination](pagination.md) for the full pattern. 
+ +### Create ```python -@router.post("/", response_model=UserRead, status_code=201) -async def create_user( - user_data: UserCreate, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Check if user already exists - if await crud_users.exists(db=db, email=user_data.email): - raise HTTPException(status_code=409, detail="Email already exists") - - # Create user - new_user = await crud_users.create(db=db, object=user_data) - return new_user +@router.post("/", response_model=WidgetRead, status_code=201) +async def create_widget( + widget: WidgetCreate, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + try: + return await widget_service.create(widget, db) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -### 4. Update Item +The service does the duplicate check / business validation: ```python -@router.patch("/{user_id}", response_model=UserRead) -async def update_user( - user_id: int, - user_data: UserUpdate, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Check if user exists - if not await crud_users.exists(db=db, id=user_id): - raise HTTPException(status_code=404, detail="User not found") - - # Update user - updated_user = await crud_users.update(db=db, object=user_data, id=user_id) - return updated_user +# modules/widgets/service.py +async def create(self, widget: WidgetCreate, db: AsyncSession) -> dict[str, Any]: + if await crud_widgets.exists(db=db, name=widget.name): + raise ResourceExistsError("Widget with this name already exists") + return await crud_widgets.create(db=db, object=widget, schema_to_select=WidgetRead) ``` -### 5. 
Delete Item (Soft Delete) +### Update ```python -@router.delete("/{user_id}") -async def delete_user( - user_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - if not await crud_users.exists(db=db, id=user_id): - raise HTTPException(status_code=404, detail="User not found") - - await crud_users.delete(db=db, id=user_id) - return {"message": "User deleted"} +@router.patch("/{widget_id}", response_model=WidgetRead) +async def update_widget( + widget_id: int, + values: WidgetUpdate, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + try: + return await widget_service.update(widget_id, values, db) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -## Adding Authentication - -To require login, add the `get_current_user` dependency: +### Delete (Soft Delete) ```python -@router.get("/me", response_model=UserRead) -async def get_my_profile( - current_user: Annotated[dict, Depends(get_current_user)] -): - """Get current user's profile.""" - return current_user - -@router.post("/", response_model=UserRead) -async def create_user( - user_data: UserCreate, - current_user: Annotated[dict, Depends(get_current_user)], # Requires login - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Only logged-in users can create users - new_user = await crud_users.create(db=db, object=user_data) - return new_user +@router.delete("/{widget_id}", status_code=204) +async def delete_widget( + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> None: + try: + await widget_service.delete(widget_id, db) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error 
occurred") ``` -## Adding Admin-Only Endpoints +`crud_widgets.delete()` flips `is_deleted=True` if the model uses `SoftDeleteMixin`. Use `db_delete()` when you actually want to remove the row. + +## Authentication + +All session-based auth dependencies live in `infrastructure/auth/session/dependencies`. -For admin-only endpoints, use `get_current_superuser`: +### Require Login ```python -from app.api.dependencies import get_current_superuser +from ...infrastructure.auth.session.dependencies import get_current_user -@router.delete("/{user_id}/permanent", dependencies=[Depends(get_current_superuser)]) -async def permanently_delete_user( - user_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] + +@router.get("/me", response_model=WidgetRead) +async def my_widget( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + return await widget_service.get_by_owner(current_user["id"], db) +``` + +### Optional Auth + +```python +from ...infrastructure.auth.session.dependencies import get_optional_user + + +@router.get("/", response_model=list[WidgetRead]) +async def list_widgets( + user: Annotated[dict[str, Any] | None, Depends(get_optional_user)], + ... ): - """Admin-only: Permanently delete user from database.""" - await crud_users.db_delete(db=db, id=user_id) - return {"message": "User permanently deleted"} + # Show extra fields when logged in + ... 
+``` + +### Superuser Only + +```python +from ...infrastructure.auth.session.dependencies import get_current_superuser + + +@router.delete("/{widget_id}/permanent") +async def hard_delete_widget( + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + await widget_service.permanent_delete(widget_id, db) + return {"message": "Widget permanently deleted"} +``` + +The leading underscore on the dependency-only parameter is the convention used across the boilerplate. + +### API Key Authentication + +For machine-to-machine clients, see [Authentication](../authentication/index.md). API keys are managed via the `/api/v1/api-keys/*` endpoints in `modules/api_keys/routes.py`. + +## Path & Query Parameters + +### Path Parameters + +```python +@router.get("/{widget_id}") +async def get_widget(widget_id: int, ...): + ... ``` -## Query Parameters +FastAPI validates `widget_id` is an int automatically. Invalid input returns `422`. 
-### Simple Parameters +### Simple Query Parameters ```python @router.get("/search") -async def search_users( - name: str | None = None, # Optional string - age: int | None = None, # Optional integer - is_active: bool = True, # Boolean with default - db: Annotated[AsyncSession, Depends(async_get_db)] -): - filters = {"is_active": is_active} - if name: - filters["name"] = name - if age: - filters["age"] = age - - users = await crud_users.get_multi(db=db, **filters) - return users["data"] +async def search_widgets( + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], + name: str | None = None, + is_active: bool = True, +) -> list[dict[str, Any]]: + return await widget_service.search(db=db, name=name, is_active=is_active) ``` -### Parameters with Validation +### Query Validation ```python from fastapi import Query + @router.get("/") -async def get_users( - page: Annotated[int, Query(ge=1)] = 1, # Must be >= 1 - limit: Annotated[int, Query(ge=1, le=100)] = 10, # Between 1-100 - search: Annotated[str | None, Query(max_length=50)] = None, # Max 50 chars - db: Annotated[AsyncSession, Depends(async_get_db)] +async def list_widgets( + db: Annotated[AsyncSession, Depends(async_session)], + page: Annotated[int, Query(ge=1)] = 1, + items_per_page: Annotated[int, Query(ge=1, le=100)] = 10, + search: Annotated[str | None, Query(max_length=50)] = None, ): - # Use the validated parameters - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * limit, - limit=limit - ) - return users["data"] + ... 
``` ## Error Handling -The boilerplate includes custom exceptions you can use: +The boilerplate uses two layers of exceptions: + +### Domain exceptions (services) + +Defined in `modules/common/exceptions.py`: + +- `ResourceNotFoundError` +- `ResourceExistsError` +- `PermissionDeniedError` +- `UserNotFoundError`, `UserExistsError` +- `TierNotFoundError` +- `ValidationError` + +Service methods raise these — they don't know about HTTP. + +### HTTP exceptions (routes) + +Re-exported from FastCRUD in `infrastructure/auth/http_exceptions.py`: + +- `HTTPException` (the FastAPI base) +- `BadRequestException` — 400 +- `UnauthorizedException` — 401 +- `ForbiddenException` — 403 +- `NotFoundException` — 404 +- `UnprocessableEntityException` — 422 +- `DuplicateValueException` — 409 +- `RateLimitException` — 429 +- `CSRFException` — 403 with `X-CSRF-Error` header (defined locally for CSRF flows) + +### The `handle_exception` Bridge + +Routes wrap their work in a `try/except` and let `handle_exception()` map domain errors to HTTP errors: + +```python +from ..common.utils.error_handler import handle_exception + + +try: + return await widget_service.update(widget_id, values, db) +except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") +``` + +`handle_exception` returns the matching HTTP exception (or `None` for unrecognized errors, which become a 500). 
+ +### Direct HTTP Exceptions + +When you have an immediate HTTP-shaped failure with no service involvement, raise directly: ```python -from app.core.exceptions.http_exceptions import ( - NotFoundException, - DuplicateValueException, - ForbiddenException -) - -@router.get("/{user_id}") -async def get_user(user_id: int, db: AsyncSession): - user = await crud_users.get(db=db, id=user_id) - if not user: - raise NotFoundException("User not found") # Returns 404 - return user - -@router.post("/") -async def create_user(user_data: UserCreate, db: AsyncSession): - if await crud_users.exists(db=db, email=user_data.email): - raise DuplicateValueException("Email already exists") # Returns 409 - - return await crud_users.create(db=db, object=user_data) +from ...infrastructure.auth.http_exceptions import NotFoundException + + +@router.get("/{name}", response_model=TierRead) +async def get_tier_by_name(...): + try: + return await tier_service.get_by_name(name, db) + except TierNotFoundError: + raise NotFoundException("Tier not found") ``` +This pattern is used in `modules/tier/routes.py`. See [Exceptions](exceptions.md) for the full picture. + ## File Uploads ```python -from fastapi import UploadFile, File +from fastapi import File, UploadFile + @router.post("/{user_id}/avatar") async def upload_avatar( user_id: int, + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + db: Annotated[AsyncSession, Depends(async_session)], file: UploadFile = File(...), - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Check file type - if not file.content_type.startswith('image/'): +) -> dict[str, str]: + if not file.content_type or not file.content_type.startswith("image/"): raise HTTPException(status_code=400, detail="File must be an image") - - # Save file and update user - # ... file handling logic ... - + + # ...persist the file via your storage backend, then update the user... 
return {"message": "Avatar uploaded successfully"} ``` -## Creating New Endpoints +The boilerplate doesn't ship a default storage backend; pick one (local disk, S3, GCS) and add it as a settings group when you need it. + +## Adding a New Endpoint Module + +The full flow for adding `widgets`: + +### 1. Create the Module + +```bash +mkdir -p backend/src/modules/widgets +touch backend/src/modules/widgets/__init__.py +``` + +### 2. Add the Stack + +| File | Contents | +|------|----------| +| `models.py` | SQLAlchemy `Widget` model (see [Models](../database/models.md)) | +| `schemas.py` | `WidgetCreate`, `WidgetRead`, `WidgetUpdate` (see [Schemas](../database/schemas.md)) | +| `crud.py` | `crud_widgets: FastCRUD = FastCRUD(Widget)` | +| `service.py` | `WidgetService` with `create`, `get_by_id`, `update`, `delete` methods | +| `routes.py` | `APIRouter` with the endpoints | -### Step 1: Create the Router File +### 3. Register the Model -Create `src/app/api/v1/posts.py`: +In `backend/src/modules/__init__.py`: ```python -from fastapi import APIRouter, Depends, HTTPException -from typing import Annotated - -from app.core.db.database import async_get_db -from app.crud.crud_posts import crud_posts # You'll create this -from app.schemas.post import PostRead, PostCreate, PostUpdate # You'll create these -from app.api.dependencies import get_current_user - -router = APIRouter(prefix="/posts", tags=["posts"]) - -@router.get("/", response_model=list[PostRead]) -async def get_posts(db: Annotated[AsyncSession, Depends(async_get_db)]): - posts = await crud_posts.get_multi(db=db, schema_to_select=PostRead) - return posts["data"] - -@router.post("/", response_model=PostRead, status_code=201) -async def create_post( - post_data: PostCreate, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Add current user as post author - post_dict = post_data.model_dump() - post_dict["author_id"] = current_user["id"] - - new_post = 
await crud_posts.create(db=db, object=post_dict) - return new_post +from .widgets.models import Widget + +__all__ = [..., "Widget"] ``` -### Step 2: Register the Router +### 4. Mount the Router -In `src/app/api/v1/__init__.py`, add: +In `backend/src/interfaces/api/v1/__init__.py`: ```python -from .posts import router as posts_router +from ....modules.widgets.routes import router as widgets_router -api_router.include_router(posts_router) +router.include_router(widgets_router, prefix="/widgets") ``` -### Step 3: Test Your Endpoints +### 5. Generate a Migration -Your new endpoints will be available at: -- `GET /api/v1/posts/` - Get all posts -- `POST /api/v1/posts/` - Create new post (requires login) +```bash +cd backend +uv run alembic revision --autogenerate -m "Add widgets table" +uv run alembic upgrade head +``` -## Best Practices +### 6. Test -1. **Always use the database dependency**: `Depends(async_get_db)` -2. **Use existing CRUD methods**: `crud_users.get()`, `crud_users.create()`, etc. -3. **Check if items exist before operations**: Use `crud_users.exists()` -4. **Use proper HTTP status codes**: `status_code=201` for creation -5. **Add authentication when needed**: `Depends(get_current_user)` -6. **Use response models**: `response_model=UserRead` -7. **Handle errors with custom exceptions**: `NotFoundException`, `DuplicateValueException` +```bash +curl http://localhost:8000/api/v1/widgets/ +``` -## What's Next +Your routes are now visible in `/docs`. -Now that you understand basic endpoints: +## Best Practices -- **[Pagination](pagination.md)** - Add pagination to your endpoints
-- **[Exceptions](exceptions.md)** - Custom error handling and HTTP exceptions
-- **[CRUD Operations](../database/crud.md)** - Understand the CRUD layer
+1. **Delegate to a service** — keep `routes.py` thin. Routes handle HTTP; services hold rules. +2. **Use the `handle_exception` pattern** — uniform error translation across the codebase. +3. **Prefer `schema_to_select=`** — only return the columns the response model needs. +4. **Use `*Update` schemas with all fields optional** — partial updates are the convention. +5. **Match status codes to actions**: 201 on create, 204 on delete-with-no-body, 200 default. +6. **Keep route signatures consistent** — `db` and `_service` injected via `Annotated[..., Depends(...)]`, dependency-only auth as `_`. +7. **Don't import models across modules** — except for foreign-key relationships (and even then via `TYPE_CHECKING`). + +## What's Next -The boilerplate provides everything you need - just follow these patterns! \ No newline at end of file +- **[Pagination](pagination.md)** — Paginate list endpoints with `PaginatedListResponse` +- **[Exceptions](exceptions.md)** — The full exception model +- **[API Versioning](versioning.md)** — How `/api/v1/` is wired and how to add `/api/v2/` +- **[CRUD Operations](../database/crud.md)** — The data layer below your service diff --git a/docs/user-guide/api/exceptions.md b/docs/user-guide/api/exceptions.md index d4b9cd7c..fb53087e 100644 --- a/docs/user-guide/api/exceptions.md +++ b/docs/user-guide/api/exceptions.md @@ -1,469 +1,345 @@ # API Exception Handling -Learn how to handle errors properly in your API endpoints using the boilerplate's built-in exceptions and patterns. +The boilerplate has a deliberate two-layer exception model: -## Quick Start +1. **Domain exceptions** raised by services (`modules/common/exceptions.py`) +2. 
**HTTP exceptions** raised by routes (`infrastructure/auth/http_exceptions.py`) -The boilerplate provides ready-to-use exceptions that return proper HTTP status codes: +Plus an automatic mapping layer that translates one to the other so routes don't have to know about specific HTTP status codes for every domain failure. -```python -from app.core.exceptions.http_exceptions import NotFoundException - -@router.get("/{user_id}") -async def get_user(user_id: int, db: AsyncSession): - user = await crud_users.get(db=db, id=user_id) - if not user: - raise NotFoundException("User not found") # Returns 404 - return user -``` - -That's it! The exception automatically becomes a proper JSON error response. +## Domain Exceptions -## Built-in Exceptions +Defined in `backend/src/modules/common/exceptions.py`. Services raise these — they describe *what went wrong*, not how to translate it to HTTP. -The boilerplate includes common HTTP exceptions you'll need: +| Exception | Used when | +|-----------|-----------| +| `DomainError` | Base class for all domain errors | +| `ResourceNotFoundError` | A requested record doesn't exist | +| `ResourceExistsError` | A unique constraint would fail | +| `ValidationError` | Input doesn't satisfy a business rule | +| `PermissionDeniedError` | The current user can't perform this action | +| `UserNotFoundError` (extends `ResourceNotFoundError`) | Specific: user lookup failed | +| `UserExistsError` (extends `ResourceExistsError`) | Specific: duplicate username/email | +| `TierNotFoundError` (extends `ResourceNotFoundError`) | Specific: tier lookup failed | +| `RateLimitNotFoundError` (extends `ResourceNotFoundError`) | Specific: rate limit row missing | +| `InsufficientCreditsError` | Quota / credit balance hit zero | +| `UsageLimitExceededError` | API key usage limit hit | -### NotFoundException (404) ```python -from app.core.exceptions.http_exceptions import NotFoundException +# modules/user/service.py +async def create(self, user: UserCreate, db: 
AsyncSession) -> dict[str, Any]: + if await crud_users.exists(db=db, email=user.email): + raise UserExistsError("Email already registered") -@router.get("/{user_id}") -async def get_user(user_id: int): - user = await crud_users.get(db=db, id=user_id) - if not user: - raise NotFoundException("User not found") - return user + if await crud_users.exists(db=db, username=user.username): + raise UserExistsError("Username already taken") -# Returns: -# Status: 404 -# {"detail": "User not found"} + # ... ``` -### DuplicateValueException (409) -```python -from app.core.exceptions.http_exceptions import DuplicateValueException - -@router.post("/") -async def create_user(user_data: UserCreate): - if await crud_users.exists(db=db, email=user_data.email): - raise DuplicateValueException("Email already exists") - - return await crud_users.create(db=db, object=user_data) - -# Returns: -# Status: 409 -# {"detail": "Email already exists"} -``` +The service doesn't know or care that this becomes a `409 Conflict` over HTTP — that mapping happens elsewhere. 
-### ForbiddenException (403) -```python -from app.core.exceptions.http_exceptions import ForbiddenException - -@router.delete("/{user_id}") -async def delete_user( - user_id: int, - current_user: Annotated[dict, Depends(get_current_user)] - ): - if current_user["id"] != user_id and not current_user["is_superuser"]: - raise ForbiddenException("You can only delete your own account") - - await crud_users.delete(db=db, id=user_id) - return {"message": "User deleted"} - -# Returns: -# Status: 403 -# {"detail": "You can only delete your own account"} -``` +## HTTP Exceptions -### UnauthorizedException (401) -```python -from app.core.exceptions.http_exceptions import UnauthorizedException - -# This is typically used in the auth system, but you can use it too: -@router.get("/admin-only") -async def admin_endpoint(): - # Some validation logic - if not user_is_admin: - raise UnauthorizedException("Admin access required") - - return {"data": "secret admin data"} - -# Returns: -# Status: 401 -# {"detail": "Admin access required"} -``` +Re-exported from FastCRUD in `backend/src/infrastructure/auth/http_exceptions.py`: -## Common Patterns +| Exception | Status | +|-----------|--------| +| `BadRequestException` | 400 | +| `UnauthorizedException` | 401 | +| `ForbiddenException` | 403 | +| `NotFoundException` | 404 | +| `DuplicateValueException` | 409 | +| `UnprocessableEntityException` | 422 | +| `RateLimitException` | 429 | +| `HTTPException` | base FastAPI class | +| `CSRFException` | 403 with `X-CSRF-Error: true` header (defined locally) | -### Check Before Create -```python -@router.post("/", response_model=UserRead) -async def create_user(user_data: UserCreate, db: AsyncSession): - # Check email - if await crud_users.exists(db=db, email=user_data.email): - raise DuplicateValueException("Email already exists") - - # Check username - if await crud_users.exists(db=db, username=user_data.username): - raise DuplicateValueException("Username already taken") - - # Create user - 
return await crud_users.create(db=db, object=user_data) - -# For public registration endpoints, consider rate limiting -# to prevent email enumeration attacks -``` +Use these from routes when you have an HTTP-shaped failure and no service involvement: -### Check Before Update ```python -@router.patch("/{user_id}", response_model=UserRead) -async def update_user( - user_id: int, - user_data: UserUpdate, - db: AsyncSession - ): - # Check if user exists - if not await crud_users.exists(db=db, id=user_id): - raise NotFoundException("User not found") - - # Check for email conflicts (if email is being updated) - if user_data.email: - existing = await crud_users.get(db=db, email=user_data.email) - if existing and existing.id != user_id: - raise DuplicateValueException("Email already taken") - - # Update user - return await crud_users.update(db=db, object=user_data, id=user_id) -``` +from ...infrastructure.auth.http_exceptions import NotFoundException -### Check Ownership -```python -@router.get("/{post_id}") -async def get_post( - post_id: int, - current_user: Annotated[dict, Depends(get_current_user)], - db: AsyncSession - ): - post = await crud_posts.get(db=db, id=post_id) - if not post: - raise NotFoundException("Post not found") - - # Check if user owns the post or is admin - if post.author_id != current_user["id"] and not current_user["is_superuser"]: - raise ForbiddenException("You can only view your own posts") - - return post +@router.get("/{name}", response_model=TierRead) +async def get_tier_by_name(...): + try: + return await tier_service.get_by_name(name, db) + except TierNotFoundError: + raise NotFoundException("Tier not found") ``` -> **Note:** -> Some CRUD helper functions may evolve to return falsy-but-valid values (e.g. empty objects). -> To future-proof your API, prefer `if post is None:` instead of `if not post:` when checking existence. 
+## The Mapping Layer -## Validation Errors +`modules/common/utils/error_handler.py` ships two ways to bridge domain → HTTP errors: -FastAPI automatically handles Pydantic validation errors, but you can catch and customize them: +### Global Handler (Automatic) -```python -from fastapi import HTTPException -from pydantic import ValidationError +`register_exception_handlers(app)` is called in `infrastructure/app_factory.py` at startup. It installs: -@router.post("/") -async def create_user(user_data: UserCreate): - try: - # If user_data fails validation, Pydantic raises ValidationError - # FastAPI automatically converts this to a 422 response - return await crud_users.create(db=db, object=user_data) - except ValidationError as e: - # You can catch and customize if needed - raise HTTPException( - status_code=400, - detail=f"Invalid data: {e.errors()}" - ) -``` +- A `RequestValidationError` handler (Pydantic 422s) → returns a generic `Invalid request` message + a `support_id` +- A catch-all `DomainError` handler → maps to the right HTTP status via `EXCEPTION_MAPPING`, returns a **generic** message + `support_id`. The full details are logged server-side. +- A `CatchAllErrorMiddleware` that converts truly unhandled exceptions into 500s with a `support_id` + +This means: **any uncaught `DomainError` raised in a service automatically becomes a properly-shaped HTTP response.** You don't have to wire it up per-route. -## Standard HTTP Exceptions +### Manual Handler (Explicit) -For other status codes, use FastAPI's HTTPException: +Inside route handlers, you can use `handle_exception()` to translate explicitly. 
This is the convention in the existing routes — it's slightly more verbose but it keeps the error path obvious in code review: ```python -from fastapi import HTTPException - -# Bad Request (400) -@router.post("/") -async def create_something(data: dict): - if not data.get("required_field"): - raise HTTPException( - status_code=400, - detail="required_field is missing" - ) +from ..common.utils.error_handler import handle_exception +from ...infrastructure.auth.http_exceptions import HTTPException -# Too Many Requests (429) -@router.post("/") -async def rate_limited_endpoint(): - if rate_limit_exceeded(): - raise HTTPException( - status_code=429, - detail="Rate limit exceeded. Try again later." - ) -# Internal Server Error (500) -@router.get("/") -async def risky_endpoint(): +@router.post("/", response_model=UserRead, status_code=201) +async def create_user( + user: UserCreate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: try: - # Some operation that might fail - result = risky_operation() - return result + return await user_service.create(user, db) except Exception as e: - # Log the error - logger.error(f"Unexpected error: {e}") - raise HTTPException( - status_code=500, - detail="An unexpected error occurred" - ) + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -## Creating Custom Exceptions +`handle_exception()`: -If you need custom exceptions, follow the boilerplate's pattern: +- Returns the mapped `HTTPException` if `e` is a `DomainError` +- Returns `e` unchanged if it's already an `HTTPException` +- Returns `None` otherwise (route then raises a 500) + +### The Default Mapping + +The mapping in `modules/common/constants.py`: ```python -# In app/core/exceptions/http_exceptions.py (add to existing file) -from fastapi import HTTPException 
+EXCEPTION_MAPPING: dict[type[DomainError], Callable[[str], HTTPException]] = { + InsufficientCreditsError: lambda m: HTTPException(status_code=402, detail=m or "Insufficient credits."), + ResourceNotFoundError: lambda m: NotFoundException("The requested resource was not found."), + ResourceExistsError: lambda m: DuplicateValueException("This resource already exists."), + ValidationError: lambda m: UnprocessableEntityException(detail=m), + PermissionDeniedError: lambda m: ForbiddenException("You don't have permission for this action."), + UserNotFoundError: lambda m: NotFoundException("User not found."), + UserExistsError: lambda m: DuplicateValueException(m or "A user with this email or username already exists."), + TierNotFoundError: lambda m: NotFoundException("The requested tier was not found."), + RateLimitNotFoundError: lambda m: NotFoundException("Rate limit configuration not found."), +} +``` -class PaymentRequiredException(HTTPException): - """402 Payment Required""" - def __init__(self, detail: str = "Payment required"): - super().__init__(status_code=402, detail=detail) +Notice the default messages **don't echo the raised exception's message** — most map to generic strings to avoid leaking internal details. The full message goes to logs, with a `support_id` returned to the client so you can correlate. 
-class TooManyRequestsException(HTTPException): -    """429 Too Many Requests""" -    def __init__(self, detail: str = "Too many requests"): -        super().__init__(status_code=429, detail=detail) +## Response Format -# Use them in your endpoints -from app.core.exceptions.http_exceptions import PaymentRequiredException +### Standard error -@router.get("/premium-feature") -async def premium_feature(current_user: dict): -    if current_user["tier"] == "free": -        raise PaymentRequiredException("Upgrade to access this feature") -    -    return {"data": "premium content"} +```json +{ +  "detail": "User not found.", +  "support_id": "a1b2c3d4" +} ``` -## Error Response Format - -All exceptions return consistent JSON responses: +### Validation error (422) ```json { -  "detail": "Error message here" +  "detail": "Invalid request. Please check your input and try again.", +  "support_id": "a1b2c3d4" } ``` -For validation errors (422), you get more detail: +### `InsufficientCreditsError` (402) — the exception to the rule + +This is the one case where the original error message is preserved, because the frontend needs the credit info for upgrade prompts: ```json { -  "detail": [ -    { -      "type": "missing", -      "loc": ["body", "email"], -      "msg": "Field required", -      "input": null -    } -  ] +  "detail": "Need 100 more credits to complete this operation", +  "support_id": "a1b2c3d4" +} ``` -## Global Exception Handling +## Common Patterns -The boilerplate includes global exception handlers. 
You can add your own in `main.py`: +### Check Before Create ```python -from fastapi import FastAPI, Request -from fastapi.responses import JSONResponse - -app = FastAPI() - -@app.exception_handler(ValueError) -async def value_error_handler(request: Request, exc: ValueError): - """Handle ValueError exceptions globally""" - return JSONResponse( - status_code=400, - content={"detail": f"Invalid value: {str(exc)}"} - ) - -@app.exception_handler(Exception) -async def general_exception_handler(request: Request, exc: Exception): - """Catch-all exception handler""" - # Log the error - logger.error(f"Unhandled exception: {exc}") - - return JSONResponse( - status_code=500, - content={"detail": "An unexpected error occurred"} - ) +# Service method — domain layer +async def create(self, user: UserCreate, db: AsyncSession) -> dict[str, Any]: + if await crud_users.exists(db=db, email=user.email): + raise UserExistsError("Email already registered") + if await crud_users.exists(db=db, username=user.username): + raise UserExistsError("Username already taken") + # ... ``` -## Security Considerations +The route doesn't need to know — `UserExistsError` becomes a 409 automatically. -### Authentication Endpoints - Use Generic Messages +### Permission Check -For security, authentication endpoints should use generic error messages to prevent information disclosure: +```python +async def update_profile( + self, current_user: dict, target_username: str, values: UserUpdate, db: AsyncSession, +) -> None: + if current_user["username"] != target_username and not current_user["is_superuser"]: + raise PermissionDeniedError("You can only update your own profile") + # ... 
+``` + +### Resource Lookup ```python -# SECURITY: Don't reveal if username exists -@router.post("/login") -async def login(credentials: LoginCredentials): - user = await crud_users.get(db=db, username=credentials.username) - - # Don't do this - reveals if username exists - # if not user: - # raise NotFoundException("User not found") - # if not verify_password(credentials.password, user.hashed_password): - # raise UnauthorizedException("Invalid password") - - # Do this - generic message for all auth failures - if not user or not verify_password(credentials.password, user.hashed_password): - raise UnauthorizedException("Invalid username or password") - - return create_access_token(user.id) - -# SECURITY: Don't reveal if email is registered during password reset -@router.post("/forgot-password") -async def forgot_password(email: str): - user = await crud_users.get(db=db, email=email) - - # Don't do this - reveals if email exists - # if not user: - # raise NotFoundException("Email not found") - - # Do this - always return success message - if user: - await send_password_reset_email(user.email) - - # Always return the same message - return {"message": "If the email exists, a reset link has been sent"} +async def get_by_username(self, username: str, db: AsyncSession) -> dict[str, Any]: + user = await crud_users.get(db=db, username=username, is_deleted=False) + if user is None: + raise UserNotFoundError(f"User '{username}' not found") + return user ``` -### Resource Access - Be Specific When Safe +### Direct HTTP for non-domain failures -For non-auth operations, specific messages help developers: +When the failure has no domain meaning (e.g. 
a missing query parameter combination), raise the HTTP exception directly: ```python -# Safe to be specific for resource operations -@router.get("/{post_id}") -async def get_post( - post_id: int, - current_user: Annotated[dict, Depends(get_current_user)] +from ...infrastructure.auth.http_exceptions import BadRequestException + + +@router.get("/") +async def search( + q: str | None = None, + tag: str | None = None, ): - post = await crud_posts.get(db=db, id=post_id) - if not post: - raise NotFoundException("Post not found") # Safe to be specific - - if post.author_id != current_user["id"]: - # Don't reveal post exists if user can't access it - raise NotFoundException("Post not found") # Generic, not "Access denied" - - return post + if q is None and tag is None: + raise BadRequestException("Provide either ?q= or ?tag=") + # ... ``` -## Best Practices +## Adding a Custom Domain Exception -### 1. Use Specific Exceptions (When Safe) -```python -# Good for non-sensitive operations -if not user: - raise NotFoundException("User not found") +1. **Define the exception** in `modules/common/exceptions.py`: -# Good for validation errors -raise DuplicateValueException("Username already taken") -``` + ```python + class WidgetExceededError(DomainError): + """Raised when a user tries to create more widgets than their tier allows.""" + pass + ``` + +2. **Add a mapping** in `modules/common/constants.py`: + + ```python + from .exceptions import WidgetExceededError + + EXCEPTION_MAPPING = { + # ...existing entries... + WidgetExceededError: lambda m: HTTPException( + status_code=403, detail="You've hit your widget limit" + ), + } + ``` + +3. **Raise it from your service**: + + ```python + raise WidgetExceededError("Free tier limited to 10 widgets") + ``` + +The global handler (and `handle_exception()`) picks up the new mapping automatically. 
+ +## Adding a Custom HTTP Exception + +If you need an HTTP exception not already exported, define it in `infrastructure/auth/http_exceptions.py` like the existing `CSRFException`: -### 2. Use Generic Messages for Security ```python -# Good for authentication -raise UnauthorizedException("Invalid username or password") +class PaymentRequiredException(HTTPException): + """402 Payment Required.""" -# Good for authorization (don't reveal resource exists) -raise NotFoundException("Resource not found") # Instead of "Access denied" + def __init__(self, detail: str = "Payment required") -> None: + super().__init__(status_code=402, detail=detail) ``` -### 3. Check Permissions Early +Then re-export it via `__all__` and import it where needed. + +## Security Considerations + +### Generic Messages for Auth + +Authentication routes already follow this pattern in `infrastructure/auth/routes.py`: + ```python -@router.delete("/{user_id}") -async def delete_user( - user_id: int, - current_user: Annotated[dict, Depends(get_current_user)] -): - # Check permission first - if current_user["id"] != user_id: - raise ForbiddenException("Cannot delete other users") - - # Then check if user exists - if not await crud_users.exists(db=db, id=user_id): - raise NotFoundException("User not found") - - await crud_users.delete(db=db, id=user_id) +user = await authenticate_user(...) +if user is None: + logger.warning(f"Failed login attempt for {form_data.username} from IP {ip_address}") + raise UnauthorizedException("Incorrect username or password") ``` -### 4. Log Important Errors -```python -import logging +It doesn't say "username not found" or "wrong password" — both reveal whether the username exists. 
-logger = logging.getLogger(__name__) +### Hide Resource Existence -@router.post("/") -async def create_user(user_data: UserCreate): - try: - return await crud_users.create(db=db, object=user_data) - except Exception as e: - logger.error(f"Failed to create user: {e}") - raise HTTPException(status_code=500, detail="User creation failed") +For protected resources the user shouldn't even know about, return 404 instead of 403: + +```python +post = await crud_posts.get(db=db, id=post_id) +if post is None: + raise NotFoundException("Post not found") + +if post["author_id"] != current_user["id"]: + # 404, not 403 — don't reveal the post exists + raise NotFoundException("Post not found") ``` +### Don't Leak Internal Details + +The global handler is already defensive about this — it returns generic messages and writes the real error to logs with a `support_id`. The `support_id` is your handle for grep'ing logs when a user reports an issue. + ## Testing Exceptions -Test that your endpoints raise the right exceptions: +The codebase uses `pytest-asyncio` and FastAPI's `TestClient` for route tests: ```python -import pytest -from httpx import AsyncClient - @pytest.mark.asyncio async def test_user_not_found(client: AsyncClient): - response = await client.get("/api/v1/users/99999") - assert response.status_code == 404 - assert "User not found" in response.json()["detail"] + resp = await client.get("/api/v1/users/not-a-user") + assert resp.status_code == 404 + body = resp.json() + assert body["detail"] + assert "support_id" in body + @pytest.mark.asyncio async def test_duplicate_email(client: AsyncClient): - # Create a user - await client.post("/api/v1/users/", json={ + payload = { "name": "Test User", "username": "test1", "email": "test@example.com", - "password": "Password123!" 
- }) - - # Try to create another with same email - response = await client.post("/api/v1/users/", json={ - "name": "Test User 2", - "username": "test2", - "email": "test@example.com", # Same email - "password": "Password123!" - }) - - assert response.status_code == 409 - assert "Email already exists" in response.json()["detail"] + "password": "Password123!", + } + await client.post("/api/v1/users/", json=payload) + + payload["username"] = "test2" # different username, same email + resp = await client.post("/api/v1/users/", json=payload) + assert resp.status_code == 409 ``` -## What's Next +For service-level tests, just assert the right `DomainError` is raised: + +```python +@pytest.mark.asyncio +async def test_create_duplicate_user_raises(db_session, existing_user): + service = UserService() + with pytest.raises(UserExistsError): + await service.create( + UserCreate(name="...", username=existing_user["username"], email="x@x.com", password="..."), + db_session, + ) +``` -Now that you understand error handling: -- **[Versioning](versioning.md)** - Learn how to version your APIs
-- **[Database CRUD](../database/crud.md)** - Understand the database operations
-- **[Authentication](../authentication/index.md)** - Add user authentication to your APIs +## What's Next -Proper error handling makes your API much more user-friendly and easier to debug! \ No newline at end of file +- **[Versioning](versioning.md)** — Versioning strategy +- **[CRUD Operations](../database/crud.md)** — How services use CRUD +- **[Authentication](../authentication/index.md)** — Sessions, OAuth, API keys diff --git a/docs/user-guide/api/index.md b/docs/user-guide/api/index.md index 1c7e4df1..36113761 100644 --- a/docs/user-guide/api/index.md +++ b/docs/user-guide/api/index.md @@ -4,122 +4,204 @@ Learn how to build REST APIs with the FastAPI Boilerplate. This section covers e ## What You'll Learn -- **[Endpoints](endpoints.md)** - Create CRUD endpoints with authentication and validation -- **[Pagination](pagination.md)** - Add pagination to handle large datasets -- **[Exception Handling](exceptions.md)** - Handle errors properly with built-in exceptions +- **[Endpoints](endpoints.md)** - Create endpoints with authentication and validation +- **[Pagination](pagination.md)** - Add pagination to list endpoints +- **[Exception Handling](exceptions.md)** - Handle errors with the boilerplate's exception types - **[API Versioning](versioning.md)** - Version your APIs and maintain backward compatibility -- **Database Integration** - Use the boilerplate's CRUD layer and schemas ## Quick Overview -The boilerplate provides everything you need for API development: +Routes are defined in each module's `routes.py`. The aggregator at `interfaces/api/v1/__init__.py` mounts each module's router under `/api/v1`. 
```python +# backend/src/modules/user/routes.py +from typing import Annotated, Any from fastapi import APIRouter, Depends -from app.crud.crud_users import crud_users -from app.schemas.user import UserRead, UserCreate -from app.core.db.database import async_get_db +from sqlalchemy.ext.asyncio import AsyncSession -router = APIRouter(prefix="/users", tags=["users"]) +from ...infrastructure.database.session import async_session +from .schemas import UserCreate, UserRead +from .service import UserService + +router = APIRouter(tags=["Users"]) + + +def get_user_service() -> UserService: + return UserService() -@router.get("/", response_model=list[UserRead]) -async def get_users(db: Annotated[AsyncSession, Depends(async_get_db)]): - users = await crud_users.get_multi(db=db, schema_to_select=UserRead) - return users["data"] @router.post("/", response_model=UserRead, status_code=201) async def create_user( - user_data: UserCreate, - db: Annotated[AsyncSession, Depends(async_get_db)] - ): - return await crud_users.create(db=db, object=user_data) + user: UserCreate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + return await user_service.create(user, db) +``` + +The aggregator wires it up: + +```python +# backend/src/interfaces/api/v1/__init__.py +from fastapi import APIRouter + +from ....modules.user.routes import router as users_router + +router = APIRouter(prefix="/v1") +router.include_router(users_router, prefix="/users") ``` +Final URL: `POST /api/v1/users/`. + ## Key Features -### 🔐 **Built-in Authentication** -Add authentication to any endpoint: +### Built-in Authentication + +Session-based auth with HTTP-only cookies. 
Pull the current user from `infrastructure/auth/session/dependencies`: + ```python -from app.api.dependencies import get_current_user +from ...infrastructure.auth.session.dependencies import get_current_user @router.get("/me", response_model=UserRead) -async def get_profile(current_user: Annotated[dict, Depends(get_current_user)]): +async def get_profile( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: return current_user ``` -### 📊 **Easy Pagination** -Paginate any endpoint with one line: +For superuser-only endpoints, swap in `get_current_superuser`. See [Authentication](../authentication/index.md) for the full picture. + +### Easy Pagination + +The boilerplate uses FastCRUD's `PaginatedListResponse` and `paginated_response()` helper: + ```python -from fastcrud import PaginatedListResponse +from fastcrud import PaginatedListResponse, compute_offset, paginated_response @router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users(page: int = 1, items_per_page: int = 10): - # Add pagination to any endpoint +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + result = await user_service.get_paginated( + skip=compute_offset(page, items_per_page), + limit=items_per_page, + db=db, + ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) ``` -### ✅ **Automatic Validation** -Request and response validation is handled automatically: +### Automatic Validation + +Request bodies, query parameters, and response models are all validated by Pydantic: + ```python @router.post("/", response_model=UserRead) -async def create_user(user_data: UserCreate): # ← Validates input - return await crud_users.create(object=user_data) # ← Validates output +async def create_user(user: UserCreate): # ← validates input + return await 
user_service.create(user, db) # ← validates output via response_model ``` -### 🛡️ **Error Handling** -Use built-in exceptions for consistent error responses: +### Error Handling + +Domain errors live in `modules/common/exceptions.py`. Routes catch them and translate them to HTTP responses via `handle_exception`: + ```python -from app.core.exceptions.http_exceptions import NotFoundException - -@router.get("/{user_id}") -async def get_user(user_id: int): - user = await crud_users.get(id=user_id) - if not user: - raise NotFoundException("User not found") # Returns proper 404 - return user +from ...infrastructure.auth.http_exceptions import HTTPException +from ..common.utils.error_handler import handle_exception + +@router.get("/{username}", response_model=UserRead) +async def get_user_by_username( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, Any]: + try: + user = await user_service.get_by_username(username, db) + if user is None: + raise HTTPException(status_code=404, detail=f"User with username {username} not found") + return user + except Exception as e: + http_exception = handle_exception(e) + if http_exception: + raise http_exception + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -## Architecture +See [Exception Handling](exceptions.md) for the full catalog. 
-The boilerplate follows a layered architecture:
+## Architecture
 
-```
-API Endpoint
+```text
+HTTP Request
+    ↓
+APIRouter (modules/<module>/routes.py)
     ↓
-Pydantic Schema (validation)
+Service (modules/<module>/service.py) — business rules, permission checks
     ↓
-CRUD Layer (database operations)
+FastCRUD (modules/<module>/crud.py)
     ↓
-SQLAlchemy Model (database)
+SQLAlchemy Model (modules/<module>/models.py)
+    ↓
+PostgreSQL
 ```
 
-This separation makes your code:
-- **Testable** - Mock any layer easily
-- **Maintainable** - Clear separation of concerns
-- **Scalable** - Add features without breaking existing code
+The split keeps:
+
+- HTTP concerns (status codes, schemas, dependencies) in `routes.py`
+- Business logic (validation, orchestration) in `service.py`
+- Database I/O in `crud.py`
+
+You can mock any layer in tests; you can change one without breaking the others.
 
 ## Directory Structure
 
 ```text
-src/app/api/
-├── dependencies.py # Shared dependencies (auth, rate limiting)
-└── v1/ # API version 1
-    ├── users.py # User endpoints
-    ├── posts.py # Post endpoints
-    ├── login.py # Authentication
-    └── ... # Other endpoints
+backend/src/
+├── interfaces/
+│   └── api/
+│       ├── __init__.py # mounts /api
+│       └── v1/
+│           └── __init__.py # mounts /v1 + every module's router
+├── infrastructure/
+│   └── auth/
+│       └── routes.py # /api/v1/auth/* (login, OAuth, check-auth)
+└── modules/
+    ├── user/routes.py # /api/v1/users/*
+    ├── tier/routes.py # /api/v1/tiers/*
+    ├── rate_limit/routes.py # /api/v1/rate-limits/*
+    └── api_keys/routes.py # /api/v1/api-keys/*
 ```
 
+Auth lives in `infrastructure/auth/routes.py` instead of in a feature module because authentication is structural — every other feature depends on it.
+ +## Mounted Endpoints + +What ships out of the box (40 total routes): + +| Prefix | Source | Notes | +|--------|--------|-------| +| `POST/GET/PATCH/DELETE /api/v1/users/*` | `modules/user/routes.py` | Open create, session/superuser-gated reads/updates | +| `GET /api/v1/tiers/*` | `modules/tier/routes.py` | Public list + lookup by name | +| `GET/PATCH/DELETE /api/v1/rate-limits/*` | `modules/rate_limit/routes.py` | List/get public; PATCH/DELETE require superuser | +| `POST /api/v1/auth/login`, `logout`, `refresh-csrf`, `check-auth` | `infrastructure/auth/routes.py` | Session auth | +| `GET /api/v1/auth/oauth/google`, `oauth/callback/google` | `infrastructure/auth/routes.py` | Google OAuth | +| `POST/GET/PATCH/DELETE /api/v1/api-keys/*` | `modules/api_keys/routes.py` | Authenticated key management | +| `GET /admin/*` | `interfaces/admin/initialize.py` | SQLAdmin UI | +| `GET /docs`, `/redoc`, `/openapi.json` | FastAPI built-ins | Disabled in production unless `ENABLE_DOCS_IN_PRODUCTION=true` | +| `GET /health` | App factory | Liveness check | + ## What's Next Start with the basics: -1. **[Endpoints](endpoints.md)** - Learn the common patterns for creating API endpoints -2. **[Pagination](pagination.md)** - Add pagination to handle large datasets -3. **[Exception Handling](exceptions.md)** - Handle errors properly with built-in exceptions -4. **[API Versioning](versioning.md)** - Version your APIs and maintain backward compatibility +1. **[Endpoints](endpoints.md)** - Common patterns for new routes +2. **[Pagination](pagination.md)** - List endpoints with paged responses +3. **[Exception Handling](exceptions.md)** - The boilerplate's exception model +4. **[API Versioning](versioning.md)** - Versioning strategy -Then dive deeper into the foundation: -5. **[Database Schemas](../database/schemas.md)** - Create schemas for your data -6. 
**[CRUD Operations](../database/crud.md)** - Understand the database layer +Then go deeper: -Each guide builds on the previous one with practical examples you can use immediately. \ No newline at end of file +5. **[Database Schemas](../database/schemas.md)** - Pydantic shapes used in routes +6. **[CRUD Operations](../database/crud.md)** - The data layer below the service diff --git a/docs/user-guide/api/pagination.md b/docs/user-guide/api/pagination.md index 27da9a00..d67c1e84 100644 --- a/docs/user-guide/api/pagination.md +++ b/docs/user-guide/api/pagination.md @@ -1,316 +1,308 @@ # API Pagination -This guide shows you how to add pagination to your API endpoints using the boilerplate's built-in utilities. Pagination helps you handle large datasets efficiently. +The boilerplate uses FastCRUD's `PaginatedListResponse[T]` and `paginated_response()` helpers for paginated list endpoints. This page documents the pattern. ## Quick Start -Here's how to add basic pagination to any endpoint: - ```python -from fastcrud import PaginatedListResponse +from typing import Annotated, Any + +from fastapi import APIRouter, Depends +from fastcrud import PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.database.session import async_session +from .schemas import UserRead +from .service import UserService + @router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users( +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], page: int = 1, items_per_page: int = 10, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * items_per_page, +) -> dict[str, Any]: + result = await user_service.get_paginated( + skip=compute_offset(page, items_per_page), limit=items_per_page, - schema_to_select=UserRead, - return_as_model=True, - 
return_total_count=True - ) - - return paginated_response( - crud_data=users, - page=page, - items_per_page=items_per_page + db=db, ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) ``` -That's it! Your endpoint now returns paginated results with metadata. +`compute_offset(page, items_per_page)` is the documented helper — use it instead of computing `(page - 1) * items_per_page` by hand. -## What You Get +## Response Shape -The response includes everything frontends need: +`paginated_response()` returns: ```json { - "data": [ - { - "id": 1, - "name": "John Doe", - "username": "johndoe", - "email": "john@example.com" - } - // ... more users - ], - "total_count": 150, - "has_more": true, - "page": 1, - "items_per_page": 10, - "total_pages": 15 + "data": [ + { "id": 1, "name": "User Userson", "username": "userson", "email": "user@example.com" } + ], + "total_count": 150, + "has_more": true, + "page": 1, + "items_per_page": 10 } ``` -## Adding Filters +`has_more` is `True` when there are still rows past the current page (`page * items_per_page < total_count`). The boilerplate doesn't return `total_pages` — frontends can derive it as `ceil(total_count / items_per_page)` if they need it. + +## Where the Service Does the Work -You can easily add filtering to paginated endpoints: +The route stays thin. The actual `get_multi` call lives in the service: + +```python +# modules/user/service.py +from fastcrud.types import GetMultiResponseDict +from .crud import crud_users +from .schemas import UserRead + + +class UserService: + async def get_paginated( + self, db: AsyncSession, skip: int = 0, limit: int = 100, + ) -> GetMultiResponseDict: + return await crud_users.get_multi( + db=db, + offset=skip, + limit=limit, + is_deleted=False, + schema_to_select=UserRead, + return_total_count=True, + ) +``` + +`return_total_count=True` is what makes the response include `total_count` (and therefore makes `has_more` accurate). 
+ +## Filtering + +Add filter parameters to the route, pass them to the service: ```python @router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users( +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], page: int = 1, items_per_page: int = 10, - # Add filter parameters search: str | None = None, - is_active: bool | None = None, tier_id: int | None = None, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Build filters - filters = {} - if search: - filters["name__icontains"] = search # Search by name - if is_active is not None: - filters["is_active"] = is_active - if tier_id: +) -> dict[str, Any]: + result = await user_service.get_paginated( + skip=compute_offset(page, items_per_page), + limit=items_per_page, + db=db, + search=search, + tier_id=tier_id, + ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) +``` + +In the service, build the `crud_users.get_multi` filters: + +```python +async def get_paginated( + self, + db: AsyncSession, + skip: int = 0, + limit: int = 100, + search: str | None = None, + tier_id: int | None = None, +) -> GetMultiResponseDict: + filters: dict[str, Any] = {"is_deleted": False} + if tier_id is not None: filters["tier_id"] = tier_id - - users = await crud_users.get_multi( + if search: + filters["username__icontains"] = search + + return await crud_users.get_multi( db=db, - offset=(page - 1) * items_per_page, - limit=items_per_page, + offset=skip, + limit=limit, schema_to_select=UserRead, - return_as_model=True, return_total_count=True, - **filters - ) - - return paginated_response( - crud_data=users, - page=page, - items_per_page=items_per_page + **filters, ) ``` -Now you can call: +FastCRUD's `__icontains` / `__contains` / `__gt` / `__in` operators avoid raw SQL. See [CRUD Operations](../database/crud.md) for the full list. 
+ +## Sorting + +FastCRUD accepts `sort_columns` and `sort_orders`: + +```python +result = await crud_users.get_multi( + db=db, + offset=skip, + limit=limit, + sort_columns="created_at", + sort_orders="desc", + return_total_count=True, +) +``` -- `/users/?search=john` - Find users with "john" in their name -- `/users/?is_active=true` - Only active users -- `/users/?tier_id=1&page=2` - Users in tier 1, page 2 +For multiple sort keys, pass lists: -## Adding Sorting +```python +sort_columns=["tier_id", "created_at"], +sort_orders=["asc", "desc"], +``` -Add sorting options to your paginated endpoints: +Expose this from the route as a query parameter: ```python +from fastapi import Query + + @router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users( +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], page: int = 1, items_per_page: int = 10, - # Add sorting parameters - sort_by: str = "created_at", - sort_order: str = "desc", - db: Annotated[AsyncSession, Depends(async_get_db)] -): - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * items_per_page, - limit=items_per_page, - schema_to_select=UserRead, - return_as_model=True, - return_total_count=True, - sort_columns=sort_by, - sort_orders=sort_order - ) - - return paginated_response( - crud_data=users, - page=page, - items_per_page=items_per_page - ) + sort_by: Annotated[str, Query(pattern=r"^(created_at|username|email)$")] = "created_at", + sort_order: Annotated[str, Query(pattern=r"^(asc|desc)$")] = "desc", +) -> dict[str, Any]: + ... ``` -Usage: - -- `/users/?sort_by=name&sort_order=asc` - Sort by name A-Z -- `/users/?sort_by=created_at&sort_order=desc` - Newest first +The `pattern` constraint stops clients from passing arbitrary column names that could leak fields you didn't mean to sort by. 
## Validation -Add validation to prevent issues: +Always cap `items_per_page` to keep callers from asking for thousands of rows: ```python from fastapi import Query + @router.get("/", response_model=PaginatedListResponse[UserRead]) -async def get_users( - page: Annotated[int, Query(ge=1)] = 1, # Must be >= 1 - items_per_page: Annotated[int, Query(ge=1, le=100)] = 10, # Between 1-100 - db: Annotated[AsyncSession, Depends(async_get_db)] -): - # Your pagination logic here +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: Annotated[int, Query(ge=1)] = 1, + items_per_page: Annotated[int, Query(ge=1, le=100)] = 10, +) -> dict[str, Any]: + ... ``` -## Complete Example +The boilerplate uses `ge=1, le=100` for the user list endpoint and `ge=1, le=1000` for API-key usage history (`modules/api_keys/routes.py`). Pick a cap that matches the row size of the model you're paginating. + +## Real Endpoint: List Users -Here's a full-featured paginated endpoint: +From `modules/user/routes.py`: ```python -@router.get("/", response_model=PaginatedListResponse[UserRead]) +@router.get( + "/", + response_model=PaginatedListResponse[UserRead], + summary="List All Users (Admin)", + responses={ + 401: {"description": "Not authenticated"}, + 403: {"description": "Not authorized - requires admin privileges"}, + }, +) async def get_users( - # Pagination - page: Annotated[int, Query(ge=1)] = 1, - items_per_page: Annotated[int, Query(ge=1, le=100)] = 10, - - # Filtering - search: Annotated[str | None, Query(max_length=100)] = None, - is_active: bool | None = None, - tier_id: int | None = None, - - # Sorting - sort_by: str = "created_at", - sort_order: str = "desc", - - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Get paginated users with filtering and sorting.""" - - # Build filters - filters = {"is_deleted": False} # Always exclude deleted users - - if is_active is not None: - 
filters["is_active"] = is_active - if tier_id: - filters["tier_id"] = tier_id - - # Handle search - search_criteria = [] - if search: - from sqlalchemy import or_, func - search_criteria = [ - or_( - func.lower(User.name).contains(search.lower()), - func.lower(User.username).contains(search.lower()), - func.lower(User.email).contains(search.lower()) - ) - ] - - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * items_per_page, + db: Annotated[AsyncSession, Depends(async_session)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + """Get paginated list of all users (admin only).""" + users_data = await user_service.get_paginated( + skip=compute_offset(page, items_per_page), limit=items_per_page, - schema_to_select=UserRead, - return_as_model=True, - return_total_count=True, - sort_columns=sort_by, - sort_orders=sort_order, - **filters, - **{"filter_criteria": search_criteria} if search_criteria else {} - ) - - return paginated_response( - crud_data=users, - page=page, - items_per_page=items_per_page + db=db, ) + return paginated_response(crud_data=users_data, page=page, items_per_page=items_per_page) ``` -This endpoint supports: - -- `/users/` - First 10 users -- `/users/?page=2&items_per_page=20` - Page 2, 20 items -- `/users/?search=john&is_active=true` - Active users named john -- `/users/?sort_by=name&sort_order=asc` - Sorted by name +## Real Endpoint: API Key Usage History -## Simple List (No Pagination) - -Sometimes you just want a simple list without pagination: +From `modules/api_keys/routes.py` — same pattern, different limit cap: ```python -@router.get("/all", response_model=list[UserRead]) -async def get_all_users( - limit: int = 100, # Prevent too many results - db: Annotated[AsyncSession, Depends(async_get_db)] -): - users = await crud_users.get_multi( +@router.get( + "/{key_id}/usage", + 
response_model=PaginatedListResponse[KeyUsageRead], +) +async def get_key_usage( + key_id: int = Path(..., description="API key ID"), + current_user: User = Depends(get_current_user), + api_key_service: APIKeyService = Depends(get_api_key_service), + db: AsyncSession = Depends(async_session), + page: int = Query(1, ge=1, description="Page number"), + items_per_page: int = Query(100, ge=1, le=1000, description="Items per page"), +) -> dict[str, Any]: + result = await api_key_service.get_key_usage( + key_id=key_id, + user_id=current_user["id"] if isinstance(current_user, dict) else current_user.id, + limit=items_per_page, + offset=compute_offset(page, items_per_page), db=db, - limit=limit, - schema_to_select=UserRead, - return_as_model=True ) - return users["data"] + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) ``` -## Performance Tips +## Simple List Without Pagination + +If you genuinely don't need pagination (e.g. an admin endpoint that returns a tiny enumerable like all tiers), call `get_multi` once and return the `data` list directly: -1. **Always set a maximum page size**: ```python -items_per_page: Annotated[int, Query(ge=1, le=100)] = 10 # Max 100 items +@router.get("/all", response_model=list[TierRead]) +async def list_all_tiers( + db: Annotated[AsyncSession, Depends(async_session)], + tier_service: Annotated[TierService, Depends(get_tier_service)], +) -> list[dict[str, Any]]: + result = await tier_service.get_all(db=db, skip=0, limit=1000) + return result["data"] ``` -2. **Use `schema_to_select` to only fetch needed fields**: +Even here, set a generous-but-finite `limit` — never an unbounded query. + +## Performance Tips + +### Cap `items_per_page` + +Already covered, but worth repeating: an `Annotated[int, Query(ge=1, le=100)]` is your safety net. + +### Use `schema_to_select` + +Only return the columns the response model needs. 
For a `UserRead` schema, this avoids fetching `hashed_password`: + ```python -users = await crud_users.get_multi( - schema_to_select=UserRead, # Only fetch UserRead fields - return_as_model=True +return await crud_users.get_multi( + db=db, + schema_to_select=UserRead, + return_total_count=True, + offset=skip, + limit=limit, ) ``` -3. **Add database indexes** for columns you sort by: -```sql --- In your migration -CREATE INDEX idx_users_created_at ON users(created_at); -CREATE INDEX idx_users_name ON users(name); -``` +### Index columns you sort or filter on -## Common Patterns +When you add new sort/filter parameters that target a column without an index, generate an Alembic migration that adds one: -### Admin List with All Users ```python -@router.get("/admin", dependencies=[Depends(get_current_superuser)]) -async def get_all_users_admin( - include_deleted: bool = False, - page: int = 1, - items_per_page: int = 50, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - filters = {} - if not include_deleted: - filters["is_deleted"] = False - - users = await crud_users.get_multi(db=db, **filters) - return paginated_response(users, page, items_per_page) +def upgrade() -> None: + op.create_index("ix_user_created_at", "user", ["created_at"]) ``` -### User's Own Items -```python -@router.get("/my-posts", response_model=PaginatedListResponse[PostRead]) -async def get_my_posts( - page: int = 1, - items_per_page: int = 10, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)] -): - posts = await crud_posts.get_multi( - db=db, - author_id=current_user["id"], # Only user's own posts - offset=(page - 1) * items_per_page, - limit=items_per_page - ) - return paginated_response(posts, page, items_per_page) -``` +The User model already indexes `username`, `email`, `tier_id`, `google_id`, and `github_id` for this reason. 
-## What's Next +### Beware of large offsets -Now that you understand pagination: +`OFFSET 100000` still has Postgres scan and discard 100,000 rows. For very large datasets, consider keyset pagination (filtering by `created_at < cursor`) instead of page-based pagination. FastCRUD's `__lt` / `__gt` operators support this directly. -- **[Database CRUD](../database/crud.md)** - Learn more about the CRUD operations -- **[Database Schemas](../database/schemas.md)** - Create schemas for your data -- **[Authentication](../authentication/index.md)** - Add user authentication to your endpoints +## What's Next -The boilerplate makes pagination simple - just use these patterns! \ No newline at end of file +- **[CRUD Operations](../database/crud.md)** — Filter/sort/offset/limit semantics +- **[Schemas](../database/schemas.md)** — How `*Read` schemas pair with `schema_to_select` +- **[Authentication](../authentication/index.md)** — Gating list endpoints behind login or admin diff --git a/docs/user-guide/api/versioning.md b/docs/user-guide/api/versioning.md index 5ac309f5..d2eb967a 100644 --- a/docs/user-guide/api/versioning.md +++ b/docs/user-guide/api/versioning.md @@ -1,418 +1,333 @@ # API Versioning -Learn how to version your APIs properly using the boilerplate's built-in versioning structure and best practices for maintaining backward compatibility. +The boilerplate ships a `v1` namespace under `/api/v1/`. This page documents the actual wiring and how to add `/api/v2/` when you need to make breaking changes. -## Quick Start - -The boilerplate is already set up for versioning with a `v1` structure: +## How It's Wired Today ```text -src/app/api/ -├── dependencies.py # Shared across all versions -└── v1/ # Version 1 of your API - ├── __init__.py # Router registration - ├── users.py # User endpoints - ├── posts.py # Post endpoints - └── ... 
# Other endpoints +backend/src/interfaces/api/ +├── __init__.py # mounts /api → v1 +└── v1/ + └── __init__.py # mounts /v1 + each module's router ``` -Your endpoints are automatically available at `/api/v1/...`: +`interfaces/api/__init__.py`: -- `GET /api/v1/users/` - Get users -- `POST /api/v1/users/` - Create user -- `GET /api/v1/posts/` - Get posts +```python +from fastapi import APIRouter -## Current Structure +from .v1 import router as v1_router -### Version 1 (v1) +router = APIRouter(prefix="/api") +router.include_router(v1_router) +``` -The current API version is in `src/app/api/v1/`: +`interfaces/api/v1/__init__.py`: ```python -# src/app/api/v1/__init__.py from fastapi import APIRouter -from .users import router as users_router -from .posts import router as posts_router -from .login import router as login_router - -# Main v1 router -api_router = APIRouter() - -# Include all v1 endpoints -api_router.include_router(users_router) -api_router.include_router(posts_router) -api_router.include_router(login_router) +from ....infrastructure.auth.routes import router as auth_router +from ....modules.api_keys.routes import router as api_keys_router +from ....modules.rate_limit.routes import router as rate_limits_router +from ....modules.tier.routes import router as tiers_router +from ....modules.user.routes import router as users_router + +router = APIRouter(prefix="/v1") +router.include_router(users_router, prefix="/users") +router.include_router(tiers_router, prefix="/tiers") +router.include_router(rate_limits_router, prefix="/rate-limits") +router.include_router(auth_router, prefix="/auth") +router.include_router(api_keys_router, prefix="/api-keys") ``` -### Main App Registration +The aggregator is the **only** place that knows about every module's router. Each module exposes a single `router` from its `routes.py`, and v1 mounts them all under their respective prefixes. 
-In `src/app/main.py`, v1 is registered: +`infrastructure/main.py` then mounts the API tree: ```python -from fastapi import FastAPI -from app.api.v1 import api_router as api_v1_router - -app = FastAPI() +from ..interfaces.api import router -# Register v1 API -app.include_router(api_v1_router, prefix="/api/v1") +app.include_router(router) ``` -## Adding Version 2 +So `users_router → /users → /v1/users → /api/v1/users → /api/v1/users/me`, etc. -When you need to make breaking changes, create a new version: +## Endpoints Today -### Step 1: Create v2 Directory +| URL prefix | Source | +|------------|--------| +| `/api/v1/users/*` | `modules/user/routes.py` | +| `/api/v1/tiers/*` | `modules/tier/routes.py` | +| `/api/v1/rate-limits/*` | `modules/rate_limit/routes.py` | +| `/api/v1/auth/*` | `infrastructure/auth/routes.py` | +| `/api/v1/api-keys/*` | `modules/api_keys/routes.py` | -```text -src/app/api/ -├── dependencies.py -├── v1/ # Keep v1 unchanged -│ ├── __init__.py -│ ├── users.py -│ └── ... -└── v2/ # New version - ├── __init__.py - ├── users.py # Updated user endpoints - └── ... -``` +## Adding `v2` -### Step 2: Create v2 Router +When you need to make breaking changes — new response shapes, removed fields, different auth requirements — add a new version sibling instead of mutating v1. 
+ +### Step 1: Create the v2 Aggregator + +```bash +mkdir backend/src/interfaces/api/v2 +touch backend/src/interfaces/api/v2/__init__.py +``` ```python -# src/app/api/v2/__init__.py +# backend/src/interfaces/api/v2/__init__.py from fastapi import APIRouter -from .users import router as users_router -# Import other v2 routers +# Import the v2-flavored route modules — see Step 2 below +from ....modules.user.routes_v2 import router as users_router -# Main v2 router -api_router = APIRouter() +router = APIRouter(prefix="/v2") +router.include_router(users_router, prefix="/users") -# Include v2 endpoints -api_router.include_router(users_router) +# Re-export anything that didn't change in v2 from v1: +# from ....modules.tier.routes import router as tiers_router +# router.include_router(tiers_router, prefix="/tiers") ``` -### Step 3: Register v2 in Main App +### Step 2: Create v2 Routes Per Module + +Two patterns work, pick the one that fits the change: + +**Pattern A: a separate `routes_v2.py`** — when v2's routes are different enough that mixing them in `routes.py` would be confusing. 
```python -# src/app/main.py -from fastapi import FastAPI -from app.api.v1 import api_router as api_v1_router -from app.api.v2 import api_router as api_v2_router +# backend/src/modules/user/routes_v2.py +from typing import Annotated, Any -app = FastAPI() +from fastapi import APIRouter, Depends +from fastcrud import PaginatedListResponse, compute_offset, paginated_response +from sqlalchemy.ext.asyncio import AsyncSession -# Register both versions -app.include_router(api_v1_router, prefix="/api/v1") -app.include_router(api_v2_router, prefix="/api/v2") -``` +from ...infrastructure.auth.session.dependencies import get_current_user +from ...infrastructure.database.session import async_session +from .schemas_v2 import UserReadV2 +from .service import UserService +from .routes import get_user_service # reuse the service factory -## Version 2 Example +router = APIRouter(tags=["Users (v2)"]) -Here's how you might evolve the user endpoints in v2: -### v1 User Endpoint -```python -# src/app/api/v1/users.py -from app.schemas.user import UserRead, UserCreate +# v2 makes pagination mandatory and renames profile_image_url -> avatar_url +@router.get("/", response_model=PaginatedListResponse[UserReadV2]) +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + result = await user_service.get_paginated_v2( + skip=compute_offset(page, items_per_page), + limit=items_per_page, + db=db, + ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) +``` + +**Pattern B: alias the existing router** — when v2's behavior is identical and only the URL prefix needs to differ: -@router.get("/", response_model=list[UserRead]) -async def get_users(): - users = await crud_users.get_multi(db=db, schema_to_select=UserRead) - return users["data"] +```python +# backend/src/interfaces/api/v2/__init__.py +from 
....modules.tier.routes import router as tiers_router -@router.post("/", response_model=UserRead) -async def create_user(user_data: UserCreate): - return await crud_users.create(db=db, object=user_data) +router.include_router(tiers_router, prefix="/tiers") ``` -### v2 User Endpoint (with breaking changes) +### Step 3: Mount v2 Alongside v1 + ```python -# src/app/api/v2/users.py -from app.schemas.user import UserReadV2, UserCreateV2 # New schemas -from fastcrud import PaginatedListResponse +# backend/src/interfaces/api/__init__.py +from fastapi import APIRouter -# Breaking change: Always return paginated response -@router.get("/", response_model=PaginatedListResponse[UserReadV2]) -async def get_users(page: int = 1, items_per_page: int = 10): - users = await crud_users.get_multi( - db=db, - offset=(page - 1) * items_per_page, - limit=items_per_page, - schema_to_select=UserReadV2 - ) - return paginated_response(users, page, items_per_page) - -# Breaking change: Require authentication -@router.post("/", response_model=UserReadV2) -async def create_user( - user_data: UserCreateV2, - current_user: Annotated[dict, Depends(get_current_user)] # Now required -): - return await crud_users.create(db=db, object=user_data) +from .v1 import router as v1_router +from .v2 import router as v2_router + +router = APIRouter(prefix="/api") +router.include_router(v1_router) +router.include_router(v2_router) ``` +Both `/api/v1/users/` and `/api/v2/users/` are now live. + ## Schema Versioning -Create separate schemas for different versions: +Keep v1 schemas exactly as they are; add v2 schemas in a new file. Never edit a v1 schema in a way that changes the wire format — that's the whole point of having a v2. 
-### Version 1 Schema ```python -# src/app/schemas/user.py (existing) +# backend/src/modules/user/schemas.py — UNCHANGED class UserRead(BaseModel): id: int name: str username: str - email: str + email: EmailStr profile_image_url: str tier_id: int | None + is_superuser: bool = False + email_verified: bool = False + oauth_provider: str | None = None -class UserCreate(BaseModel): - name: str - username: str - email: str - password: str -``` - -### Version 2 Schema (with changes) -```python -# src/app/schemas/user_v2.py (new file) -from datetime import datetime +# backend/src/modules/user/schemas_v2.py — NEW class UserReadV2(BaseModel): id: int name: str username: str - email: str - avatar_url: str # Changed from profile_image_url - subscription_tier: str # Changed from tier_id to string - created_at: datetime # New field - is_verified: bool # New field - -class UserCreateV2(BaseModel): - name: str - username: str - email: str - password: str - accept_terms: bool # New required field + email: EmailStr + avatar_url: str # renamed from profile_image_url + subscription_tier: str | None # changed from tier_id (int) to tier name + is_superuser: bool = False + email_verified: bool = False + created_at: datetime # newly exposed ``` -## Gradual Migration Strategy +Service methods that produce the v2 shape live next to the v1 ones — `UserService.get_paginated` for v1, `UserService.get_paginated_v2` for v2 — so the service still owns the data assembly logic. -### 1. Keep Both Versions Running - -```python -# Both versions work simultaneously -# v1: GET /api/v1/users/ -> list[UserRead] -# v2: GET /api/v2/users/ -> PaginatedListResponse[UserReadV2] -``` +## Sharing Code Across Versions -### 2. Add Deprecation Warnings +The CRUD layer, services, and infrastructure are **shared**. Only the routes and schemas duplicate. That's the point — it's cheap to add a version because most of the codebase doesn't move. 
-```python -# src/app/api/v1/users.py -import warnings -from fastapi import HTTPException - -@router.get("/", response_model=list[UserRead]) -async def get_users(response: Response): - # Add deprecation header - response.headers["X-API-Deprecation"] = "v1 is deprecated. Use v2." - response.headers["X-API-Sunset"] = "2024-12-31" # When v1 will be removed - - users = await crud_users.get_multi(db=db, schema_to_select=UserRead) - return users["data"] +```text +modules/user/ +├── models.py ← shared +├── crud.py ← shared +├── service.py ← shared (add v2-shaped methods if needed) +├── schemas.py ← v1 schemas +├── schemas_v2.py ← v2 schemas +├── routes.py ← v1 routes +└── routes_v2.py ← v2 routes ``` -### 3. Monitor Usage +## Deprecating a Version -Track which versions are being used: +When v2 is ready and v1 should sunset: + +### 1. Add a deprecation header to v1 endpoints ```python -# src/app/api/middleware.py -from fastapi import Request -import logging - -logger = logging.getLogger(__name__) - -async def version_tracking_middleware(request: Request, call_next): - if request.url.path.startswith("/api/v1/"): - logger.info(f"v1 usage: {request.method} {request.url.path}") - elif request.url.path.startswith("/api/v2/"): - logger.info(f"v2 usage: {request.method} {request.url.path}") - - response = await call_next(request) - return response +# Inside a v1 route handler +@router.get("/", response_model=list[UserRead], deprecated=True) +async def list_users( + response: Response, + ..., +) -> list[dict[str, Any]]: + response.headers["Deprecation"] = "true" + response.headers["Sunset"] = "Wed, 31 Dec 2025 00:00:00 GMT" + response.headers["Link"] = '; rel="successor-version"' + return await ... ``` -## Shared Code Between Versions +The `Deprecation`, `Sunset`, and `Link` headers come from the IETF [API Deprecation](https://datatracker.ietf.org/doc/html/rfc8594) drafts — clients with HTTP-aware tooling pick them up automatically. 
-Keep common logic in shared modules: +The `deprecated=True` flag also marks the endpoint in `/docs`. -### Shared Dependencies -```python -# src/app/api/dependencies.py - shared across all versions -async def get_current_user(...): - # Authentication logic used by all versions - pass - -async def get_db(): - # Database connection used by all versions - pass -``` +### 2. Track v1 usage -### Shared CRUD Operations -```python -# The CRUD layer can be shared between versions -# Only the schemas and endpoints change +If you have logging middleware or observability, slice request counts by `request.url.path.startswith("/api/v1/")` to know when v1 traffic is low enough to retire. -# v1 endpoint -@router.get("/", response_model=list[UserRead]) -async def get_users_v1(): - users = await crud_users.get_multi(schema_to_select=UserRead) - return users["data"] +### 3. Remove v1 after sunset -# v2 endpoint -@router.get("/", response_model=PaginatedListResponse[UserReadV2]) -async def get_users_v2(): - users = await crud_users.get_multi(schema_to_select=UserReadV2) - return paginated_response(users, page, items_per_page) -``` +When the sunset date passes and traffic is gone: + +1. Delete `interfaces/api/v1/` +2. Delete the v1-only `schemas.py` blocks (or rename `schemas_v2.py` → `schemas.py`) +3. Delete v1-only service methods +4. Update `interfaces/api/__init__.py` to mount only v2 -## Version Discovery +## Per-Version OpenAPI Documentation -Let clients discover available versions: +By default, `/docs` shows every route. 
To split docs per version, mount each version as a sub-app with its own `FastAPI()` instance: ```python -# src/app/api/versions.py -from fastapi import APIRouter +# backend/src/interfaces/main.py — sketch +from fastapi import FastAPI -router = APIRouter() - -@router.get("/versions") -async def get_api_versions(): - return { - "available_versions": ["v1", "v2"], - "current_version": "v2", - "deprecated_versions": [], - "sunset_dates": { - "v1": "2024-12-31" - } - } -``` +from .api.v1 import router as v1_router +from .api.v2 import router as v2_router -Register it in main.py: -```python -# src/app/main.py -from app.api.versions import router as versions_router +main = FastAPI(title="My API") -app.include_router(versions_router, prefix="/api") -# Now available at GET /api/versions +v1 = FastAPI(title="My API v1", version="1.0.0") +v1.include_router(v1_router) +main.mount("/api/v1", v1) + +v2 = FastAPI(title="My API v2", version="2.0.0") +v2.include_router(v2_router) +main.mount("/api/v2", v2) ``` +You'll get `/api/v1/docs` and `/api/v2/docs` independently. Note the boilerplate ships a single mounted app today — adopt this only when you genuinely need separate docs. 
+ ## Testing Multiple Versions -Test both versions to ensure compatibility: +Once v2 exists, run the test suite against both: ```python -# tests/test_api_versioning.py import pytest from httpx import AsyncClient -@pytest.mark.asyncio -async def test_v1_users(client: AsyncClient): - """Test v1 returns simple list""" - response = await client.get("/api/v1/users/") - assert response.status_code == 200 - - data = response.json() - assert isinstance(data, list) # v1 returns list - -@pytest.mark.asyncio -async def test_v2_users(client: AsyncClient): - """Test v2 returns paginated response""" - response = await client.get("/api/v2/users/") - assert response.status_code == 200 - - data = response.json() - assert "data" in data # v2 returns paginated response - assert "total_count" in data - assert "page" in data -``` - -## OpenAPI Documentation - -Each version gets its own docs: - -```python -# src/app/main.py -from fastapi import FastAPI -# Create separate apps for documentation -v1_app = FastAPI(title="My API v1", version="1.0.0") -v2_app = FastAPI(title="My API v2", version="2.0.0") +@pytest.mark.asyncio +async def test_v1_users_returns_list(client: AsyncClient): + resp = await client.get("/api/v1/users/") + # whatever v1's contract is — list, paginated, etc. + assert resp.status_code in {200, 401, 403} -# Register routes -v1_app.include_router(api_v1_router) -v2_app.include_router(api_v2_router) -# Mount as sub-applications -main_app = FastAPI() -main_app.mount("/api/v1", v1_app) -main_app.mount("/api/v2", v2_app) +@pytest.mark.asyncio +async def test_v2_users_paginated(client: AsyncClient): + resp = await client.get("/api/v2/users/") + assert resp.status_code == 200 + body = resp.json() + assert "data" in body + assert "total_count" in body + assert "page" in body ``` -Now you have separate documentation: -- `/api/v1/docs` - v1 documentation -- `/api/v2/docs` - v2 documentation - ## Best Practices -### 1. Semantic Versioning +### What counts as a breaking change? 
-- **v1.0** → **v1.1**: New features (backward compatible) -- **v1.1** → **v2.0**: Breaking changes (new version) +- Removing a field from a response +- Renaming a field +- Changing a field's type (e.g. `tier_id: int | None` → `tier_name: str`) +- Tightening validation in a way that previously-valid input now fails +- Adding a required request field +- Changing default behavior (e.g. unpaginated → paginated) +- Changing auth requirements -### 2. Clear Migration Path +If you're not making a breaking change, just add the new field/feature to v1. -```python -# Document what changed in v2 -""" -API v2 Changes: -- GET /users/ now returns paginated response instead of array -- POST /users/ now requires authentication -- UserRead.profile_image_url renamed to avatar_url -- UserRead.tier_id changed to subscription_tier (string) -- Added UserRead.created_at and is_verified fields -- UserCreate now requires accept_terms field -""" -``` +### Keep the URL pattern consistent -### 3. Gradual Deprecation +Always `/api/v{number}/resource`. Don't get clever with version-in-headers schemes — URL versioning is unambiguous to humans and to caches. -1. Release v2 alongside v1 -2. Add deprecation warnings to v1 -3. Set sunset date for v1 -4. Monitor v1 usage -5. Remove v1 after sunset date +### Don't fork the service layer prematurely -### 4. Consistent Patterns +If v2 only changes the response shape, derive the v2 dict from the same service method via a small adapter; only fork the service when business logic actually differs. 
-Keep the same patterns across versions: +### Document changes in a changelog -- Same URL structure: `/api/v{number}/resource` -- Same HTTP methods and status codes -- Same authentication approach -- Same error response format +Tag the v2 release with the list of breaking changes: -## What's Next +```markdown +## API v2 + +Breaking changes vs v1: +- `GET /users/` now returns `PaginatedListResponse` instead of `list[UserRead]` +- `UserRead.profile_image_url` renamed to `avatar_url` +- `UserRead.tier_id` (int) replaced with `subscription_tier` (string) +- `POST /users/` now requires authentication +- `UserCreate` now requires `accept_terms: bool` +``` -Now that you understand API versioning: +A short, blunt list helps consumers migrate. -- **[Database Migrations](../database/migrations.md)** - Handle database schema changes -- **[Testing](../testing.md)** - Test multiple API versions -- **[Production](../production.md)** - Deploy versioned APIs +## What's Next -Proper versioning lets you evolve your API without breaking existing clients! \ No newline at end of file +- **[Database Migrations](../database/migrations.md)** — Schema changes that may motivate a new API version +- **[Endpoints](endpoints.md)** — Patterns for routes +- **[Schemas](../database/schemas.md)** — Versioned shapes diff --git a/docs/user-guide/authentication/index.md b/docs/user-guide/authentication/index.md index a78f380d..2deee345 100644 --- a/docs/user-guide/authentication/index.md +++ b/docs/user-guide/authentication/index.md @@ -1,198 +1,329 @@ # Authentication & Security -Learn how to implement secure authentication in your FastAPI application. The boilerplate provides a complete JWT-based authentication system with user management, permissions, and security best practices. +The boilerplate uses **server-side sessions with HTTP-only cookies** — not JWT. Sessions are stored in Redis (or memory/memcached, configurable), CSRF-protected, and rate-limited at the login endpoint. 
+ +For machine-to-machine clients, the boilerplate ships **API keys** with per-key permissions and usage tracking. ## What You'll Learn -- **[JWT Tokens](jwt-tokens.md)** - Understand access and refresh token management -- **[User Management](user-management.md)** - Handle registration, login, and user profiles -- **[Permissions](permissions.md)** - Implement role-based access control and authorization +- **[Sessions](sessions.md)** - Server-side sessions, cookies, and CSRF protection +- **[User Management](user-management.md)** - Registration, login, profile operations +- **[Permissions](permissions.md)** - Role-based access control and resource ownership -## Authentication Overview +## Why Sessions, Not JWT -The system uses JWT tokens with refresh token rotation for secure, stateless authentication: +The original boilerplate used JWT with refresh tokens and a token blacklist. We replaced that with sessions because: -```python -# Basic login flow -@router.post("/login", response_model=Token) -async def login_for_access_token(response: Response, form_data: OAuth2PasswordRequestForm): - user = await authenticate_user(form_data.username, form_data.password, db) - access_token = await create_access_token(data={"sub": user["username"]}) - refresh_token = await create_refresh_token(data={"sub": user["username"]}) - - # Set secure HTTP-only cookie for refresh token - response.set_cookie("refresh_token", refresh_token, httponly=True, secure=True) - return {"access_token": access_token, "token_type": "bearer"} +- **Logout is trivial.** Delete the session row, done. No blacklist to maintain. +- **Rotating credentials is trivial.** Update the session record. No need to wait for tokens to expire. +- **CSRF is built in.** Server-side sessions naturally pair with double-submit CSRF tokens. +- **Storage is server-side.** No risk of accidentally leaking long-lived tokens via XSS to client storage. 
+- **Sessions match how most users actually want to think about authentication.** "Is this person logged in?" is a database question, not a cryptographic one. + +If you specifically need stateless tokens (e.g. for inter-service auth where you can't share a session store), use **API keys** — they're stateless from the client's perspective and authenticated server-side. + +## Authentication Mechanisms + +The boilerplate supports three auth pathways. They coexist; you pick the right one per endpoint. + +### 1. Sessions (Browser Clients) + +```bash +# Log in — server sets the session cookie and returns a CSRF token +curl -X POST "http://localhost:8000/api/v1/auth/login" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=admin&password=your_admin_password" \ + -c cookies.txt +# → { "csrf_token": "..." } + +# Subsequent requests — send the cookie back +curl http://localhost:8000/api/v1/users/me -b cookies.txt + +# Log out +curl -X POST http://localhost:8000/api/v1/auth/logout -b cookies.txt +``` + +Routes use `Depends(get_current_user)` to require an authenticated session. + +### 2. OAuth (Google) + +For social sign-in — Google OAuth 2.0 with PKCE is wired up. The user is redirected to Google, signs in, and is bounced back to a callback that creates a session. + +```bash +# Start the flow +curl http://localhost:8000/api/v1/auth/oauth/google +# → { "url": "https://accounts.google.com/...?state=..." } + +# After the user signs in at Google, they hit the callback: +# GET /api/v1/auth/oauth/callback/google?code=...&state=... +# The server creates a session and either redirects or returns JSON. +``` + +A GitHub OAuth provider is **scaffolded** in `infrastructure/auth/oauth/providers/github.py` but no GitHub callback routes are wired yet. Wire those up in `infrastructure/auth/routes.py` if you need GitHub sign-in. + +### 3. 
API Keys (Machine-to-Machine) + +For server-to-server clients, programs, scripts, integrations: + +```bash +# Create a key (requires an authenticated session) +curl -X POST "http://localhost:8000/api/v1/api-keys/" \ + -H "Content-Type: application/json" \ + -b cookies.txt \ + -d '{"name": "Integration Key", "permissions": {}, "usage_limits": {}}' +# → { "key": "shown ONCE — store securely", ... } ``` +The full key is returned only on creation. Each key has its own permissions, usage limits, and audit trail (`KeyUsage` rows). + ## Key Features -### JWT Token System -- **Access tokens**: Short-lived (30 minutes), for API requests -- **Refresh tokens**: Long-lived (7 days), stored in secure cookies -- **Token blacklisting**: Secure logout implementation -- **Automatic expiration**: Built-in token lifecycle management +### Server-Side Sessions + +- **Session storage**: Redis by default; memory/memcached available (`SESSION_BACKEND` env var) +- **HTTP-only cookies**: `session_id` cookie cannot be read by JavaScript +- **CSRF tokens**: Returned on login, also set as a cookie, must be sent in `X-CSRF-Token` for state-changing requests +- **Configurable timeout**: `SESSION_TIMEOUT_MINUTES`, `SESSION_COOKIE_MAX_AGE` +- **Per-user limits**: `MAX_SESSIONS_PER_USER` caps simultaneous sessions per account +- **Automatic cleanup**: `SESSION_CLEANUP_INTERVAL_MINUTES` controls expiry sweeps ### User Management -- **Flexible authentication**: Username or email login -- **Secure passwords**: bcrypt hashing with salt -- **Profile management**: Complete user CRUD operations -- **Soft delete**: User deactivation without data loss + +- **Username or email** login (the same `/api/v1/auth/login` endpoint accepts either) +- **bcrypt** password hashing +- **Soft delete** for user records — accounts are deactivated, not destroyed (toggle via `is_deleted`) +- **GDPR/LGPD anonymization** endpoint for hard-clearing PII (`DELETE /api/v1/users/db/{username}`) +- **OAuth flag** on the user model 
(`google_id`, `github_id`, `oauth_provider`) ### Permission System -- **Superuser privileges**: Administrative access control -- **Resource ownership**: User-specific data access -- **User tiers**: Subscription-based feature access -- **Rate limiting**: Per-user and per-tier API limits + +- **Superuser flag** on `User.is_superuser` for admin-only routes +- **Tier-based** access via the `Tier` model — every user belongs to a tier, and rate limits are configured per tier path +- **Resource ownership** checks live in services (the route doesn't decide who owns what) + +### Login Rate Limiting + +The login endpoint tracks failed attempts per IP+username. Configurable: + +```env +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 +``` + +When the limit is hit, `POST /api/v1/auth/login` returns `401 Unauthorized: Too many failed login attempts. Please try again later.` ## Authentication Patterns -### Endpoint Protection +All session deps live in `src/infrastructure/auth/session/dependencies.py`. + +### Required Authentication ```python -# Required authentication -@router.get("/protected") -async def protected_endpoint(current_user: dict = Depends(get_current_user)): - return {"message": f"Hello {current_user['username']}"} - -# Optional authentication -@router.get("/public") -async def public_endpoint(user: dict | None = Depends(get_optional_user)): - if user: - return {"premium_content": True} - return {"premium_content": False} - -# Superuser only -@router.get("/admin", dependencies=[Depends(get_current_superuser)]) -async def admin_endpoint(): - return {"admin_data": "sensitive"} +from ...infrastructure.auth.session.dependencies import get_current_user + +@router.get("/me", response_model=UserRead) +async def me( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + return current_user ``` -### Resource Ownership +Returns 401 if the session cookie is missing or invalid. 
+ +### Optional Authentication ```python -@router.patch("/posts/{post_id}") -async def update_post(post_id: int, current_user: dict = Depends(get_current_user)): - post = await crud_posts.get(db=db, id=post_id) - - # Check ownership or admin privileges - if post["created_by_user_id"] != current_user["id"] and not current_user["is_superuser"]: - raise ForbiddenException("Cannot update other users' posts") - - return await crud_posts.update(db=db, id=post_id, object=updates) +from ...infrastructure.auth.session.dependencies import get_optional_user + +@router.get("/") +async def list_things( + user: Annotated[dict[str, Any] | None, Depends(get_optional_user)], +): + # Logged-in users see extras; anonymous users still get a response + if user is not None: + return {"premium": True} + return {"premium": False} ``` -## Security Features +### Superuser Only -### Token Security -- Short-lived access tokens limit exposure -- HTTP-only refresh token cookies prevent XSS -- Token blacklisting enables secure logout -- Configurable token expiration times +```python +from ...infrastructure.auth.session.dependencies import get_current_superuser + +@router.delete("/{username}/permanent") +async def gdpr_delete_user( + username: str, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + ... +``` -### Password Security -- bcrypt hashing with automatic salt generation -- Configurable password complexity requirements -- No plain text passwords stored anywhere -- Rate limiting on authentication endpoints +The leading underscore is the codebase's convention for dependency-only parameters. 
-### API Protection -- CORS policies for cross-origin request control -- Rate limiting prevents brute force attacks -- Input validation prevents injection attacks -- Consistent error messages prevent information disclosure +### Resource Ownership -## Configuration +Ownership is checked in the service layer, not in the route: -### JWT Settings -```env -SECRET_KEY="your-super-secret-key-here" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 +```python +# modules/user/service.py +async def verify_user_permission( + self, + current_user: dict[str, Any], + target_username: str, + action: str, +) -> None: + if current_user["username"] != target_username and not current_user["is_superuser"]: + raise PermissionDeniedError(f"Cannot {action} for another user") ``` -### Security Settings -```env -# Cookie security -COOKIE_SECURE=true -COOKIE_SAMESITE="lax" +The route delegates and the service raises `PermissionDeniedError` (which auto-maps to 403). See [Exceptions](../api/exceptions.md) for the mapping layer. -# Password requirements -PASSWORD_MIN_LENGTH=8 -ENABLE_PASSWORD_COMPLEXITY=true -``` +## Security Features -## Getting Started +### Session Security -Follow this progressive learning path: +- HTTP-only `session_id` cookie — JavaScript can't read it (XSS-safe) +- `Secure` cookies in non-dev environments (`SESSION_SECURE_COOKIES=true`) +- CSRF token validation for state-changing requests (`CSRF_ENABLED=true`) +- IP and user-agent recorded with each session +- Per-user session count cap (`MAX_SESSIONS_PER_USER`) -### 1. **[JWT Tokens](jwt-tokens.md)** - Foundation -Understand how JWT tokens work, including access and refresh token management, verification, and blacklisting. +### Password Security -### 2. **[User Management](user-management.md)** - Core Features -Implement user registration, login, profile management, and administrative operations. 
+- bcrypt hashing with automatic salt +- Pydantic validation enforces minimum length and complexity at the schema level (`UserCreate.password`) +- Plaintext passwords are never stored or logged +- Login rate limiting prevents credential stuffing -### 3. **[Permissions](permissions.md)** - Access Control -Set up role-based access control, resource ownership checking, and tier-based permissions. +### Production Validator -## Implementation Examples +When `ENVIRONMENT=production` and `PRODUCTION_SECURITY_VALIDATION_ENABLED=true` (both default), the app refuses to start if it finds insecure settings: -### Quick Authentication Setup +- Default `SECRET_KEY` value +- `DEBUG=true` +- `CORS_ORIGINS=*` -```python -# Protect an endpoint -@router.get("/my-data") -async def get_my_data(current_user: dict = Depends(get_current_user)): - return await get_user_specific_data(current_user["id"]) - -# Check user permissions -def check_tier_access(user: dict, required_tier: str): - if not user.get("tier") or user["tier"]["name"] != required_tier: - raise ForbiddenException(f"Requires {required_tier} tier") - -# Custom authentication dependency -async def get_premium_user(current_user: dict = Depends(get_current_user)): - check_tier_access(current_user, "Pro") - return current_user +`PRODUCTION_SECURITY_STRICT_MODE=true` makes the validator stricter still. + +## Configuration + +The full reference is in [Environment Variables](../configuration/environment-variables.md). 
The most relevant settings: + +```env +# Sessions +SESSION_TIMEOUT_MINUTES=30 +SESSION_CLEANUP_INTERVAL_MINUTES=15 +MAX_SESSIONS_PER_USER=5 +SESSION_SECURE_COOKIES=true +SESSION_BACKEND=redis # redis | memory | memcached +SESSION_COOKIE_MAX_AGE=86400 + +# CSRF +CSRF_ENABLED=true # set false for dev/test + +# Login rate limiting +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 + +# OAuth +OAUTH_REDIRECT_BASE_URL=http://localhost:8000 +OAUTH_GOOGLE_CLIENT_ID= +OAUTH_GOOGLE_CLIENT_SECRET= +OAUTH_GITHUB_CLIENT_ID= # provider scaffolded; routes not wired +OAUTH_GITHUB_CLIENT_SECRET= + +# Security +SECRET_KEY= +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=false ``` -### Frontend Integration +## Quick Examples + +### Frontend Login Flow (JavaScript) ```javascript -// Basic authentication flow -class AuthManager { +class AuthClient { async login(username, password) { - const response = await fetch('/api/v1/login', { + const res = await fetch('/api/v1/auth/login', { method: 'POST', - headers: {'Content-Type': 'application/x-www-form-urlencoded'}, - body: new URLSearchParams({username, password}) + credentials: 'include', // important — accept cookies + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body: new URLSearchParams({ username, password }), }); - - const tokens = await response.json(); - localStorage.setItem('access_token', tokens.access_token); - return tokens; + if (!res.ok) throw new Error('login failed'); + const { csrf_token } = await res.json(); + // Store the CSRF token in memory; cookie is set automatically + this.csrfToken = csrf_token; + return csrf_token; } - - async makeAuthenticatedRequest(url, options = {}) { - const token = localStorage.getItem('access_token'); + + async post(url, body) { return fetch(url, { - ...options, + method: 'POST', + credentials: 'include', headers: { - ...options.headers, - 'Authorization': `Bearer ${token}` - } + 'Content-Type': 'application/json', + 'X-CSRF-Token': 
this.csrfToken, // required for state-changing requests + }, + body: JSON.stringify(body), }); } + + async logout() { + await fetch('/api/v1/auth/logout', { + method: 'POST', + credentials: 'include', + headers: { 'X-CSRF-Token': this.csrfToken }, + }); + this.csrfToken = null; + } } ``` -## What's Next +The `credentials: 'include'` flag is what makes the browser actually send cookies cross-origin. Pair this with proper CORS settings on the server side (`CORS_ALLOW_CREDENTIALS=true`). + +### Custom Tier-Based Dependency + +You can combine the built-in deps to enforce tier checks: + +```python +from typing import Annotated, Any +from fastapi import Depends, HTTPException + +from ...infrastructure.auth.session.dependencies import get_current_user -Start building your authentication system: -1. **[JWT Tokens](jwt-tokens.md)** - Learn token creation, verification, and lifecycle management -2. **[User Management](user-management.md)** - Implement registration, login, and profile operations -3. **[Permissions](permissions.md)** - Add authorization patterns and access control +async def require_tier( + tier_name: str, + user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + user_tier = user.get("tier") or {} + if user_tier.get("name") != tier_name: + raise HTTPException(status_code=403, detail=f"Requires {tier_name} tier") + return user + + +# Usage with a Pro tier +@router.get("/premium") +async def premium_feature( + user: Annotated[dict[str, Any], Depends(lambda u=Depends(get_current_user): require_tier("pro", u))], +): + return {"data": "premium content"} +``` + +In practice, prefer raising `PermissionDeniedError` from inside a service method so the mapping layer translates it consistently (see [Exceptions](../api/exceptions.md)). + +## Getting Started + +1. **[Sessions](sessions.md)** — How sessions work, cookie handling, CSRF +2. **[User Management](user-management.md)** — Registration, login, profile +3. 
**[Permissions](permissions.md)** — Role-based and resource-based access control + +## What's Next -The authentication system provides a secure foundation for your API. Each guide includes practical examples and implementation details for production-ready authentication. \ No newline at end of file +- **[Environment Variables](../configuration/environment-variables.md)** — All auth-related settings +- **[Exceptions](../api/exceptions.md)** — How `PermissionDeniedError` and friends become HTTP 403/401 +- **[API Endpoints](../api/endpoints.md)** — Patterns for protecting routes diff --git a/docs/user-guide/authentication/jwt-tokens.md b/docs/user-guide/authentication/jwt-tokens.md deleted file mode 100644 index e4a3f14e..00000000 --- a/docs/user-guide/authentication/jwt-tokens.md +++ /dev/null @@ -1,632 +0,0 @@ -# JWT Tokens - -JSON Web Tokens (JWT) form the backbone of modern web authentication. This comprehensive guide explains how the boilerplate implements a secure, stateless authentication system using access and refresh tokens. - -## Understanding JWT Authentication - -JWT tokens are self-contained, digitally signed packages of information that can be safely transmitted between parties. Unlike traditional session-based authentication that requires server-side storage, JWT tokens are stateless - all the information needed to verify a user's identity is contained within the token itself. - -### Why Use JWT? - -**Stateless Design**: No need to store session data on the server, making it perfect for distributed systems and microservices. - -**Scalability**: Since tokens contain all necessary information, they work seamlessly across multiple servers without shared session storage. - -**Security**: Digital signatures ensure tokens can't be tampered with, and expiration times limit exposure if compromised. - -**Cross-Domain Support**: Unlike cookies, JWT tokens work across different domains and can be used in mobile applications. 
- -## Token Types - -The authentication system uses a **dual-token approach** for maximum security and user experience: - -### Access Tokens - -Access tokens are short-lived credentials that prove a user's identity for API requests. Think of them as temporary keys that grant access to protected resources. - -- **Purpose**: Authenticate API requests and authorize actions -- **Lifetime**: 30 minutes (configurable) - short enough to limit damage if compromised -- **Storage**: Authorization header (`Bearer `) - sent with each API request -- **Usage**: Include in every call to protected endpoints - -**Why Short-Lived?** If an access token is stolen (e.g., through XSS), the damage window is limited to 30 minutes before it expires naturally. - -### Refresh Tokens - -Refresh tokens are longer-lived credentials used solely to generate new access tokens. They provide a balance between security and user convenience. - -- **Purpose**: Generate new access tokens without requiring re-login -- **Lifetime**: 7 days (configurable) - long enough for good UX, short enough for security -- **Storage**: Secure HTTP-only cookie - inaccessible to JavaScript, preventing XSS attacks -- **Usage**: Automatically used by the browser when access tokens need refreshing - -**Why HTTP-Only Cookies?** This prevents malicious JavaScript from accessing refresh tokens, providing protection against XSS attacks while allowing automatic renewal. - -## Token Creation - -Understanding how tokens are created helps you customize the authentication system for your specific needs. - -### Creating Access Tokens - -Access tokens are generated during login and token refresh operations. The process involves encoding user information with an expiration time and signing it with your secret key. 
- -```python -from datetime import timedelta -from app.core.security import create_access_token, ACCESS_TOKEN_EXPIRE_MINUTES - -# Basic access token with default expiration -access_token = await create_access_token(data={"sub": username}) - -# Custom expiration for special cases (e.g., admin sessions) -custom_expires = timedelta(minutes=60) -access_token = await create_access_token(data={"sub": username}, expires_delta=custom_expires) -``` - -**When to Customize Expiration:** - -- **High-security environments**: Shorter expiration (15 minutes) -- **Development/testing**: Longer expiration for convenience -- **Admin operations**: Variable expiration based on sensitivity - -### Creating Refresh Tokens - -Refresh tokens follow the same creation pattern but with longer expiration times. They're typically created only during login. - -```python -from app.core.security import create_refresh_token, REFRESH_TOKEN_EXPIRE_DAYS - -# Standard refresh token -refresh_token = await create_refresh_token(data={"sub": username}) - -# Extended refresh token for "remember me" functionality -extended_expires = timedelta(days=30) -refresh_token = await create_refresh_token(data={"sub": username}, expires_delta=extended_expires) -``` - -### Token Structure - -JWT tokens consist of three parts separated by dots: `header.payload.signature`. The payload contains the actual user information and metadata. 
- -```python -# Access token payload structure -{ - "sub": "username", # Subject (user identifier) - "exp": 1234567890, # Expiration timestamp (Unix) - "token_type": "access", # Distinguishes from refresh tokens - "iat": 1234567890, # Issued at (automatic) -} - -# Refresh token payload structure -{ - "sub": "username", # Same user identifier - "exp": 1234567890, # Longer expiration time - "token_type": "refresh", # Prevents confusion/misuse - "iat": 1234567890, # Issue timestamp -} -``` - -**Key Fields Explained:** - -- **`sub` (Subject)**: Identifies the user - can be username, email, or user ID -- **`exp` (Expiration)**: Unix timestamp when token becomes invalid -- **`token_type`**: Custom field preventing tokens from being used incorrectly -- **`iat` (Issued At)**: Useful for token rotation and audit trails - -## Token Verification - -Token verification is a multi-step process that ensures both the token's authenticity and the user's current authorization status. - -### Verifying Access Tokens - -Every protected endpoint must verify the access token before processing the request. This involves checking the signature, expiration, and blacklist status. - -```python -from app.core.security import verify_token, TokenType - -# Verify access token in endpoint -token_data = await verify_token(token, TokenType.ACCESS, db) -if token_data: - username = token_data.username_or_email - # Token is valid, proceed with request processing -else: - # Token is invalid, expired, or blacklisted - raise UnauthorizedException("Invalid or expired token") -``` - -### Verifying Refresh Tokens - -Refresh token verification follows the same process but with different validation rules and outcomes. 
- -```python -# Verify refresh token for renewal -token_data = await verify_token(token, TokenType.REFRESH, db) -if token_data: - # Generate new access token - new_access_token = await create_access_token(data={"sub": token_data.username_or_email}) - return {"access_token": new_access_token, "token_type": "bearer"} -else: - # Refresh token invalid - user must log in again - raise UnauthorizedException("Invalid refresh token") -``` - -### Token Verification Process - -The verification process includes several security checks to prevent various attack vectors: - -```python -async def verify_token(token: str, expected_token_type: TokenType, db: AsyncSession) -> TokenData | None: - # 1. Check blacklist first (prevents use of logged-out tokens) - is_blacklisted = await crud_token_blacklist.exists(db, token=token) - if is_blacklisted: - return None - - try: - # 2. Verify signature and decode payload - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - - # 3. Extract and validate claims - username_or_email: str | None = payload.get("sub") - token_type: str | None = payload.get("token_type") - - # 4. Ensure token type matches expectation - if username_or_email is None or token_type != expected_token_type: - return None - - # 5. Return validated data - return TokenData(username_or_email=username_or_email) - - except JWTError: - # Token is malformed, expired, or signature invalid - return None -``` - -**Security Checks Explained:** - -1. **Blacklist Check**: Prevents use of tokens from logged-out users -1. **Signature Verification**: Ensures token hasn't been tampered with -1. **Expiration Check**: Automatically handled by JWT library -1. **Type Validation**: Prevents refresh tokens from being used as access tokens -1. **Subject Validation**: Ensures token contains valid user identifier - -## Client-Side Authentication Flow - -Understanding the complete authentication flow helps frontend developers integrate properly with the API. 
- -### Recommended Client Flow - -**1. Login Process** - -```javascript -// Send credentials to login endpoint -const response = await fetch('/api/v1/login', { - method: 'POST', - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - body: 'username=user&password=pass', - credentials: 'include' // Important: includes cookies -}); - -const { access_token, token_type } = await response.json(); - -// Store access token in memory (not localStorage) -sessionStorage.setItem('access_token', access_token); -``` - -**2. Making Authenticated Requests** - -```javascript -// Include access token in Authorization header -const response = await fetch('/api/v1/protected-endpoint', { - headers: { - 'Authorization': `Bearer ${sessionStorage.getItem('access_token')}` - }, - credentials: 'include' -}); -``` - -**3. Handling Token Expiration** - -```javascript -// Automatic token refresh on 401 errors -async function apiCall(url, options = {}) { - let response = await fetch(url, { - ...options, - headers: { - ...options.headers, - 'Authorization': `Bearer ${sessionStorage.getItem('access_token')}` - }, - credentials: 'include' - }); - - // If token expired, try to refresh - if (response.status === 401) { - const refreshResponse = await fetch('/api/v1/refresh', { - method: 'POST', - credentials: 'include' // Sends refresh token cookie - }); - - if (refreshResponse.ok) { - const { access_token } = await refreshResponse.json(); - sessionStorage.setItem('access_token', access_token); - - // Retry original request - response = await fetch(url, { - ...options, - headers: { - ...options.headers, - 'Authorization': `Bearer ${access_token}` - }, - credentials: 'include' - }); - } else { - // Refresh failed - redirect to login - window.location.href = '/login'; - } - } - - return response; -} -``` - -**4. 
Logout Process** - -```javascript -// Clear tokens and call logout endpoint -await fetch('/api/v1/logout', { - method: 'POST', - credentials: 'include' -}); - -sessionStorage.removeItem('access_token'); -// Refresh token cookie is cleared by server -``` - -### Cookie Configuration - -The refresh token cookie is configured for maximum security: - -```python -response.set_cookie( - key="refresh_token", - value=refresh_token, - httponly=True, # Prevents JavaScript access (XSS protection) - secure=True, # HTTPS only in production - samesite="Lax", # CSRF protection with good usability - max_age=REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60, -) -``` - -**SameSite Options:** - -- **`Lax`** (Recommended): Cookies sent on top-level navigation but not cross-site requests -- **`Strict`**: Maximum security but may break some user flows -- **`None`**: Required for cross-origin requests (must use with Secure) - -## Token Blacklisting - -Token blacklisting solves a fundamental problem with JWT tokens: once issued, they remain valid until expiration, even if the user logs out. Blacklisting provides immediate token revocation. - -### Why Blacklisting Matters - -Without blacklisting, logged-out users could continue accessing your API until their tokens naturally expire. This creates security risks, especially on shared computers or if tokens are compromised. 
- -### Blacklisting Implementation - -The system uses a database table to track invalidated tokens: - -```python -# models/token_blacklist.py -class TokenBlacklist(Base): - __tablename__ = "token_blacklist" - - id: Mapped[int] = mapped_column(primary_key=True) - token: Mapped[str] = mapped_column(unique=True, index=True) # Full token string - expires_at: Mapped[datetime] = mapped_column() # When to clean up - created_at: Mapped[datetime] = mapped_column(default=datetime.utcnow) -``` - -**Design Considerations:** - -- **Unique constraint**: Prevents duplicate entries -- **Index on token**: Fast lookup during verification -- **Expires_at field**: Enables automatic cleanup of old entries - -### Blacklisting Tokens - -The system provides functions for both single token and dual token blacklisting: - -```python -from app.core.security import blacklist_token, blacklist_tokens - -# Single token blacklisting (for specific scenarios) -await blacklist_token(token, db) - -# Dual token blacklisting (standard logout) -await blacklist_tokens(access_token, refresh_token, db) -``` - -### Blacklisting Process - -The blacklisting process extracts the expiration time from the token to set an appropriate cleanup schedule: - -```python -async def blacklist_token(token: str, db: AsyncSession) -> None: - # 1. Decode token to extract expiration (no verification needed) - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - exp_timestamp = payload.get("exp") - - if exp_timestamp is not None: - # 2. Convert Unix timestamp to datetime - expires_at = datetime.fromtimestamp(exp_timestamp) - - # 3. Store in blacklist with expiration - await crud_token_blacklist.create(db, object=TokenBlacklistCreate(token=token, expires_at=expires_at)) -``` - -**Cleanup Strategy**: Blacklisted tokens can be automatically removed from the database after their natural expiration time, preventing unlimited database growth. 
- -## Login Flow Implementation - -### Complete Login Endpoint - -```python -@router.post("/login", response_model=Token) -async def login_for_access_token( - response: Response, - form_data: Annotated[OAuth2PasswordRequestForm, Depends()], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - # 1. Authenticate user - user = await authenticate_user(username_or_email=form_data.username, password=form_data.password, db=db) - - if not user: - raise HTTPException(status_code=401, detail="Incorrect username or password") - - # 2. Create access token - access_token = await create_access_token(data={"sub": user["username"]}) - - # 3. Create refresh token - refresh_token = await create_refresh_token(data={"sub": user["username"]}) - - # 4. Set refresh token as HTTP-only cookie - response.set_cookie( - key="refresh_token", - value=refresh_token, - httponly=True, - secure=True, - samesite="strict", - max_age=REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60, - ) - - return {"access_token": access_token, "token_type": "bearer"} -``` - -### Token Refresh Endpoint - -```python -@router.post("/refresh", response_model=Token) -async def refresh_access_token( - response: Response, db: Annotated[AsyncSession, Depends(async_get_db)], refresh_token: str = Cookie(None) -) -> dict[str, str]: - if not refresh_token: - raise HTTPException(status_code=401, detail="Refresh token missing") - - # 1. Verify refresh token - token_data = await verify_token(refresh_token, TokenType.REFRESH, db) - if not token_data: - raise HTTPException(status_code=401, detail="Invalid refresh token") - - # 2. Create new access token - new_access_token = await create_access_token(data={"sub": token_data.username_or_email}) - - # 3. Optionally create new refresh token (token rotation) - new_refresh_token = await create_refresh_token(data={"sub": token_data.username_or_email}) - - # 4. Blacklist old refresh token - await blacklist_token(refresh_token, db) - - # 5. 
Set new refresh token cookie - response.set_cookie( - key="refresh_token", - value=new_refresh_token, - httponly=True, - secure=True, - samesite="strict", - max_age=REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60, - ) - - return {"access_token": new_access_token, "token_type": "bearer"} -``` - -### Logout Implementation - -```python -@router.post("/logout") -async def logout( - response: Response, - db: Annotated[AsyncSession, Depends(async_get_db)], - current_user: dict = Depends(get_current_user), - token: str = Depends(oauth2_scheme), - refresh_token: str = Cookie(None), -) -> dict[str, str]: - # 1. Blacklist access token - await blacklist_token(token, db) - - # 2. Blacklist refresh token if present - if refresh_token: - await blacklist_token(refresh_token, db) - - # 3. Clear refresh token cookie - response.delete_cookie(key="refresh_token", httponly=True, secure=True, samesite="strict") - - return {"message": "Successfully logged out"} -``` - -## Authentication Dependencies - -### get_current_user - -```python -async def get_current_user(db: AsyncSession = Depends(async_get_db), token: str = Depends(oauth2_scheme)) -> dict: - # 1. Verify token - token_data = await verify_token(token, TokenType.ACCESS, db) - if not token_data: - raise HTTPException(status_code=401, detail="Invalid token") - - # 2. 
Get user from database - user = await crud_users.get(db=db, username=token_data.username_or_email, schema_to_select=UserRead) - - if user is None: - raise HTTPException(status_code=401, detail="User not found") - - return user -``` - -### get_optional_user - -```python -async def get_optional_user( - db: AsyncSession = Depends(async_get_db), token: str = Depends(optional_oauth2_scheme) -) -> dict | None: - if not token: - return None - - try: - return await get_current_user(db=db, token=token) - except HTTPException: - return None -``` - -### get_current_superuser - -```python -async def get_current_superuser(current_user: dict = Depends(get_current_user)) -> dict: - if not current_user.get("is_superuser", False): - raise HTTPException(status_code=403, detail="Not enough permissions") - return current_user -``` - -## Configuration - -### Environment Variables - -```bash -# JWT Configuration -SECRET_KEY=your-secret-key-here -ALGORITHM=HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 - -# Security Headers -SECURE_COOKIES=true -CORS_ORIGINS=["http://localhost:3000","https://yourapp.com"] -``` - -### Security Configuration - -```python -# app/core/config.py -class Settings(BaseSettings): - SECRET_KEY: SecretStr - ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 - REFRESH_TOKEN_EXPIRE_DAYS: int = 7 - - # Cookie settings - SECURE_COOKIES: bool = True - COOKIE_DOMAIN: str | None = None - COOKIE_SAMESITE: str = "strict" -``` - -## Security Best Practices - -### Token Security - -- **Use strong secrets**: Generate cryptographically secure SECRET_KEY -- **Rotate secrets**: Regularly change SECRET_KEY in production -- **Environment separation**: Different secrets for dev/staging/production -- **Secure transmission**: Always use HTTPS in production - -### Cookie Security - -- **HttpOnly flag**: Prevents JavaScript access to refresh tokens -- **Secure flag**: Ensures cookies only sent over HTTPS -- **SameSite attribute**: Prevents CSRF attacks 
-- **Domain restrictions**: Set cookie domain appropriately - -### Implementation Security - -- **Input validation**: Validate all token inputs -- **Rate limiting**: Implement login attempt limits -- **Audit logging**: Log authentication events -- **Token rotation**: Regularly refresh tokens - -## Common Patterns - -### API Key Authentication - -For service-to-service communication: - -```python -async def get_api_key_user(api_key: str = Header(None), db: AsyncSession = Depends(async_get_db)) -> dict: - if not api_key: - raise HTTPException(status_code=401, detail="API key required") - - # Verify API key - user = await crud_users.get(db=db, api_key=api_key) - if not user: - raise HTTPException(status_code=401, detail="Invalid API key") - - return user -``` - -### Multiple Authentication Methods - -```python -async def get_authenticated_user( - db: AsyncSession = Depends(async_get_db), token: str = Depends(optional_oauth2_scheme), api_key: str = Header(None) -) -> dict: - # Try JWT token first - if token: - try: - return await get_current_user(db=db, token=token) - except HTTPException: - pass - - # Fall back to API key - if api_key: - return await get_api_key_user(api_key=api_key, db=db) - - raise HTTPException(status_code=401, detail="Authentication required") -``` - -## Troubleshooting - -### Common Issues - -**Token Expired**: Implement automatic refresh using refresh tokens -**Invalid Signature**: Check SECRET_KEY consistency across environments -**Blacklisted Token**: User logged out - redirect to login -**Missing Token**: Ensure Authorization header is properly set - -### Debugging Tips - -```python -# Enable debug logging -import logging - -logging.getLogger("app.core.security").setLevel(logging.DEBUG) - -# Test token validation -async def debug_token(token: str, db: AsyncSession): - try: - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - print(f"Token payload: {payload}") - - is_blacklisted = await 
crud_token_blacklist.exists(db, token=token) - print(f"Is blacklisted: {is_blacklisted}") - - except JWTError as e: - print(f"JWT Error: {e}") -``` - -This comprehensive JWT implementation provides secure, scalable authentication for your FastAPI application. diff --git a/docs/user-guide/authentication/permissions.md b/docs/user-guide/authentication/permissions.md index c1daddfd..2cf44cbd 100644 --- a/docs/user-guide/authentication/permissions.md +++ b/docs/user-guide/authentication/permissions.md @@ -1,634 +1,386 @@ # Permissions and Authorization -Authorization determines what authenticated users can do within your application. While authentication answers "who are you?", authorization answers "what can you do?". This section covers the permission system, access control patterns, and how to implement secure authorization in your endpoints. +Authentication answers "who are you?". Authorization answers "what can you do?". This page covers the boilerplate's authorization patterns: superuser flags, resource ownership, tier-based limits, and API key permissions. -## Understanding Authorization +## Authorization Patterns -Authorization is a multi-layered security concept that protects resources and operations based on user identity, roles, and contextual information. The boilerplate implements several authorization patterns to handle different security requirements. +The boilerplate ships four overlapping mechanisms. Pick the one(s) that fit your use case. 
-### Authorization vs Authentication +| Pattern | Where it lives | When to use | +|---------|----------------|-------------| +| **Superuser flag** | `User.is_superuser` boolean | Admin-only operations | +| **Resource ownership** | Service-layer permission checks | "Users can only edit their own X" | +| **Tier-based limits** | `Tier` model + `RateLimit` rules | Subscription gating, rate limits | +| **API key permissions** | `KeyPermission` model (resource + action) | Programmatic access control | -**Authentication**: Verifies user identity - confirms the user is who they claim to be -**Authorization**: Determines user permissions - decides what the authenticated user can access +These compose. A typical request goes through: -These work together: you must authenticate first (prove identity) before you can authorize (check permissions). +1. **Authentication** — session cookie (or API key) identifies *who* +2. **Coarse access** — superuser flag for admin endpoints +3. **Fine-grained access** — service-layer ownership / tier checks +4. **Rate limiting** — tier-based per-route limits (separate concern) -### Authorization Patterns +## Superuser Authorization -The system implements several common authorization patterns: +The User model has an `is_superuser: bool` column. Endpoints that should only be accessible to admins use the `get_current_superuser` dependency: -1. **Role-Based Access Control (RBAC)**: Users have roles (superuser, regular user) that determine permissions -2. **Resource Ownership**: Users can only access resources they own -3. **Tiered Access**: Different user tiers have different capabilities and limits -4. 
**Contextual Authorization**: Permissions based on request context (rate limits, time-based access) +```python +from typing import Annotated, Any -## Core Authorization Patterns +from fastapi import APIRouter, Depends -### Superuser Permissions +from ...infrastructure.auth.session.dependencies import get_current_superuser -Superusers have elevated privileges for administrative operations. This pattern is essential for system management but must be carefully controlled. +router = APIRouter() -```python -from app.api.dependencies import get_current_superuser - -# Superuser-only endpoint -@router.get("/admin/users/", dependencies=[Depends(get_current_superuser)]) -async def get_all_users( - db: AsyncSession = Depends(async_get_db) -) -> list[UserRead]: - # Only superusers can access this endpoint - users = await crud_users.get_multi( - db=db, - schema_to_select=UserRead, - return_as_model=True - ) - return users.data -``` -**When to Use Superuser Authorization:** +@router.delete("/admin/users/{username}") +async def gdpr_anonymize( + username: str, + _: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> dict[str, str]: + # Only superusers reach this code + ... +``` -- **User management operations**: Creating, deleting, or modifying other users -- **System configuration**: Changing application settings or configuration -- **Data export/import**: Bulk operations on sensitive data -- **Administrative reporting**: Access to system-wide analytics and logs +The leading `_:` is the codebase convention for dependency-only parameters whose value isn't used. -**Security Considerations:** +`get_current_superuser` returns 401 if not authenticated and 403 if authenticated but not a superuser. See [Sessions](sessions.md) for the dependency reference. 
-- **Minimal Assignment**: Only assign superuser status when absolutely necessary -- **Regular Audits**: Periodically review who has superuser access -- **Activity Logging**: Log all superuser actions for security monitoring -- **Time-Limited Access**: Consider temporary superuser elevation for specific tasks +### When to Use the Superuser Flag -### Resource Ownership +- User management (create/delete other users) +- Tier assignment (`PATCH /api/v1/users/{username}/tier`) +- Rate limit configuration (`PATCH /api/v1/rate-limits/{name}`) +- GDPR data anonymization +- System configuration changes -Resource ownership ensures users can only access and modify their own data. This is the most common authorization pattern in user-facing applications. +### Bootstrapping the First Superuser -```python -@router.get("/posts/me/") -async def get_my_posts( - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) -) -> list[PostRead]: - # Get posts owned by current user - posts = await crud_posts.get_multi( - db=db, - created_by_user_id=current_user["id"], - schema_to_select=PostRead, - return_as_model=True - ) - return posts.data +The first superuser is created by `scripts/setup_initial_data.py` from `ADMIN_*` env vars on first run: -@router.delete("/posts/{post_id}") -async def delete_post( - post_id: int, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) -) -> dict[str, str]: - # 1. Get the post - post = await crud_posts.get(db=db, id=post_id) - if not post: - raise NotFoundException("Post not found") - - # 2. Check ownership - if post["created_by_user_id"] != current_user["id"]: - raise ForbiddenException("You can only delete your own posts") - - # 3. 
Delete the post - await crud_posts.delete(db=db, id=post_id) - return {"message": "Post deleted"} +```bash +cd backend +uv run python -m scripts.setup_initial_data ``` -**Ownership Validation Pattern:** +To grant superuser to an existing user, flip the column directly via the admin UI (`/admin`) or a one-off SQL update. -1. **Retrieve Resource**: Get the resource from the database -2. **Check Ownership**: Compare resource owner with current user -3. **Authorize or Deny**: Allow action if user owns resource, deny otherwise +## Resource Ownership -### User Tiers and Rate Limiting +Most "users can only modify their own data" rules belong in the **service layer**, not the route. The service raises a `PermissionDeniedError`, which the global handler maps to HTTP 403. -User tiers provide differentiated access based on subscription levels or user status. This enables business models with different feature sets for different user types. +Real example from `modules/user/service.py`: ```python -@router.post("/posts/", response_model=PostRead) -async def create_post( - post: PostCreate, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) -) -> PostRead: - # Check rate limits based on user tier - await check_rate_limit( - resource="posts", - user_id=current_user["id"], - tier_id=current_user.get("tier_id"), - db=db - ) - - # Create post with user association - post_internal = PostCreateInternal( - **post.model_dump(), - created_by_user_id=current_user["id"] - ) - - created_post = await crud_posts.create(db=db, object=post_internal) - return created_post -``` +from ..common.exceptions import PermissionDeniedError -**Rate Limiting Implementation:** -```python -async def check_rate_limit( - resource: str, - user_id: int, - tier_id: int | None, - db: AsyncSession +async def verify_user_permission( + self, + current_user: dict[str, Any], + target_username: str, + action: str, ) -> None: - # 1. 
Get user's tier information - if tier_id: - tier = await crud_tiers.get(db=db, id=tier_id) - limit = tier["rate_limit_posts"] if tier else 10 # Default limit - else: - limit = 5 # Free tier limit - - # 2. Count recent posts (last 24 hours) - recent_posts = await crud_posts.count( - db=db, - created_by_user_id=user_id, - created_at__gte=datetime.utcnow() - timedelta(hours=24) - ) - - # 3. Check if limit exceeded - if recent_posts >= limit: - raise RateLimitException(f"Daily {resource} limit exceeded ({limit})") + """Raise PermissionDeniedError if current_user can't act on target_username.""" + if current_user["username"] != target_username and not current_user["is_superuser"]: + raise PermissionDeniedError(f"Cannot {action} for another user") ``` -**Tier-Based Authorization Benefits:** +Routes call this before dispatching the operation: -- **Business Model Support**: Different features for different subscription levels -- **Resource Protection**: Prevents abuse by limiting free tier usage -- **Progressive Enhancement**: Encourages upgrades by showing tier benefits -- **Fair Usage**: Ensures equitable resource distribution among users +```python +# modules/user/routes.py +@router.patch("/{username}") +async def update_user_profile( + username: str, + values: UserUpdate, + current_user: Annotated[dict[str, Any], Depends(get_current_user)], + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> dict[str, str]: + try: + await user_service.verify_user_permission(current_user, username, "update profile") + # ...proceed with update... +``` + +The exception flows up to the global handler (registered in `infrastructure/app_factory.py`) which translates it via the `EXCEPTION_MAPPING` table — `PermissionDeniedError` → `ForbiddenException` (403). See [Exceptions](../api/exceptions.md) for the full mapping pipeline. 
-### Custom Permission Helpers +### Generic Ownership Pattern -Custom permission functions provide reusable authorization logic for complex scenarios. +For your own modules: ```python -# Permission helper functions -async def can_edit_post(user: dict, post_id: int, db: AsyncSession) -> bool: - """Check if user can edit a specific post.""" - post = await crud_posts.get(db=db, id=post_id) - if not post: - return False - - # Superusers can edit any post - if user.get("is_superuser", False): - return True - - # Users can edit their own posts - if post["created_by_user_id"] == user["id"]: - return True - - return False - -async def can_access_admin_panel(user: dict) -> bool: - """Check if user can access admin panel.""" - return user.get("is_superuser", False) - -async def has_tier_feature(user: dict, feature: str, db: AsyncSession) -> bool: - """Check if user's tier includes a specific feature.""" - tier_id = user.get("tier_id") - if not tier_id: - return False # Free tier - no premium features - - tier = await crud_tiers.get(db=db, id=tier_id) - if not tier: - return False - - # Check tier features (example) - return tier.get(f"allows_{feature}", False) - -# Usage in endpoints -@router.put("/posts/{post_id}") -async def update_post( - post_id: int, - post_updates: PostUpdate, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) -) -> PostRead: - # Use permission helper - if not await can_edit_post(current_user, post_id, db): - raise ForbiddenException("Cannot edit this post") - - updated_post = await crud_posts.update( - db=db, - object=post_updates, - id=post_id - ) - return updated_post +# modules/widgets/service.py +from ..common.exceptions import PermissionDeniedError, ResourceNotFoundError + + +class WidgetService: + async def delete( + self, widget_id: int, current_user: dict[str, Any], db: AsyncSession, + ) -> None: + widget = await crud_widgets.get(db=db, id=widget_id) + if widget is None: + raise 
ResourceNotFoundError("Widget not found") + + if widget["owner_id"] != current_user["id"] and not current_user["is_superuser"]: + raise PermissionDeniedError("Cannot delete another user's widget") + + await crud_widgets.delete(db=db, id=widget_id) ``` -**Permission Helper Benefits:** +Three rules to follow: + +1. **Service raises domain exceptions, not HTTP exceptions.** Lets the same logic be reused outside routes (admin scripts, tests, taskiq jobs). +2. **Superuser bypass is explicit.** `not current_user["is_superuser"]` makes the rule readable. +3. **Order: existence check first, then ownership.** A 404 is preferred to a 403 for resources the user shouldn't even know about — see the [Hide Resource Existence](../api/exceptions.md#hide-resource-existence) note. + +## Tier-Based Authorization -- **Reusability**: Same logic used across multiple endpoints -- **Consistency**: Ensures uniform permission checking -- **Maintainability**: Changes to permissions only need updates in one place -- **Testability**: Permission logic can be unit tested separately +Every user has a `tier_id` foreign key to the `Tier` model. The boilerplate ships **bare tiers** — just `name` and `description`, no built-in feature mapping or pricing logic. You decide what tiers mean. 
-## Authorization Dependencies +### Reading the User's Tier -### Basic Authorization Dependencies +`User.tier` is loaded automatically via `lazy="selectin"`, so a fetched user record includes their tier: ```python -# Required authentication -async def get_current_user( - token: str = Depends(oauth2_scheme), - db: AsyncSession = Depends(async_get_db) -) -> dict: - """Get currently authenticated user.""" - token_data = await verify_token(token, TokenType.ACCESS, db) - if not token_data: - raise HTTPException(status_code=401, detail="Invalid token") - - user = await crud_users.get(db=db, username=token_data.username_or_email) - if not user: - raise HTTPException(status_code=401, detail="User not found") - - return user - -# Optional authentication -async def get_optional_user( - token: str = Depends(optional_oauth2_scheme), - db: AsyncSession = Depends(async_get_db) -) -> dict | None: - """Get currently authenticated user, or None if not authenticated.""" - if not token: - return None - - try: - return await get_current_user(token=token, db=db) - except HTTPException: - return None - -# Superuser requirement -async def get_current_superuser( - current_user: dict = Depends(get_current_user) -) -> dict: - """Get current user and ensure they are a superuser.""" - if not current_user.get("is_superuser", False): - raise HTTPException(status_code=403, detail="Not enough permissions") +@router.get("/me", response_model=UserRead) +async def me( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + # current_user["tier"] is the joined Tier dict (or None) return current_user ``` -### Advanced Authorization Dependencies +### Gating a Feature on Tier Name + +For a simple feature gate, check the tier name directly in the service: ```python -# Tier-based access control -def require_tier(minimum_tier: str): - """Factory function for tier-based dependencies.""" - async def check_user_tier( - current_user: dict = Depends(get_current_user), - db: 
AsyncSession = Depends(async_get_db) - ) -> dict: - tier_id = current_user.get("tier_id") - if not tier_id: - raise HTTPException(status_code=403, detail="No subscription tier") - - tier = await crud_tiers.get(db=db, id=tier_id) - if not tier or tier["name"] != minimum_tier: - raise HTTPException( - status_code=403, - detail=f"Requires {minimum_tier} tier" - ) - - return current_user - - return check_user_tier - -# Resource ownership dependency -def require_resource_ownership(resource_type: str): - """Factory function for resource ownership dependencies.""" - async def check_ownership( - resource_id: int, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) - ) -> dict: - if resource_type == "post": - resource = await crud_posts.get(db=db, id=resource_id) - owner_field = "created_by_user_id" - else: - raise ValueError(f"Unknown resource type: {resource_type}") - - if not resource: - raise HTTPException(status_code=404, detail="Resource not found") - - # Superusers can access any resource - if current_user.get("is_superuser", False): - return current_user - - # Check ownership - if resource[owner_field] != current_user["id"]: - raise HTTPException( - status_code=403, - detail="You don't own this resource" - ) - - return current_user - - return check_ownership - -# Usage examples -@router.get("/premium-feature", dependencies=[Depends(require_tier("Premium"))]) -async def premium_feature(): - return {"message": "Premium feature accessed"} - -@router.put("/posts/{post_id}") -async def update_post( - post_id: int, - post_update: PostUpdate, - current_user: dict = Depends(require_resource_ownership("post")), - db: AsyncSession = Depends(async_get_db) -) -> PostRead: - # User ownership already verified by dependency - updated_post = await crud_posts.update(db=db, object=post_update, id=post_id) - return updated_post +async def export_data(self, current_user: dict[str, Any], db: AsyncSession) -> bytes: + tier = 
current_user.get("tier") or {} + if tier.get("name") not in {"pro", "enterprise"}: + raise PermissionDeniedError("Data export requires the Pro or Enterprise tier") + # ...generate export... ``` -## Security Best Practices +This works for "binary" features. For more complex models (per-feature quotas, multiple add-ons), consider building an entitlements system on top — that's outside the scope of the boilerplate. -### Principle of Least Privilege +### Tier-Based Rate Limits -Always grant the minimum permissions necessary for users to complete their tasks. +Rate limiting *is* built-in: each `RateLimit` row binds a tier to a path with a `limit` and `period`. The middleware in `infrastructure/rate_limit/middleware.py` enforces these per request. See [Rate Limiting](../rate-limiting/index.md). -**Implementation:** +To configure rate limits for a tier: -- **Default Deny**: Start with no permissions and explicitly grant what's needed -- **Regular Review**: Periodically audit user permissions and remove unnecessary access -- **Role Segregation**: Separate administrative and user-facing permissions -- **Temporary Elevation**: Use temporary permissions for one-time administrative tasks +```bash +# Create a rate limit (admin only) +curl -X POST http://localhost:8000/api/v1/rate-limits/ \ + -b superuser_cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: " \ + -d '{ + "tier_id": 2, + "name": "pro_users", + "path": "/api/v1/widgets/", + "limit": 1000, + "period": 60 + }' +``` -### Defense in Depth +## API Key Permissions -Implement multiple layers of authorization checks throughout your application. +For programmatic access, API keys carry their own per-key permission model. Each key can have multiple `KeyPermission` rows, where a permission is `(resource, action, allow/deny, optional conditions)`. -**Authorization Layers:** +### Permission Model -1. **API Gateway**: Route-level permission checks -2. 
**Endpoint Dependencies**: FastAPI dependency injection for common patterns -3. **Business Logic**: Method-level permission validation -4. **Database**: Row-level security where applicable +```python +# modules/api_keys/models.py +class KeyPermission(Base, TimestampMixin): + __tablename__ = "key_permissions" + + api_key_id: Mapped[int] = mapped_column(ForeignKey("api_keys.id", ondelete="CASCADE")) + resource: Mapped[KeyPermissionResource] = mapped_column(index=True) + action: Mapped[KeyPermissionAction] = mapped_column(index=True) + conditions: Mapped[dict[str, Any] | None] = mapped_column(JSON, default=None) + is_allowed: Mapped[bool] = mapped_column(Boolean, default=True) +``` -### Input Validation and Sanitization +### Resources and Actions -Always validate and sanitize user input, even from authorized users. +The `KeyPermissionResource` and `KeyPermissionAction` enums in `modules/api_keys/enums.py` define the shape of a permission row: ```python -@router.post("/admin/users/{user_id}/tier") -async def update_user_tier( - user_id: int, - tier_update: UserTierUpdate, - current_user: dict = Depends(get_current_superuser), - db: AsyncSession = Depends(async_get_db) -) -> dict[str, str]: - # 1. Validate tier exists - tier = await crud_tiers.get(db=db, id=tier_update.tier_id) - if not tier: - raise NotFoundException("Tier not found") - - # 2. Validate user exists - user = await crud_users.get(db=db, id=user_id) - if not user: - raise NotFoundException("User not found") - - # 3. Prevent self-demotion (optional business rule) - if user_id == current_user["id"] and tier["name"] == "free": - raise ForbiddenException("Cannot demote yourself to free tier") - - # 4. 
Update user tier - await crud_users.update( - db=db, - object={"tier_id": tier_update.tier_id}, - id=user_id - ) - - return {"message": f"User tier updated to {tier['name']}"} +class KeyPermissionResource(StrEnum): + USER_PROFILE = "user_profile" + ANALYTICS = "analytics" + ADMIN = "admin" + BILLING = "billing" + API_KEYS = "api_keys" + WILDCARD = "*" + # ... plus a few legacy values inherited from the upstream template + + +class KeyPermissionAction(StrEnum): + READ = "read" + WRITE = "write" + DELETE = "delete" + CREATE = "create" + UPDATE = "update" + LIST = "list" + ADMIN = "admin" + WILDCARD = "*" ``` -### Audit Logging +`*` is a wildcard — `(resource="*", action="*")` is full access; `(resource="user_profile", action="*")` is full access to the user_profile resource. -Log all significant authorization decisions for security monitoring and compliance. +!!! info "Customize the enums" + The enum values are starting points. Edit `modules/api_keys/enums.py` to match the resources and actions your API actually exposes. The default values include some leftovers from the upstream template (e.g. `conversations`, `credits`) — feel free to drop them. 
-```python -import logging +### Granting Permissions on a New Key -security_logger = logging.getLogger("security") +Permissions are passed at creation time: -async def log_authorization_event( - user_id: int, - action: str, - resource: str, - result: str, - details: dict = None -): - """Log authorization events for security auditing.""" - security_logger.info( - f"Authorization {result}: User {user_id} attempted {action} on {resource}", - extra={ - "user_id": user_id, - "action": action, - "resource": resource, - "result": result, - "details": details or {} - } - ) - -# Usage in permission checks -async def delete_user_account(user_id: int, current_user: dict, db: AsyncSession): - if current_user["id"] != user_id and not current_user.get("is_superuser"): - await log_authorization_event( - user_id=current_user["id"], - action="delete_account", - resource=f"user:{user_id}", - result="denied", - details={"reason": "insufficient_permissions"} - ) - raise ForbiddenException("Cannot delete other users' accounts") - - await log_authorization_event( - user_id=current_user["id"], - action="delete_account", - resource=f"user:{user_id}", - result="granted" - ) - - # Proceed with deletion - await crud_users.delete(db=db, id=user_id) +```bash +curl -X POST http://localhost:8000/api/v1/api-keys/ \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: " \ + -d '{ + "name": "Read-only analytics integration", + "permissions": { + "analytics": ["read", "list"], + "user_profile": ["read"] + }, + "usage_limits": {} + }' ``` -## Common Authorization Patterns +The service translates the dict into `KeyPermission` rows. -### Multi-Tenant Authorization +### Checking Permissions in a Route -For applications serving multiple organizations or tenants: +When a request comes in via API key, you can guard endpoints by required `(resource, action)`. 
The boilerplate doesn't ship a built-in `require_permission(...)` decorator — the API key flow is left flexible so you can wire it however suits your app: ```python -@router.get("/organizations/{org_id}/users/") -async def get_organization_users( - org_id: int, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) -) -> list[UserRead]: - # Check if user belongs to organization - membership = await crud_org_members.get( - db=db, - organization_id=org_id, - user_id=current_user["id"] - ) - - if not membership: - raise ForbiddenException("Not a member of this organization") - - # Check if user has admin role in organization - if membership.role not in ["admin", "owner"]: - raise ForbiddenException("Insufficient organization permissions") - - # Get organization users - users = await crud_users.get_multi( +async def require_key_permission( + resource: KeyPermissionResource, + action: KeyPermissionAction, + db: AsyncSession, + api_key: dict[str, Any], +) -> None: + has_permission = await crud_key_permissions.exists( db=db, - organization_id=org_id, - schema_to_select=UserRead, - return_as_model=True + api_key_id=api_key["id"], + resource=resource, + action=action, + is_allowed=True, ) - - return users.data + # also check wildcards + if not has_permission: + has_wildcard = await crud_key_permissions.exists( + db=db, + api_key_id=api_key["id"], + resource=KeyPermissionResource.WILDCARD, + action=KeyPermissionAction.WILDCARD, + is_allowed=True, + ) + if not has_wildcard: + raise PermissionDeniedError(f"API key lacks {resource}:{action}") ``` -### Time-Based Permissions +How API keys are authenticated (parsing the header, looking up the row, checking the status) is up to you — `KeyStatus` defines the lifecycle (`ACTIVE`, `INACTIVE`, `SUSPENDED`, `EXPIRED`, `REVOKED`). 
-For permissions that change based on time or schedule: +## Combining Patterns -```python -from datetime import datetime, time - -async def check_business_hours_access(user: dict) -> bool: - """Check if user can access during business hours only.""" - now = datetime.now() - business_start = time(9, 0) # 9 AM - business_end = time(17, 0) # 5 PM - - # Superusers can always access - if user.get("is_superuser", False): - return True - - # Regular users only during business hours - current_time = now.time() - return business_start <= current_time <= business_end - -# Usage in dependency -async def require_business_hours( - current_user: dict = Depends(get_current_user) -) -> dict: - """Require access during business hours for non-admin users.""" - if not await check_business_hours_access(current_user): - raise ForbiddenException("Access only allowed during business hours") - return current_user +A real endpoint often uses several at once: -@router.post("/business-operation", dependencies=[Depends(require_business_hours)]) -async def business_operation(): - return {"message": "Business operation completed"} +```python +@router.delete("/widgets/{widget_id}", status_code=204) +async def delete_widget( + widget_id: int, + current_user: Annotated[dict[str, Any], Depends(get_current_user)], # 1. authn + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> None: + try: + # Service handles: + # 2. Existence check + # 3. Ownership check (superuser bypass) + # 4. Tier feature gate (e.g. "delete requires Pro tier") + await widget_service.delete(widget_id, current_user, db) + except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` -### Role-Based Access Control (RBAC) +The route stays trivial. Authorization rules accumulate in the service, where they're testable and reusable. 
-For more complex permission systems: +## Testing Authorization + +Test the **service**, not the route, for permission rules — they're easier to set up and faster to run. ```python -# Role definitions -class Role(str, Enum): - USER = "user" - MODERATOR = "moderator" - ADMIN = "admin" - SUPERUSER = "superuser" - -# Permission checking -def has_role(user: dict, required_role: Role) -> bool: - """Check if user has required role or higher.""" - role_hierarchy = { - Role.USER: 0, - Role.MODERATOR: 1, - Role.ADMIN: 2, - Role.SUPERUSER: 3 - } - - user_role = Role(user.get("role", "user")) - return role_hierarchy[user_role] >= role_hierarchy[required_role] - -# Role-based dependency -def require_role(minimum_role: Role): - """Factory for role-based dependencies.""" - async def check_role(current_user: dict = Depends(get_current_user)) -> dict: - if not has_role(current_user, minimum_role): - raise HTTPException( - status_code=403, - detail=f"Requires {minimum_role.value} role or higher" - ) - return current_user - - return check_role - -# Usage -@router.delete("/posts/{post_id}", dependencies=[Depends(require_role(Role.MODERATOR))]) -async def moderate_delete_post(post_id: int, db: AsyncSession = Depends(async_get_db)): - await crud_posts.delete(db=db, id=post_id) - return {"message": "Post deleted by moderator"} +import pytest +from src.modules.user.service import UserService +from src.modules.common.exceptions import PermissionDeniedError + + +@pytest.mark.asyncio +async def test_normal_user_cannot_update_other_users(): + service = UserService() + current_user = {"username": "alice", "is_superuser": False} + + with pytest.raises(PermissionDeniedError): + await service.verify_user_permission(current_user, "bob", "update profile") + + +@pytest.mark.asyncio +async def test_superuser_can_update_other_users(): + service = UserService() + current_user = {"username": "alice", "is_superuser": True} + + # Should not raise + await service.verify_user_permission(current_user, 
"bob", "update profile") ``` -### Feature Flags and Permissions +For end-to-end coverage, integration tests against `TestClient` exercise the full session-cookie + permission-check stack. See [Testing](../testing.md). + +## Best Practices -For gradual feature rollouts: +### Keep authorization in services + +Routes do dependency injection and HTTP shaping; services hold rules. If a `PermissionDeniedError` raise feels out of place in your service, that's a sign your service is doing more than business logic. + +### Order checks: authn → existence → ownership → quota ```python -async def has_feature_access(user: dict, feature: str, db: AsyncSession) -> bool: - """Check if user has access to a specific feature.""" - # Check feature flags - feature_flag = await crud_feature_flags.get(db=db, name=feature) - if not feature_flag or not feature_flag.enabled: - return False - - # Check user tier permissions - if feature_flag.requires_tier: - tier_id = user.get("tier_id") - if not tier_id: - return False - - tier = await crud_tiers.get(db=db, id=tier_id) - if not tier or tier["level"] < feature_flag["minimum_tier_level"]: - return False - - # Check beta user status - if feature_flag.beta_only: - return user.get("is_beta_user", False) - - return True - -# Feature flag dependency -def require_feature(feature_name: str): - """Factory for feature flag dependencies.""" - async def check_feature_access( - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db) - ) -> dict: - if not await has_feature_access(current_user, feature_name, db): - raise HTTPException( - status_code=403, - detail=f"Access to {feature_name} feature not available" - ) - return current_user - - return check_feature_access - -@router.get("/beta-feature", dependencies=[Depends(require_feature("beta_analytics"))]) -async def get_beta_analytics(): - return {"analytics": "beta_data"} +# 1. Authenticated? — done by the dependency +# 2. Resource exists? 
+if widget is None: + raise ResourceNotFoundError(...) +# 3. User owns it? +if widget["owner_id"] != current_user["id"] and not current_user["is_superuser"]: + raise PermissionDeniedError(...) +# 4. Quota / tier OK? +if not within_tier_limits(...): + raise PermissionDeniedError(...) ``` -This comprehensive permissions system provides flexible, secure authorization patterns that can be adapted to your specific application requirements while maintaining security best practices. +This order prevents leaking existence (404 before 403) and keeps the cheap checks first. + +### Don't reinvent rate limits + +The built-in tier rate-limiter middleware is enforced before your route runs. Don't roll your own per-feature counters unless you need something the middleware can't express. See [Rate Limiting](../rate-limiting/index.md). + +### Audit superuser actions + +Superuser endpoints touch sensitive data. Log the actor + action server-side — the boilerplate's logging infrastructure (with `correlation_id` + `support_id`) makes this straightforward. See [Logging](../../user-guide/configuration/index.md) for the setup. + +## Next Steps + +- **[Sessions](sessions.md)** — How session-based authentication works +- **[Rate Limiting](../rate-limiting/index.md)** — Tier-based rate limit middleware +- **[Exceptions](../api/exceptions.md)** — How `PermissionDeniedError` becomes 403 +- **[Production](../production.md)** — Hardening checklist diff --git a/docs/user-guide/authentication/sessions.md b/docs/user-guide/authentication/sessions.md new file mode 100644 index 00000000..1f3c429b --- /dev/null +++ b/docs/user-guide/authentication/sessions.md @@ -0,0 +1,270 @@ +# Sessions + +Sessions are the boilerplate's default authentication mechanism. All built-in API routes use session auth. 
+ +## Protecting Routes + +Import the session dependencies and add them to your routes: + +```python +from typing import Annotated, Any +from fastapi import APIRouter, Depends + +from ...infrastructure.auth.session.dependencies import get_current_user + +router = APIRouter() + + +@router.get("/my-profile") +async def get_profile( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + return {"user_id": current_user["id"], "email": current_user["email"]} +``` + +If the request doesn't have a valid session, the boilerplate returns `401 Unauthorized`. + +### Available Dependencies + +All from `src/infrastructure/auth/session/dependencies.py`. + +**`get_current_user`** — Returns the authenticated user dict. Raises 401 if not authenticated. + +```python +@router.get("/dashboard") +async def dashboard( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + return {"welcome": current_user["username"]} +``` + +**`get_current_superuser`** — Same as `get_current_user`, plus checks `is_superuser=True`. Raises 403 if not a superuser. + +```python +@router.delete("/users/{user_id}") +async def delete_user( + user_id: int, + current_user: Annotated[dict[str, Any], Depends(get_current_superuser)], +) -> None: + # Only superusers reach this code + ... +``` + +**`get_optional_user`** — Returns the user dict if authenticated, `None` otherwise. Never raises. + +```python +@router.get("/products") +async def list_products( + current_user: Annotated[dict[str, Any] | None, Depends(get_optional_user)], +) -> list[dict[str, Any]]: + if current_user: + # Personalize for logged-in users + ... +``` + +**`get_current_session_data`** — Returns the full `SessionData` object (id, user_id, ip, device info, timestamps). Useful for endpoints like `/check-auth` that need to expose session metadata. 
+ +### Protecting Entire Routers + +Apply auth to every route in a router: + +```python +router = APIRouter( + prefix="/admin", + dependencies=[Depends(get_current_superuser)], +) + + +@router.get("/stats") +async def stats() -> dict[str, Any]: + # Already authenticated at the router level + ... +``` + +Note: router-level dependencies don't inject values into handlers. If you need the user object inside the handler, also add `Depends(get_current_user)` to that specific route. + +## How Sessions Work + +When a user hits `POST /api/v1/auth/login`: + +1. Login rate limiter checks IP+username (`LOGIN_MAX_ATTEMPTS` per `LOGIN_WINDOW_MINUTES`) +2. `authenticate_user(...)` validates the credentials +3. `SessionManager.create_session(...)` writes a record to the configured backend (Redis by default) +4. A new CSRF token is generated and bound to the session +5. Two cookies are set on the response: + - `session_id` — HTTP-only, the session identifier + - `csrf_token` — readable by JS, mirrors the CSRF token returned in the JSON body + +On every subsequent request, the session dependency: + +1. Reads `session_id` from cookies +2. Looks it up in the configured backend; rejects expired or missing sessions +3. For mutating requests (POST/PUT/DELETE/PATCH), validates the CSRF token if `CSRF_ENABLED=true` +4. Returns the user record (joined with the `Tier` relationship via `lazy="selectin"`) + +Logout (`POST /api/v1/auth/logout`) terminates the session record and clears the cookies. + +## CSRF Protection + +Session auth ships with CSRF protection. 
For non-GET requests, send the CSRF token via either: + +- The `csrf_token` cookie (browsers send it automatically), or +- The `X-CSRF-Token` header (typical for JS clients) + +```javascript +const csrfToken = getCookie('csrf_token'); + +await fetch('/api/v1/users/', { + method: 'POST', + credentials: 'include', // include cookies cross-origin + headers: { + 'X-CSRF-Token': csrfToken, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(data), +}); +``` + +Need a fresh token mid-session? Hit `POST /api/v1/auth/refresh-csrf` — it returns a new token and sets the cookie. + +For dev/test environments where CSRF gets in the way, set `CSRF_ENABLED=false`. + +## Device Tracking + +Sessions capture the IP address and parsed User-Agent fields. Inspect via the session dep: + +```python +from typing import Annotated, Any +from fastapi import Depends + +from src.infrastructure.auth.session.dependencies import get_current_session_data +from src.infrastructure.auth.session.schemas import SessionData + + +@router.get("/my-current-session") +async def my_session( + session_data: Annotated[SessionData, Depends(get_current_session_data)], +) -> dict[str, Any]: + return { + "ip": session_data.ip_address, + "user_agent": session_data.user_agent, + "device_info": session_data.device_info, # browser, os, is_mobile, etc. + "created_at": session_data.created_at, + "last_activity": session_data.last_activity, + } +``` + +This makes it straightforward to build "your active sessions" UIs or detect suspicious activity. + +## Login Rate Limiting + +Failed login attempts are tracked per IP+username. After `LOGIN_MAX_ATTEMPTS` failures within `LOGIN_WINDOW_MINUTES`, further attempts on `/api/v1/auth/login` are blocked. + +This happens automatically in the login route — you don't need to wire it up. The defaults (5 attempts in 15 minutes) are conservative; tune per your threat model. 
+ +## Session Limits + +Per-user concurrent session count is capped by `MAX_SESSIONS_PER_USER` (default 5). When a user logs in beyond this cap, the oldest session is terminated. + +## Storage Backends + +Sessions are stored server-side. Configure via `SESSION_BACKEND`: + +| Value | When to use | +|-------|-------------| +| `redis` *(default)* | Production. Supports key expiration, pattern scans for cleanup, persists across restarts | +| `memcached` | Production alternative — choose based on what your infrastructure already runs | +| `memory` | Tests only. Cleared on restart, not safe for multi-process deploys | + +Storage backends live in `src/infrastructure/auth/session/backends/`. + +## Configuration + +```env +# Backend +SESSION_BACKEND=redis + +# Lifetime +SESSION_TIMEOUT_MINUTES=30 # inactive sessions expire +SESSION_CLEANUP_INTERVAL_MINUTES=15 # how often the storage backend sweeps expired entries +SESSION_COOKIE_MAX_AGE=86400 # 1 day — total cookie lifetime + +# Per-user cap +MAX_SESSIONS_PER_USER=5 + +# Cookie security (HTTPS only) +SESSION_SECURE_COOKIES=true + +# CSRF +CSRF_ENABLED=true + +# Login rate limiting +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 +``` + +For development you'll typically set `SESSION_SECURE_COOKIES=false` and `CSRF_ENABLED=false` so cookies work over plain HTTP and curl/Postman aren't blocked. Re-enable both for staging and production. + +## Login & Logout Flow + +### Login + +```bash +curl -X POST http://localhost:8000/api/v1/auth/login \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=admin&password=your_admin_password" \ + -c cookies.txt +``` + +Response: + +```json +{ "csrf_token": "..." } +``` + +The HTTP-only `session_id` cookie is now in `cookies.txt`. The CSRF token is also set as a cookie *and* returned in the body so JS clients can store it (browsers can't read HTTP-only cookies). 
+
+### Authenticated Request
+
+```bash
+curl http://localhost:8000/api/v1/users/me -b cookies.txt
+```
+
+For mutating requests, add the CSRF header:
+
+```bash
+curl -X POST http://localhost:8000/api/v1/users/ \
+  -b cookies.txt \
+  -H "Content-Type: application/json" \
+  -H "X-CSRF-Token: <csrf-token>" \
+  -d '{"name": "...", "username": "...", "email": "...", "password": "..."}'
+```
+
+### Refresh CSRF Token
+
+```bash
+curl -X POST http://localhost:8000/api/v1/auth/refresh-csrf -b cookies.txt
+```
+
+### Logout
+
+```bash
+curl -X POST http://localhost:8000/api/v1/auth/logout -b cookies.txt
+```
+
+Terminates the session and clears the cookies.
+
+## Key Files
+
+| Component | Location |
+|-----------|----------|
+| Dependencies | `backend/src/infrastructure/auth/session/dependencies.py` |
+| Session manager | `backend/src/infrastructure/auth/session/manager.py` |
+| Storage backends | `backend/src/infrastructure/auth/session/backends/` |
+| Schemas | `backend/src/infrastructure/auth/session/schemas.py` |
+| Login/logout routes | `backend/src/infrastructure/auth/routes.py` |
+| Auth settings | `backend/src/infrastructure/config/settings.py` (`AuthSettings`) |
+
+---
+
+[← Authentication Overview](index.md){ .md-button } [User Management →](user-management.md){ .md-button .md-button--primary }
diff --git a/docs/user-guide/authentication/user-management.md b/docs/user-guide/authentication/user-management.md
index af2be650..bd06ae93 100644
--- a/docs/user-guide/authentication/user-management.md
+++ b/docs/user-guide/authentication/user-management.md
@@ -1,879 +1,484 @@
 # User Management
 
-User management forms the core of any authentication system, handling everything from user registration and login to profile updates and account deletion. This section covers the complete user lifecycle with secure authentication flows and administrative operations.
- -## Understanding User Lifecycle - -The user lifecycle in the boilerplate follows a secure, well-defined process that protects user data while providing a smooth experience. Understanding this flow helps you customize the system for your specific needs. - -**Registration → Authentication → Profile Management → Administrative Operations** - -Each stage has specific security considerations and business logic that ensure data integrity and user safety. - -## User Registration - -User registration is the entry point to your application. The process must be secure, user-friendly, and prevent common issues like duplicate accounts or weak passwords. +User management covers the full lifecycle: registration, authentication, profile updates, and deletion. This page documents the endpoints and patterns the boilerplate ships with. + +## Endpoints at a Glance + +All under `/api/v1/users/` (defined in `modules/user/routes.py`): + +| Method | Path | Description | Auth | +|--------|------|-------------|------| +| `POST` | `/api/v1/users/` | Create a new user | Open | +| `GET` | `/api/v1/users/` | Paginated list of users | Superuser | +| `GET` | `/api/v1/users/me` | Current user's profile | Session | +| `GET` | `/api/v1/users/{username}` | Get a user by username (active only) | Open | +| `GET` | `/api/v1/users/active-and-inactive/{username}` | Same as above, includes soft-deleted | Superuser | +| `PATCH` | `/api/v1/users/{username}` | Update profile (own or admin) | Session | +| `DELETE` | `/api/v1/users/{username}` | Soft-delete a user (own or admin) | Session | +| `DELETE` | `/api/v1/users/db/{username}` | GDPR anonymize (admin) | Superuser | +| `GET` | `/api/v1/users/{username}/rate-limits` | User's rate limits via tier | Session | +| `GET` | `/api/v1/users/{username}/tier` | User's tier details | Session | +| `PATCH` | `/api/v1/users/{username}/tier` | Change a user's tier | Superuser | + +Plus the auth endpoints under `/api/v1/auth/` documented in [Sessions](sessions.md). 
+ +## Registration + +`POST /api/v1/users/` is open — no auth required. Anyone can create an account. + +```bash +curl -X POST http://localhost:8000/api/v1/users/ \ + -H "Content-Type: application/json" \ + -d '{ + "name": "John Doe", + "username": "johndoe", + "email": "john@example.com", + "password": "Str1ngst!" + }' +``` -### Registration Process +The route delegates to `UserService.create`, which: -The registration endpoint performs several validation steps before creating a user account. This multi-step validation prevents common registration issues and ensures data quality. +1. Checks `email` is unique → raises `UserExistsError` if not (→ 409) +2. Checks `username` is unique → raises `UserExistsError` if not (→ 409) +3. Hashes the password with bcrypt via `get_password_hash` +4. Builds a `UserCreateInternal` (schema with `hashed_password` instead of `password`) +5. Persists via `crud_users.create` ```python -# User registration endpoint -@router.post("/user", response_model=UserRead, status_code=201) -async def write_user( - user: UserCreate, - db: AsyncSession -) -> UserRead: - # 1. Check if email exists - email_row = await crud_users.exists(db=db, email=user.email) - if email_row: - raise DuplicateValueException("Email is already registered") - - # 2. Check if username exists - username_row = await crud_users.exists(db=db, username=user.username) - if username_row: - raise DuplicateValueException("Username not available") - - # 3. Hash password - user_internal_dict = user.model_dump() - user_internal_dict["hashed_password"] = get_password_hash( - password=user_internal_dict["password"] - ) - del user_internal_dict["password"] - - # 4. 
Create user - user_internal = UserCreateInternal(**user_internal_dict) - created_user = await crud_users.create(db=db, object=user_internal) - - return created_user +# modules/user/service.py +async def create(self, user: UserCreate, db: AsyncSession) -> dict[str, Any]: + if await crud_users.exists(db=db, email=user.email): + raise UserExistsError("Email already registered") + if await crud_users.exists(db=db, username=user.username): + raise UserExistsError("Username already taken") + + payload = user.model_dump() + payload["hashed_password"] = get_password_hash(payload.pop("password")) + user_internal = UserCreateInternal(**payload) + + return await crud_users.create(db=db, object=user_internal, schema_to_select=UserRead) ``` -**Security Steps Explained:** - -1. **Email Uniqueness**: Prevents multiple accounts with the same email, which could cause confusion and security issues -2. **Username Uniqueness**: Ensures usernames are unique identifiers within your system -3. **Password Hashing**: Converts plain text passwords into secure hashes before database storage -4. **Data Separation**: Plain text passwords are immediately removed from memory after hashing - -### Registration Schema - -The registration schema defines what data is required and how it's validated. This ensures consistent data quality and prevents malformed user accounts. 
+The `UserCreate` schema enforces input validation: ```python -# User registration input class UserCreate(UserBase): model_config = ConfigDict(extra="forbid") - + password: Annotated[ str, Field( + min_length=8, pattern=r"^.{8,}|[0-9]+|[A-Z]+|[a-z]+|[^a-zA-Z0-9]+$", - examples=["Str1ngst!"] - ) + examples=["Str1ngst!"], + ), ] - -# Internal schema for database storage -class UserCreateInternal(UserBase): - hashed_password: str + # OAuth fields (filled when user signs up via Google) + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None ``` -**Schema Design Principles:** +`extra="forbid"` rejects any unknown fields the client tries to send — useful to keep clients honest. -- **`extra="forbid"`**: Rejects unexpected fields, preventing injection of unauthorized data -- **Password Patterns**: Enforces minimum security requirements for passwords -- **Separation of Concerns**: External schema accepts passwords, internal schema stores hashes +## Authentication -## User Authentication - -Authentication verifies user identity using credentials. The process must be secure against common attacks while remaining user-friendly. - -### Authentication Process +Authentication happens via `POST /api/v1/auth/login`. See [Sessions](sessions.md) for the full flow. The function that does the credential check is `authenticate_user`: ```python -async def authenticate_user(username_or_email: str, password: str, db: AsyncSession) -> dict | False: - # 1. 
Get user by email or username +# infrastructure/auth/session/dependencies.py +async def authenticate_user( + username_or_email: str, password: str, db: AsyncSession +) -> dict[str, Any] | None: + # Look up by email if "@" present, else username — both with is_deleted=False if "@" in username_or_email: - db_user = await crud_users.get(db=db, email=username_or_email, is_deleted=False) + user = await crud_users.get(db=db, email=username_or_email, is_deleted=False) else: - db_user = await crud_users.get(db=db, username=username_or_email, is_deleted=False) - - if not db_user: - return False - - # 2. Verify password - if not await verify_password(password, db_user["hashed_password"]): - return False - - return db_user + user = await crud_users.get(db=db, username=username_or_email, is_deleted=False) + + if not user: + return None + if not await verify_password(password, user["hashed_password"]): + return None + return user ``` -**Security Considerations:** +Two things to note: -- **Flexible Login**: Accepts both username and email for better user experience -- **Soft Delete Check**: `is_deleted=False` prevents deleted users from logging in -- **Consistent Timing**: Both user lookup and password verification take similar time +- **Username or email** — both forms work in the same field +- **Soft-deleted users can't log in** — `is_deleted=False` filters them out -### Password Security +### Password Hashing (bcrypt) -Password security is critical for protecting user accounts. The system uses industry-standard bcrypt hashing with automatic salt generation. 
+`infrastructure/auth/utils.py`: ```python import bcrypt + async def verify_password(plain_password: str, hashed_password: str) -> bool: - """Verify a plain password against its hash.""" - correct_password: bool = bcrypt.checkpw( - plain_password.encode(), - hashed_password.encode() - ) - return correct_password + return bcrypt.checkpw(plain_password.encode(), hashed_password.encode()) + def get_password_hash(password: str) -> str: - """Generate password hash with salt.""" - hashed_password: str = bcrypt.hashpw( - password.encode(), - bcrypt.gensalt() - ).decode() - return hashed_password + return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() ``` -**Why bcrypt?** +bcrypt handles salt generation automatically and is computationally expensive enough to defeat brute force at scale. -- **Adaptive Hashing**: Computationally expensive, making brute force attacks impractical -- **Automatic Salt**: Each password gets a unique salt, preventing rainbow table attacks -- **Future-Proof**: Can increase computational cost as hardware improves +## Profile Operations -### Login Validation +### Get Current User -Client-side validation provides immediate feedback but should never be the only validation layer. 
+```bash +curl http://localhost:8000/api/v1/users/me -b cookies.txt +``` + +Trivial route — `get_current_user` already returns the user dict: ```python -# Password validation pattern -PASSWORD_PATTERN = r"^.{8,}|[0-9]+|[A-Z]+|[a-z]+|[^a-zA-Z0-9]+$" - -# Frontend validation (example) -function validatePassword(password) { - const minLength = password.length >= 8; - const hasNumber = /[0-9]/.test(password); - const hasUpper = /[A-Z]/.test(password); - const hasLower = /[a-z]/.test(password); - const hasSpecial = /[^a-zA-Z0-9]/.test(password); - - return minLength && hasNumber && hasUpper && hasLower && hasSpecial; -} +@router.get("/me", response_model=UserRead) +async def get_current_user_profile( + current_user: Annotated[dict[str, Any], Depends(get_current_user)], +) -> dict[str, Any]: + return current_user ``` -**Validation Strategy:** - -- **Server-Side**: Always validate on the server - client validation can be bypassed -- **Client-Side**: Provides immediate feedback for better user experience -- **Progressive**: Validate as user types to catch issues early +### Get User by Username -## Profile Management +Public endpoint — no auth required. Filters out soft-deleted users. -Profile management allows users to update their information while maintaining security and data integrity. +```bash +curl http://localhost:8000/api/v1/users/johndoe +``` -### Get Current User Profile +Returns 404 if not found or soft-deleted. The admin-only `/active-and-inactive/{username}` variant returns soft-deleted users too. -Retrieving the current user's profile is a fundamental operation that should be fast and secure. +### Update Profile -```python -@router.get("/user/me/", response_model=UserRead) -async def read_users_me(current_user: dict = Depends(get_current_user)) -> dict: - return current_user +Users can update their own profile; superusers can update anyone's. Tier updates are gated on a separate endpoint (see [Permissions](permissions.md)). 
-# Frontend usage -async function getCurrentUser() { - const token = localStorage.getItem('access_token'); - const response = await fetch('/api/v1/user/me/', { - headers: { - 'Authorization': `Bearer ${token}` - } - }); - - if (response.ok) { - return await response.json(); - } - throw new Error('Failed to get user profile'); -} +```bash +curl -X PATCH http://localhost:8000/api/v1/users/johndoe \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: " \ + -d '{"name": "John Updated"}' ``` -**Design Decisions:** +The service enforces the ownership rule: -- **`/me` Endpoint**: Common pattern that's intuitive for users and developers -- **Current User Dependency**: Automatically handles authentication and user lookup -- **Minimal Data**: Returns only safe, user-relevant information +```python +# modules/user/service.py +async def verify_user_permission( + self, current_user: dict[str, Any], target_username: str, action: str, +) -> None: + if current_user["username"] != target_username and not current_user["is_superuser"]: + raise PermissionDeniedError(f"Cannot {action} for another user") +``` -### Update User Profile +If the body changes `username` or `email`, the service also re-checks uniqueness. -Profile updates require careful validation to prevent unauthorized changes and maintain data integrity. +The `UserUpdate` schema makes every field optional so clients can send partial updates: ```python -@router.patch("/user/{username}") -async def patch_user( - values: UserUpdate, - username: str, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db), -) -> dict[str, str]: - # 1. Get user from database - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - # 2. 
Check ownership (users can only update their own profile) - if db_user["username"] != current_user["username"]: - raise ForbiddenException("Cannot update other users") - - # 3. Validate unique constraints - if values.username and values.username != db_user["username"]: - existing_username = await crud_users.exists(db=db, username=values.username) - if existing_username: - raise DuplicateValueException("Username not available") - - if values.email and values.email != db_user["email"]: - existing_email = await crud_users.exists(db=db, email=values.email) - if existing_email: - raise DuplicateValueException("Email is already registered") - - # 4. Update user - await crud_users.update(db=db, object=values, username=username) - return {"message": "User updated"} +class UserUpdate(BaseModel): + model_config = ConfigDict(extra="forbid") + + name: Annotated[str | None, Field(min_length=2, max_length=30, default=None)] + username: Annotated[ + str | None, + Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", default=None), + ] + email: Annotated[EmailStr | None, Field(default=None)] + profile_image_url: Annotated[ + str | None, + Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", default=None), + ] ``` -**Security Measures:** +## Deletion -1. **Ownership Verification**: Users can only update their own profiles -2. **Uniqueness Checks**: Prevents conflicts when changing username/email -3. **Partial Updates**: Only provided fields are updated -4. **Input Validation**: Pydantic schemas validate all input data +The boilerplate distinguishes three deletion modes — pick based on what the request actually wants. -## User Deletion +### Soft Delete -User deletion requires careful consideration of data retention, user rights, and system integrity. +`DELETE /api/v1/users/{username}` — sets `is_deleted=True` and `deleted_at=now()`. The row stays in the database; the user can no longer log in but their data is preserved. 
-### Self-Deletion +```bash +curl -X DELETE http://localhost:8000/api/v1/users/johndoe \ + -b cookies.txt \ + -H "X-CSRF-Token: " +``` -Users should be able to delete their own accounts, but the process should be secure and potentially reversible. +Permission rules: -```python -@router.delete("/user/{username}") -async def erase_user( - username: str, - current_user: dict = Depends(get_current_user), - db: AsyncSession = Depends(async_get_db), - token: str = Depends(oauth2_scheme), -) -> dict[str, str]: - # 1. Get user from database - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if not db_user: - raise NotFoundException("User not found") - - # 2. Check ownership - if username != current_user["username"]: - raise ForbiddenException() - - # 3. Soft delete user - await crud_users.delete(db=db, username=username) - - # 4. Blacklist current token - await blacklist_token(token=token, db=db) - - return {"message": "User deleted"} -``` +- A user can soft-delete their own account +- A superuser can soft-delete anyone -**Soft Delete Benefits:** +### Hard Delete (database) -- **Data Recovery**: Users can be restored if needed -- **Audit Trail**: Maintain records for compliance -- **Relationship Integrity**: Related data (posts, comments) remain accessible -- **Gradual Cleanup**: Allow time for data migration or backup +There's no public hard-delete endpoint by design — deleting rows from `user` would orphan all related data (sessions, API keys, etc.). If you really need it, use FastCRUD's `crud_users.db_delete(...)` from a script or admin task with full understanding of the foreign-key impact. -### Admin Deletion (Hard Delete) +### GDPR Anonymization -Administrators may need to permanently remove users in specific circumstances. +`DELETE /api/v1/users/db/{username}` — superuser only. Replaces PII with neutral values while keeping the row (and therefore foreign-key relationships) intact. 
-```python -@router.delete("/db_user/{username}", dependencies=[Depends(get_current_superuser)]) -async def erase_db_user( - username: str, - db: AsyncSession = Depends(async_get_db), - token: str = Depends(oauth2_scheme), -) -> dict[str, str]: - # 1. Check if user exists - db_user = await crud_users.exists(db=db, username=username) - if not db_user: - raise NotFoundException("User not found") - - # 2. Hard delete from database - await crud_users.db_delete(db=db, username=username) - - # 3. Blacklist current token - await blacklist_token(token=token, db=db) - - return {"message": "User deleted from the database"} +```bash +curl -X DELETE http://localhost:8000/api/v1/users/db/johndoe \ + -b superuser_cookies.txt \ + -H "X-CSRF-Token: " ``` -**When to Use Hard Delete:** +Service implementation: -- **Legal Requirements**: GDPR "right to be forgotten" requests -- **Data Breach Response**: Complete removal of compromised accounts -- **Spam/Abuse**: Permanent removal of malicious accounts +```python +async def anonymize_user(self, user_id: int, db: AsyncSession) -> None: + await crud_users.update( + db=db, + object=UserAnonymize( + name="DELETED USER", + username=f"deleted-{user_id}-{uuid4().hex[:8]}", + hashed_password=None, + profile_image_url=None, + tier_id=None, + is_superuser=False, + google_id=None, + github_id=None, + oauth_provider=None, + email_verified=False, + oauth_created_at=None, + oauth_updated_at=None, + ), + id=user_id, + ) +``` + +Email is intentionally retained for legal compliance purposes (audit trail, "right to be forgotten" doesn't always apply if the platform is required to keep records). 
## Administrative Operations ### List All Users -```python -@router.get("/users", response_model=PaginatedListResponse[UserRead]) -async def read_users( - db: AsyncSession = Depends(async_get_db), - page: int = 1, - items_per_page: int = 10 -) -> dict: - users_data = await crud_users.get_multi( - db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - is_deleted=False, - ) - - response: dict[str, Any] = paginated_response( - crud_data=users_data, - page=page, - items_per_page=items_per_page - ) - return response -``` +`GET /api/v1/users/` — superuser only, paginated. -### Get User by Username +```bash +curl "http://localhost:8000/api/v1/users/?page=1&items_per_page=10" \ + -b superuser_cookies.txt +``` -```python -@router.get("/user/{username}", response_model=UserRead) -async def read_user( - username: str, - db: AsyncSession = Depends(async_get_db) -) -> UserRead: - db_user = await crud_users.get( - db=db, - username=username, - is_deleted=False, - schema_to_select=UserRead - ) - if db_user is None: - raise NotFoundException("User not found") - - return db_user +Response shape (via `paginated_response`): + +```json +{ + "data": [ + { "id": 1, "name": "Admin User", "username": "admin", "email": "admin@example.com", ... } + ], + "total_count": 42, + "has_more": true, + "page": 1, + "items_per_page": 10 +} ``` -### User with Tier Information +See [Pagination](../api/pagination.md) for the full pattern. -```python -@router.get("/user/{username}/tier") -async def read_user_tier( - username: str, - db: AsyncSession = Depends(async_get_db) -) -> dict | None: - # 1. Get user - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - # 2. Return None if no tier assigned - if db_user["tier_id"] is None: - return None - - # 3. 
Get tier information - db_tier = await crud_tiers.get(db=db, id=db_user["tier_id"], schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - # 4. Combine user and tier data - user_dict = dict(db_user) # Convert to dict if needed - tier_dict = dict(db_tier) # Convert to dict if needed - - for key, value in tier_dict.items(): - user_dict[f"tier_{key}"] = value - - return user_dict +### View a User's Tier + +```bash +curl http://localhost:8000/api/v1/users/johndoe/tier -b cookies.txt ``` -## User Tiers and Permissions +Returns the user record joined with their tier. Permission: own profile or superuser. -### Assign User Tier +### Change a User's Tier -```python -@router.patch("/user/{username}/tier", dependencies=[Depends(get_current_superuser)]) -async def patch_user_tier( - username: str, - values: UserTierUpdate, - db: AsyncSession = Depends(async_get_db) -) -> dict[str, str]: - # 1. Verify user exists - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - # 2. Verify tier exists - tier_exists = await crud_tiers.exists(db=db, id=values.tier_id) - if not tier_exists: - raise NotFoundException("Tier not found") - - # 3. Update user tier - await crud_users.update(db=db, object=values, username=username) - return {"message": "User tier updated"} - -# Tier update schema -class UserTierUpdate(BaseModel): - tier_id: int +`PATCH /api/v1/users/{username}/tier` — superuser only. + +```bash +curl -X PATCH http://localhost:8000/api/v1/users/johndoe/tier \ + -b superuser_cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: " \ + -d '{"tier_id": 2}' ``` -### User Rate Limits +The service verifies the tier exists before assigning it. 
-```python -@router.get("/user/{username}/rate_limits", dependencies=[Depends(get_current_superuser)]) -async def read_user_rate_limits( - username: str, - db: AsyncSession = Depends(async_get_db) -) -> dict[str, Any]: - # 1. Get user - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - user_dict = dict(db_user) # Convert to dict if needed - - # 2. No tier assigned - if db_user["tier_id"] is None: - user_dict["tier_rate_limits"] = [] - return user_dict - - # 3. Get tier and rate limits - db_tier = await crud_tiers.get(db=db, id=db_user["tier_id"], schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - db_rate_limits = await crud_rate_limits.get_multi(db=db, tier_id=db_tier["id"]) - user_dict["tier_rate_limits"] = db_rate_limits["data"] - - return user_dict +### View a User's Rate Limits + +```bash +curl http://localhost:8000/api/v1/users/johndoe/rate-limits -b cookies.txt ``` -## User Model Structure +Returns the rate limits configured for the user's tier. Permission: own profile or superuser. + +## User Model Reference -### Database Model +The actual model lives in `modules/user/models.py`. 
Trimmed: ```python -class User(Base): +class User(Base, TimestampMixin, SoftDeleteMixin): __tablename__ = "user" - - id: Mapped[int] = mapped_column(primary_key=True) + + id: Mapped[int] = mapped_column( + "id", autoincrement=True, nullable=False, unique=True, + primary_key=True, init=False, + ) name: Mapped[str] = mapped_column(String(30)) username: Mapped[str] = mapped_column(String(20), unique=True, index=True) email: Mapped[str] = mapped_column(String(50), unique=True, index=True) - hashed_password: Mapped[str] - profile_image_url: Mapped[str] = mapped_column(default="https://www.profileimageurl.com") - is_superuser: Mapped[bool] = mapped_column(default=False) - tier_id: Mapped[int | None] = mapped_column(ForeignKey("tier.id"), default=None) - - # Timestamps - created_at: Mapped[datetime] = mapped_column(default=datetime.utcnow) - updated_at: Mapped[datetime | None] = mapped_column(default=None) - - # Soft delete - is_deleted: Mapped[bool] = mapped_column(default=False) - deleted_at: Mapped[datetime | None] = mapped_column(default=None) - - # Relationships - tier: Mapped["Tier"] = relationship(back_populates="users") - posts: Mapped[list["Post"]] = relationship(back_populates="created_by_user") -``` + hashed_password: Mapped[str] = mapped_column(String(100)) + profile_image_url: Mapped[str] = mapped_column( + String, default="https://profileimageurl.com", + ) -### User Schemas + tier_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("tiers.id"), index=True, default=None, + ) + tier: Mapped["Tier | None"] = relationship( + "Tier", back_populates="users", lazy="selectin", init=False, + ) -```python -# Base schema with common fields -class UserBase(BaseModel): - name: Annotated[str, Field(min_length=2, max_length=30)] - username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$")] - email: Annotated[EmailStr, Field(examples=["user@example.com"])] - -# Reading user data (API responses) -class UserRead(BaseModel): - id: int - name: 
str - username: str - email: str - profile_image_url: str - tier_id: int | None - -# Full user data (internal use) -class User(TimestampSchema, UserBase, UUIDSchema, PersistentDeletion): - profile_image_url: str = "https://www.profileimageurl.com" - hashed_password: str - is_superuser: bool = False - tier_id: int | None = None -``` + is_superuser: Mapped[bool] = mapped_column(default=False) -## Common User Operations + # OAuth (filled when user signs in via Google/GitHub) + google_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + github_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + oauth_provider: Mapped[str | None] = mapped_column(String(20), default=None) + email_verified: Mapped[bool] = mapped_column(default=False) +``` -### Check User Existence +Mixins from `infrastructure/database/models`: -```python -# By email -email_exists = await crud_users.exists(db=db, email="user@example.com") +- `TimestampMixin` — `created_at`, `updated_at` +- `SoftDeleteMixin` — `is_deleted`, `deleted_at` -# By username -username_exists = await crud_users.exists(db=db, username="johndoe") +Table name is **`user`** (singular). 
-# By ID -user_exists = await crud_users.exists(db=db, id=123) -``` +## Common CRUD Tasks -### Search Users +The same FastCRUD operations described in [CRUD Operations](../database/crud.md) work on users: ```python -# Get active users only -active_users = await crud_users.get_multi( - db=db, - is_deleted=False, - limit=10 -) - -# Get users by tier -tier_users = await crud_users.get_multi( - db=db, - tier_id=1, - is_deleted=False -) - -# Get superusers -superusers = await crud_users.get_multi( - db=db, - is_superuser=True, - is_deleted=False -) -``` +from src.modules.user.crud import crud_users -### User Statistics +# Existence checks +await crud_users.exists(db=db, email="user@example.com") +await crud_users.exists(db=db, username="johndoe") -```python -async def get_user_stats(db: AsyncSession) -> dict: - # Total users - total_users = await crud_users.count(db=db, is_deleted=False) - - # Active users (logged in recently) - # This would require tracking last_login_at - - # Users by tier - tier_stats = {} - tiers = await crud_tiers.get_multi(db=db) - for tier in tiers["data"]: - count = await crud_users.count(db=db, tier_id=tier["id"], is_deleted=False) - tier_stats[tier["name"]] = count - - return { - "total_users": total_users, - "tier_distribution": tier_stats - } +# Counts +total_active = await crud_users.count(db=db, is_deleted=False) +admin_count = await crud_users.count(db=db, is_superuser=True) + +# Filtered queries +result = await crud_users.get_multi(db=db, tier_id=1, is_deleted=False, limit=20) + +# Search by username substring +result = await crud_users.get_multi(db=db, username__icontains="ad") ``` ## Frontend Integration -### Complete User Management Component +Use cookies, not bearer tokens. 
The browser will send the session cookie automatically as long as you set `credentials: 'include'`: ```javascript -class UserManager { +class UserClient { constructor(baseUrl = '/api/v1') { this.baseUrl = baseUrl; - this.token = localStorage.getItem('access_token'); + this.csrfToken = null; } - + async register(userData) { - const response = await fetch(`${this.baseUrl}/user`, { + const res = await fetch(`${this.baseUrl}/users/`, { method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(userData) + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(userData), }); - - if (!response.ok) { - const error = await response.json(); - throw new Error(error.detail); - } - - return await response.json(); + if (!res.ok) throw new Error((await res.json()).detail); + return await res.json(); } - + async login(username, password) { - const response = await fetch(`${this.baseUrl}/login`, { + const res = await fetch(`${this.baseUrl}/auth/login`, { method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, - body: new URLSearchParams({ - username: username, - password: password - }) + credentials: 'include', + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + body: new URLSearchParams({ username, password }), }); - - if (!response.ok) { - const error = await response.json(); - throw new Error(error.detail); - } - - const tokens = await response.json(); - localStorage.setItem('access_token', tokens.access_token); - this.token = tokens.access_token; - - return tokens; + if (!res.ok) throw new Error((await res.json()).detail); + const { csrf_token } = await res.json(); + this.csrfToken = csrf_token; + return csrf_token; } - + async getProfile() { - const response = await fetch(`${this.baseUrl}/user/me/`, { - headers: { - 'Authorization': `Bearer ${this.token}` - } + const res = await fetch(`${this.baseUrl}/users/me`, { + credentials: 'include', }); - - if (!response.ok) { - 
throw new Error('Failed to get profile'); - } - - return await response.json(); + if (!res.ok) throw new Error('Failed to get profile'); + return await res.json(); } - + async updateProfile(username, updates) { - const response = await fetch(`${this.baseUrl}/user/${username}`, { + const res = await fetch(`${this.baseUrl}/users/${username}`, { method: 'PATCH', + credentials: 'include', headers: { - 'Authorization': `Bearer ${this.token}`, - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', + 'X-CSRF-Token': this.csrfToken, }, - body: JSON.stringify(updates) + body: JSON.stringify(updates), }); - - if (!response.ok) { - const error = await response.json(); - throw new Error(error.detail); - } - - return await response.json(); + if (!res.ok) throw new Error((await res.json()).detail); + return await res.json(); } - + async deleteAccount(username) { - const response = await fetch(`${this.baseUrl}/user/${username}`, { + const res = await fetch(`${this.baseUrl}/users/${username}`, { method: 'DELETE', - headers: { - 'Authorization': `Bearer ${this.token}` - } + credentials: 'include', + headers: { 'X-CSRF-Token': this.csrfToken }, }); - - if (!response.ok) { - const error = await response.json(); - throw new Error(error.detail); - } - - // Clear local storage - localStorage.removeItem('access_token'); - this.token = null; - - return await response.json(); + if (!res.ok) throw new Error((await res.json()).detail); + this.csrfToken = null; + return await res.json(); } - + async logout() { - const response = await fetch(`${this.baseUrl}/logout`, { + await fetch(`${this.baseUrl}/auth/logout`, { method: 'POST', - headers: { - 'Authorization': `Bearer ${this.token}` - } + credentials: 'include', + headers: { 'X-CSRF-Token': this.csrfToken }, }); - - // Clear local storage regardless of response - localStorage.removeItem('access_token'); - this.token = null; - - if (response.ok) { - return await response.json(); - } + this.csrfToken = null; } } - -// Usage 
-const userManager = new UserManager(); - -// Register new user -try { - const user = await userManager.register({ - name: "John Doe", - username: "johndoe", - email: "john@example.com", - password: "SecurePass123!" - }); - console.log('User registered:', user); -} catch (error) { - console.error('Registration failed:', error.message); -} - -// Login -try { - const tokens = await userManager.login('johndoe', 'SecurePass123!'); - console.log('Login successful'); - - // Get profile - const profile = await userManager.getProfile(); - console.log('User profile:', profile); -} catch (error) { - console.error('Login failed:', error.message); -} ``` -## Security Considerations - -### Input Validation - -```python -# Server-side validation -class UserCreate(UserBase): - password: Annotated[ - str, - Field( - min_length=8, - pattern=r"^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*?&])[A-Za-z\d@$!%*?&]", - description="Password must contain uppercase, lowercase, number, and special character" - ) - ] -``` - -### Rate Limiting - -```python -# Protect registration endpoint -@router.post("/user", dependencies=[Depends(rate_limiter_dependency)]) -async def write_user(user: UserCreate, db: AsyncSession): - # Registration logic - pass - -# Protect login endpoint -@router.post("/login", dependencies=[Depends(rate_limiter_dependency)]) -async def login_for_access_token(): - # Login logic - pass -``` - -### Data Sanitization - -```python -def sanitize_user_input(user_data: dict) -> dict: - """Sanitize user input to prevent XSS and injection.""" - import html - - sanitized = {} - for key, value in user_data.items(): - if isinstance(value, str): - # HTML escape - sanitized[key] = html.escape(value.strip()) - else: - sanitized[key] = value - - return sanitized -``` +`credentials: 'include'` makes the browser send/store cookies cross-origin — pair this with `CORS_ALLOW_CREDENTIALS=true` and an explicit `CORS_ORIGINS` list (no `*`) on the server. 
-## Next Steps +## Security Considerations -Now that you understand user management: +### Server-side validation -1. **[Permissions](permissions.md)** - Learn about role-based access control and authorization -2. **[Production Guide](../production.md)** - Implement production-grade security measures -3. **[JWT Tokens](jwt-tokens.md)** - Review token management if needed +All input validation runs server-side via Pydantic schemas. Client-side checks are nice for UX but don't replace server validation. -User management provides the core functionality for authentication systems. Master these patterns before implementing advanced permission systems. +### Login rate limiting -## Common Authentication Tasks +The login endpoint is automatically rate-limited via `LOGIN_MAX_ATTEMPTS` per `LOGIN_WINDOW_MINUTES`. See [Sessions](sessions.md#login-rate-limiting). -### Protect New Endpoints +### Generic auth error messages -```python -# Add authentication dependency to your router -@router.get("/my-endpoint") -async def my_endpoint(current_user: dict = Depends(get_current_user)): - # Endpoint now requires authentication - return {"user_specific_data": f"Hello {current_user['username']}"} - -# Optional authentication for public endpoints -@router.get("/public-endpoint") -async def public_endpoint(user: dict | None = Depends(get_optional_user)): - if user: - return {"message": f"Hello {user['username']}", "premium_features": True} - return {"message": "Hello anonymous user", "premium_features": False} -``` - -### Complete Authentication Flow +`POST /api/v1/auth/login` returns "Incorrect username or password" for both wrong username and wrong password — never reveal which one was wrong. -```python -# 1. User registration -user_data = UserCreate( - name="John Doe", - username="johndoe", - email="john@example.com", - password="SecurePassword123!" -) -user = await crud_users.create(db=db, object=user_data) - -# 2. 
User login -form_data = {"username": "johndoe", "password": "SecurePassword123!"} -user = await authenticate_user(form_data["username"], form_data["password"], db) - -# 3. Token generation (handled in login endpoint) -access_token = await create_access_token(data={"sub": user["username"]}) -refresh_token = await create_refresh_token(data={"sub": user["username"]}) - -# 4. API access with token -headers = {"Authorization": f"Bearer {access_token}"} -response = requests.get("/api/v1/users/me", headers=headers) - -# 5. Token refresh when access token expires -response = requests.post("/api/v1/refresh") # Uses refresh token cookie -new_access_token = response.json()["access_token"] - -# 6. Secure logout (blacklists both tokens) -await logout_user(access_token=access_token, refresh_token=refresh_token, db=db) -``` +### Soft delete for accounts -### Check User Permissions +The default `DELETE /api/v1/users/{username}` is a soft delete. Hard deletion only for GDPR-style requests, with anonymization preserving FK integrity. 
-```python -def check_user_permission(user: dict, required_tier: str = None): - """Check if user has required permissions.""" - if not user.get("is_active", True): - raise UnauthorizedException("User account is disabled") - - if required_tier and user.get("tier", {}).get("name") != required_tier: - raise ForbiddenException(f"Requires {required_tier} tier") - -# Usage in endpoint -@router.get("/premium-feature") -async def premium_feature(current_user: dict = Depends(get_current_user)): - check_user_permission(current_user, "Pro") - return {"premium_data": "exclusive_content"} -``` - -### Custom Authentication Logic - -```python -async def get_user_with_posts(current_user: dict = Depends(get_current_user)): - """Custom dependency that adds user's posts.""" - posts = await crud_posts.get_multi(db=db, created_by_user_id=current_user["id"]) - current_user["posts"] = posts - return current_user +## Next Steps -# Usage -@router.get("/dashboard") -async def get_dashboard(user_with_posts: dict = Depends(get_user_with_posts)): - return { - "user": user_with_posts, - "post_count": len(user_with_posts["posts"]) - } -``` \ No newline at end of file +1. **[Permissions](permissions.md)** — Role-based access control patterns +2. **[Sessions](sessions.md)** — Cookie / CSRF / session lifecycle +3. **[Production Guide](../production.md)** — Hardening checklist diff --git a/docs/user-guide/background-tasks/index.md b/docs/user-guide/background-tasks/index.md index 70c2a184..3ac533b3 100644 --- a/docs/user-guide/background-tasks/index.md +++ b/docs/user-guide/background-tasks/index.md @@ -1,92 +1,340 @@ # Background Tasks -The boilerplate includes a robust background task system built on ARQ (Async Redis Queue) for handling long-running operations asynchronously. This enables your API to remain responsive while processing intensive tasks in the background. 
+The boilerplate runs background work with [Taskiq](https://taskiq-python.github.io/) — an async-native task queue with a Celery-like API and pluggable brokers. By default it runs against Redis, with RabbitMQ available as an alternative. -## Overview +This page covers the actual setup that ships in `backend/src/infrastructure/taskiq/`, how to write and enqueue tasks, and how to run a worker. -Background tasks are essential for operations that: +## When to Use a Background Task -- **Take longer than 2 seconds** to complete -- **Don't block user interactions** in your frontend -- **Can be processed asynchronously** without immediate user feedback -- **Require intensive computation** or external API calls +Reach for a task when an operation: -## Quick Example +- Takes longer than a couple of seconds (image processing, PDF generation, large data exports) +- Calls an unreliable third party (don't make your API's latency depend on someone else's API) +- Sends an email, dispatches a webhook, or fans out notifications +- Recomputes an expensive aggregate that doesn't need to be live -```python -# Define a background task -async def send_welcome_email(ctx: Worker, user_id: int, email: str) -> str: - # Send email logic here - await send_email_service(email, "Welcome!") - return f"Welcome email sent to {email}" - -# Enqueue the task from an API endpoint -@router.post("/users/", response_model=UserRead) -async def create_user(user_data: UserCreate): - # Create user in database - user = await crud_users.create(db=db, object=user_data) - - # Queue welcome email in background - await queue.pool.enqueue_job("send_welcome_email", user["id"], user["email"]) - - return user -``` - -## Architecture - -### ARQ Worker System -- **Redis-Based**: Uses Redis as the message broker for job queues -- **Async Processing**: Fully asynchronous task execution -- **Worker Pool**: Multiple workers can process tasks concurrently -- **Job Persistence**: Tasks survive application restarts - -### Task 
Lifecycle -1. **Enqueue**: Tasks are added to Redis queue from API endpoints -2. **Processing**: ARQ workers pick up and execute tasks -3. **Results**: Task results are stored and can be retrieved -4. **Monitoring**: Track task status and execution history - -## Key Features - -**Scalable Processing** -- Multiple worker instances for high throughput -- Automatic load balancing across workers -- Configurable concurrency per worker - -**Reliable Execution** -- Task retry mechanisms for failed jobs -- Dead letter queues for problematic tasks -- Graceful shutdown and task cleanup - -**Database Integration** -- Shared database sessions with main application -- CRUD operations available in background tasks -- Transaction management and error handling - -## Common Use Cases - -- **Email Processing**: Welcome emails, notifications, newsletters -- **File Operations**: Image processing, PDF generation, file uploads -- **External APIs**: Third-party integrations, webhooks, data sync -- **Data Processing**: Report generation, analytics, batch operations -- **ML/AI Tasks**: Model inference, data analysis, predictions - -## Getting Started - -The boilerplate provides everything needed to start using background tasks immediately. Simply define your task functions, register them in the worker settings, and enqueue them from your API endpoints. +Don't reach for a task when the operation needs to surface a result to the user immediately or when the failure modes are user-correctable in real time. 
+ +## What Ships Out of the Box + +```text +backend/src/infrastructure/taskiq/ +├── __init__.py Exports default_broker, DBSession, register_task, task_registry +├── brokers.py Builds the Redis or RabbitMQ broker from settings +├── app.py Wires WORKER_STARTUP / WORKER_SHUTDOWN logging hooks +├── deps.py DBSession dependency (TaskiqDepends-wrapped AsyncSession) +├── registry.py Tiny in-process registry for monitoring +└── worker.py Worker entry point: `default_broker` +``` + +Importantly: **no example task ships in the boilerplate.** The infrastructure is wired up; the modules are yours to add. `register_task` and `task_registry` are available for your own bookkeeping but are optional. ## Configuration -Basic Redis queue configuration: +The relevant settings live in `TaskiqSettings` (`infrastructure/config/settings.py`) and read from `backend/.env`: + +```env +# Toggle and broker selection +TASKIQ_ENABLED=true +TASKIQ_BROKER_TYPE=redis # or "rabbitmq" + +# Redis broker (when TASKIQ_BROKER_TYPE=redis) +TASKIQ_REDIS_HOST=redis # use "localhost" without Docker +TASKIQ_REDIS_PORT=6379 +TASKIQ_REDIS_DB=3 # separate DB from CACHE / SESSION / RATE_LIMITER +TASKIQ_REDIS_PASSWORD= + +# RabbitMQ broker (when TASKIQ_BROKER_TYPE=rabbitmq) +TASKIQ_RABBITMQ_HOST=localhost +TASKIQ_RABBITMQ_PORT=5672 +TASKIQ_RABBITMQ_USER=guest +TASKIQ_RABBITMQ_PASSWORD=guest +TASKIQ_RABBITMQ_VHOST=/ + +# Worker tuning +TASKIQ_WORKER_CONCURRENCY=2 +TASKIQ_MAX_TASKS_PER_WORKER=1000 +``` + +The default `TASKIQ_REDIS_DB=3` keeps Taskiq isolated from Cache (DB 0), Session (DB 1), and Rate Limiter (DB 2) — so `redis-cli FLUSHDB` on one doesn't trash the others. + +If you pick `TASKIQ_BROKER_TYPE=rabbitmq`, make sure the optional broker dependency is installed: + +```bash +uv add taskiq-aio-pika +``` + +The boilerplate already ships it as a dependency, so this command is usually a no-op; the `aio_pika` import is gated at runtime to keep Redis-only deployments lean. + +## Writing a Task + +Tasks live alongside the module they belong to, e.g. `modules/widgets/tasks.py`. 
The shape: + +```python +# backend/src/modules/widgets/tasks.py +import logging +from typing import Any + +from ...infrastructure.taskiq import DBSession, default_broker + +logger = logging.getLogger(__name__) + + +@default_broker.task(task_name="widgets:rebuild_index") +async def rebuild_widget_index( + owner_id: int, + db: DBSession, +) -> dict[str, Any]: + """Recompute the search index for a single owner's widgets.""" + logger.info("Rebuilding widget index for owner %s", owner_id) + # ... do the work ... + return {"owner_id": owner_id, "indexed": 42} +``` + +A few things worth knowing: + +- **`task_name`** is optional but recommended. If you don't pass one, Taskiq uses `module.function_name` — fine for hobbyist setups, but a refactor that moves the function will silently break consumers. Pin a stable name. +- **`DBSession`** is the boilerplate's `Annotated[AsyncSession, TaskiqDepends(get_db_session)]`. Each task gets its own session backed by a `NullPool` engine — connections aren't shared with the API process and are closed at the end of the task. +- **Return values** can be retrieved via the result backend (Redis, by default). If you don't need the result, don't await it. +- **Logging** flows through your standard logger — there's no separate Taskiq logger to configure. + +### Importing Tasks for Discovery + +The Taskiq worker only knows about tasks whose modules have been imported. The cleanest pattern is to import every task module from a single entry point — usually wherever your `default_broker` lives or a dedicated `tasks/__init__.py`. + +```python +# backend/src/infrastructure/taskiq/__init__.py (or similar) +from src.modules.widgets import tasks as _widget_tasks # noqa: F401 +from src.modules.users import tasks as _user_tasks # noqa: F401 +``` + +Without these imports, `widgets:rebuild_index.kiq(...)` will queue the message but no worker will know how to execute it. 
+ +## Enqueuing a Task + +From a route handler, service method, or anywhere else in the app: + +```python +from .tasks import rebuild_widget_index + + +@router.post("/widgets/{owner_id}/reindex", status_code=202) +async def trigger_reindex(owner_id: int) -> dict[str, str]: + await rebuild_widget_index.kiq(owner_id=owner_id) + return {"status": "queued"} +``` + +`.kiq(...)` is Taskiq's enqueue method — it serializes the kwargs, drops the message on the broker, and returns an `AsyncTaskiqTask` handle. **Awaiting `.kiq(...)` only performs the enqueue** — the handle's result is never waited on in the typical "fire and forget" flow above. If you do want to wait for the result, see [Awaiting Results](#awaiting-results) below. + +A few important constraints: + +- **All kwargs must be JSON-serializable.** Pass IDs, not ORM objects. Pass dicts, not Pydantic models that contain `datetime` (or convert via `.model_dump(mode="json")` first). +- **Don't pass database sessions.** The task gets its own via `DBSession`. +- **Don't pass HTTP request objects.** They don't survive serialization, and tasks shouldn't need them. + +### Awaiting Results + +If you genuinely need the result of a task before responding (rare — usually you'd compute synchronously instead), you can await it: + +```python +result = await rebuild_widget_index.kiq(owner_id=owner_id) +value = await result.wait_result(timeout=30) +print(value.return_value) +``` + +This holds the API request open until the worker finishes. **Don't do this for slow tasks** — it defeats the purpose of using a queue. If a result is small and quick, return synchronously; if it's slow, return 202 and let the client poll. + +### Scheduled & Delayed Tasks + +Taskiq supports labels and a separate scheduler library (`taskiq-redis`'s scheduler source, `taskiq.scheduler.TaskiqScheduler`). The boilerplate doesn't ship a scheduler wired up — if you need cron-like scheduling, wire up `taskiq.scheduler.TaskiqScheduler` with a schedule source and run it with the `taskiq scheduler` command. 
For one-off delays: + +```python +await rebuild_widget_index.kicker().with_labels(delay=60).kiq(owner_id=owner_id) +``` + +(60-second delay before the worker picks it up, when supported by your broker — check the Taskiq docs for the labels you have.) + +## Running a Worker + +In development, run the worker in a separate terminal from the API: + +```bash +cd backend +uv run taskiq worker infrastructure.taskiq.worker:default_broker +``` + +In Docker Compose, add a worker service that runs the same command. The worker needs the same Redis (or RabbitMQ) and the same database the API uses. + +To tune concurrency: + +```bash +uv run taskiq worker infrastructure.taskiq.worker:default_broker --workers 4 +``` + +The `TASKIQ_WORKER_CONCURRENCY` env var configures the per-process concurrency; multiple `--workers` spawn additional processes. Pick the combination based on whether your tasks are I/O-bound (high concurrency, single process) or CPU-bound (multiple processes, low concurrency). + +### Reloading on Code Changes ```bash -# Redis Queue Settings -REDIS_QUEUE_HOST=localhost -REDIS_QUEUE_PORT=6379 +uv run taskiq worker infrastructure.taskiq.worker:default_broker --reload ``` -The system automatically handles Redis connection pooling and worker lifecycle management. +Helpful in development. Don't run with `--reload` in production. + +## Worker Lifecycle Hooks + +The boilerplate already wires Taskiq's `WORKER_STARTUP` and `WORKER_SHUTDOWN` events for logging in `infrastructure/taskiq/app.py`: + +```python +broker.add_event_handler(TaskiqEvents.WORKER_STARTUP, startup_taskiq_worker) +broker.add_event_handler(TaskiqEvents.WORKER_SHUTDOWN, shutdown_taskiq_worker) +``` + +You can register additional handlers in your own setup — initialize a third-party SDK, prime an in-memory cache, push a metrics counter on shutdown, etc. 
+ +```python +from taskiq import TaskiqEvents +from taskiq.state import TaskiqState + +from infrastructure.taskiq import default_broker + + +async def my_startup(state: TaskiqState) -> None: + state.metrics_client = await build_metrics_client() + + +default_broker.add_event_handler(TaskiqEvents.WORKER_STARTUP, my_startup) +``` + +The `state` object is shared across all tasks running in that worker process — useful for connection pools and clients that should be created once. + +## Error Handling and Retries + +Taskiq doesn't retry by default. If a task raises, the message is acknowledged and gone. To retry, use the built-in retry middleware: + +```python +from taskiq.middlewares import SimpleRetryMiddleware + +from infrastructure.taskiq import default_broker + +default_broker.add_middlewares(SimpleRetryMiddleware(default_retry_count=3)) +``` + +Add this in your bootstrap (alongside the existing `configure_broker_lifecycle` call). With the middleware loaded, you can mark individual tasks for retry: + +```python +@default_broker.task(retry_on_error=True, max_retries=3) +async def flaky_task(...): ... +``` + +For finer control (exponential backoff, dead-letter queues), check the [Taskiq middlewares docs](https://taskiq-python.github.io/guide/taskiq-middlewares.html). Whichever pattern you pick, **make tasks idempotent** — at-least-once delivery means the same task can run twice on partial failures. + +## Monitoring + +Taskiq doesn't ship a Flower-style dashboard, but you have a few options: + +- **`task_registry`** (in `infrastructure/taskiq/registry.py`) is an in-process record of registered tasks for sanity-checking. Call `task_registry.get_tasks()` to list everything the worker knows about. +- **Logs** — every task logs through your standard logger; flow them into your existing log aggregation. +- **Result backend** — Redis stores task results for the configured TTL; you can read them back or scan with `redis-cli`. 
+- **External tools** — Taskiq has community projects for Prometheus metrics and admin UIs; see the [Taskiq docs](https://taskiq-python.github.io/) for what's current. + +For most teams, structured logs plus alerting on error rates is enough. Add per-task counters to your existing metrics pipeline if you need finer visibility. + +## Common Patterns + +### Fan-Out + +Trigger N independent tasks from a single API call: + +```python +@router.post("/widgets/reindex-all") +async def reindex_all(owner_ids: list[int]) -> dict[str, int]: + for owner_id in owner_ids: + await rebuild_widget_index.kiq(owner_id=owner_id) + return {"queued": len(owner_ids)} +``` + +### Pipeline (Task Chains) + +When task B depends on task A's result, chain them inside the task itself rather than enqueuing A and waiting: + +```python +@default_broker.task(task_name="widgets:fetch_then_index") +async def fetch_then_index(owner_id: int, db: DBSession) -> dict[str, int]: + fetched = await fetch_remote_widgets(owner_id, db) + await rebuild_widget_index.kiq(owner_id=owner_id) + return {"fetched": fetched} +``` + +Avoid: `result = await task_a.kiq(...).wait_result(); await task_b.kiq(result, ...)` from a route handler — that holds the request open and serializes work that should be parallel. + +### Email and Notifications + +A canonical use case: push the heavy work into a task, return 202 from the API: + +```python +@default_broker.task(task_name="users:welcome_email") +async def send_welcome_email(user_id: int, db: DBSession) -> None: + user = await user_service.get_by_id(user_id, db) + await email_client.send(template="welcome", to=user["email"], context={...}) + + +# In the route: +new_user = await user_service.create(payload, db) +await send_welcome_email.kiq(user_id=new_user["id"]) +return new_user +``` + +The user is created synchronously; the email goes out from a worker. If the email service is down, the user account isn't blocked. 
+ +## Troubleshooting + +### "Task is queued but never runs" + +- Confirm the worker process is running and pointed at the same broker as your API +- Confirm the task's module is **imported** somewhere the worker bootstraps — Taskiq doesn't auto-discover tasks +- Check the worker logs for serialization errors on dequeue +- For Redis: `redis-cli LRANGE default 0 -1` (or your queue name) shows pending messages + +### "Worker can't import my task module" + +The worker imports the broker by module path. With the boilerplate's install layout (`[tool.setuptools.packages.find] where = ["src"]`), `infrastructure`, `modules`, etc. are top-level packages once you've run `uv sync` — so `infrastructure.taskiq.worker:default_broker` resolves cleanly. If you skipped install and are running from source, ensure `backend/src` is on `PYTHONPATH`. + +### "Database connection errors in tasks" + +Tasks use `DBSession`, which uses a separate engine with `poolclass=NullPool` (one connection per task, closed at the end). If you're seeing connection errors: + +- Check `DATABASE_URL` is set in the worker's environment + +- Make sure your Postgres `max_connections` accommodates both the API's pool and the worker's per-task connections (rough rule: `api_pool_size + worker_concurrency`) + +### "Tasks fail silently" + +Without a retry middleware, a failed task is acknowledged and gone. Either add retries (see above) or wrap your task body in a try/except that logs explicitly: + +```python +@default_broker.task(task_name="widgets:rebuild_index") +async def rebuild_widget_index(owner_id: int, db: DBSession) -> dict[str, Any]: + try: + ...  # your task body
+ except Exception: + logger.exception("Widget index rebuild failed for owner %s", owner_id) + raise +``` + +## Key Files + +| Component | Location | +|-----------------------|-----------------------------------------------------------| +| Broker factory | `backend/src/infrastructure/taskiq/brokers.py` | +| Worker entry point | `backend/src/infrastructure/taskiq/worker.py` | +| Lifecycle hooks | `backend/src/infrastructure/taskiq/app.py` | +| DB dependency | `backend/src/infrastructure/taskiq/deps.py` | +| Task registry | `backend/src/infrastructure/taskiq/registry.py` | +| Settings | `backend/src/infrastructure/config/settings.py` (`TaskiqSettings`) | ## Next Steps -Check the [ARQ documentation](https://arq-docs.helpmanual.io/) for advanced usage patterns and refer to the boilerplate's example implementation in `src/app/core/worker/` and `src/app/api/v1/tasks.py`. \ No newline at end of file +- **[Taskiq documentation](https://taskiq-python.github.io/)** — Authoritative reference for middlewares, schedulers, brokers +- **[Production](../production.md)** — Running the worker in production, scaling, supervision +- **[Caching → Cache Strategies](../caching/cache-strategies.md)** — Using Taskiq to schedule cache warming diff --git a/docs/user-guide/caching/cache-strategies.md b/docs/user-guide/caching/cache-strategies.md index bbdd5272..6ae9195e 100644 --- a/docs/user-guide/caching/cache-strategies.md +++ b/docs/user-guide/caching/cache-strategies.md @@ -1,191 +1,385 @@ # Cache Strategies -Effective cache strategies balance performance gains with data consistency. This section covers invalidation patterns, cache warming, and optimization techniques for building robust caching systems. +Caching is easy to add and easy to get wrong. This page collects the practical patterns the boilerplate supports — how to name keys, when to invalidate, how to layer TTLs against write patterns, and how to warm the cache without bolting on a separate scheduler. 
-## Cache Invalidation Strategies +All examples use the boilerplate's real APIs: the [`@cache` decorator](redis-cache.md) for endpoint-level caching, and the provider exports (`get`, `set`, `delete`, `delete_pattern`, `exists`, `clear`) from `src.infrastructure.cache` for everything else. -Cache invalidation is one of the hardest problems in computer science. The boilerplate provides several strategies to handle different scenarios while maintaining data consistency. +## Picking a Key Naming Scheme -### Understanding Cache Invalidation +The decorator generates keys as `{formatted_key_prefix}:{resource_id}`. Your job is to pick a `key_prefix` (and any `{kwarg}` placeholders) that: -**Cache invalidation** ensures that cached data doesn't become stale when the underlying data changes. Poor invalidation leads to users seeing outdated information, while over-aggressive invalidation negates caching benefits. +1. **Doesn't collide** with unrelated caches +2. **Matches your invalidation surface** — if you'll wipe by user, include the user identifier +3. 
**Reads cleanly** in `redis-cli KEYS '*'` while debugging -### Basic Invalidation Patterns - -#### Time-Based Expiration (TTL) - -The simplest strategy relies on cache expiration times: +Patterns the codebase encourages: ```python -# Set different TTL based on data characteristics -@cache(key_prefix="user_profile", expiration=3600) # 1 hour for profiles -@cache(key_prefix="post_content", expiration=1800) # 30 min for posts -@cache(key_prefix="live_stats", expiration=60) # 1 min for live data +# Simple resource by id +@cache(key_prefix="widget", resource_id_name="widget_id") +# → widget:42 + +# Per-user list, paginated +@cache(key_prefix="user_{user_id}_widgets:page_{page}:size_{items_per_page}", + resource_id_name="user_id") +# → user_5_widgets:page_1:size_10:5 + +# String IDs (usernames, slugs, hashed query strings) +@cache(key_prefix="user", resource_id_name="username", resource_id_type=str) +# → user:johndoe + +# Time-windowed analytics — bake the window into the prefix +@cache(key_prefix="analytics_{user_id}_30d", resource_id_name="report_id") +# → analytics_5_30d:summary ``` -**Pros:** +**Avoid** prefixes that: + +- Use raw resource IDs as the prefix (`{post_id}_comments`) — collisions are silent +- Include unbounded user input directly (`search:{raw_query}`) — hash long/free-text inputs first +- Mix unrelated resources at the same level (`data:42`) — debug nightmare + +## Invalidation Strategies + +There are essentially three invalidation strategies you'll combine: + +### 1. TTL Only ("eventually correct") + +Just expire the cache; never invalidate explicitly. -- Simple to implement and understand -- Guarantees cache freshness within TTL period -- Works well for data with predictable change patterns +```python +@cache(key_prefix="popular_widgets", expiration=300) # 5 minutes +async def get_popular(request: Request, ...): + ... +``` -**Cons:** +**When to use:** read-only or near-read-only data where 1–5 minutes of staleness is acceptable. 
Reference data (countries, tier definitions), aggregates (top-N lists), expensive computations whose inputs change rarely. -- May serve stale data until TTL expires -- Difficult to optimize TTL for all scenarios -- Cache miss storms when many keys expire simultaneously +**Don't use for:** anything a user just edited and expects to see immediately. -#### Write-Through Invalidation +### 2. Write-Through Invalidation ("strict consistency") -Automatically invalidate cache when data is modified: +Mutations on the same `(key_prefix, resource_id_name)` automatically delete the matching key. Add `to_invalidate_extra` for related caches: ```python -@router.put("/posts/{post_id}") +@router.patch("/{widget_id}") @cache( - key_prefix="post_cache", - resource_id_name="post_id", + key_prefix="widget", + resource_id_name="widget_id", to_invalidate_extra={ - "user_posts": "{user_id}", # User's post list - "category_posts": "{category_id}", # Category post list - "recent_posts": "global" # Global recent posts - } + "user_widgets": "owner_id", # invalidate the owner's list + "widget_count": "global", # invalidate the global counter + }, ) -async def update_post( +async def update_widget( request: Request, - post_id: int, - post_data: PostUpdate, - user_id: int, - category_id: int -): - # Update triggers automatic cache invalidation - updated_post = await crud_posts.update(db=db, id=post_id, object=post_data) - return updated_post + widget_id: int, + owner_id: int, + ..., +) -> dict[str, Any]: + return await widget_service.update(widget_id, values, db) ``` -**Pros:** +The decorator deletes the keys **after** the handler returns successfully — failed mutations don't touch the cache. + +**When to use:** mutations to a resource that's directly cached, plus a small fixed set of related caches (this user's list, the global count, etc). + +**Don't use for:** broad invalidations across many user-scoped lists — that's pattern-based territory. 
-- Immediate consistency when data changes -- No stale data served to users -- Precise control over what gets invalidated +### 3. Pattern-Based Invalidation ("blast radius") + +For wipes that touch many keys at once (paginated lists, search caches), use pattern matching: + +```python +@router.delete("/{widget_id}") +@cache( + key_prefix="widget", + resource_id_name="widget_id", + pattern_to_invalidate_extra=[ + "user_{owner_id}_widgets:*", # all paginated lists for this user + "widget_search:*", # all search-result caches + ], +) +async def delete_widget(request: Request, widget_id: int, owner_id: int, ...) -> None: + await widget_service.delete(widget_id, db) +``` -**Cons:** +!!! warning "Memcached doesn't support patterns" + `pattern_to_invalidate_extra` raises `PatternMatchingNotSupportedError` when `CACHE_BACKEND=memcached`. The non-pattern delete still happens. Use Redis if you need pattern-based invalidation. -- More complex implementation -- Can impact write performance -- Risk of over-invalidation +**When to use:** paginated or filtered lists where you don't know how many keys exist, search-result caches, anything where the prefix is a stable namespace. -### Advanced Invalidation Patterns +**Don't use for:** narrow invalidations — you're scanning Redis on every mutation, which is much more expensive than a single `DEL`. 
-#### Pattern-Based Invalidation +## Combining the Three -Use Redis pattern matching for bulk invalidation: +Real services usually mix all three: ```python -@router.put("/users/{user_id}/profile") +@router.put("/{widget_id}") @cache( - key_prefix="user_profile", - resource_id_name="user_id", - pattern_to_invalidate_extra=[ - "user_{user_id}_*", # All user-related caches - "*_user_{user_id}_*", # Caches containing this user - "leaderboard_*", # Leaderboards might change - "search_users_*" # User search results - ] + key_prefix="widget", + resource_id_name="widget_id", + expiration=900, # TTL fallback (15 min) + to_invalidate_extra={ # narrow related-key wipes + "widget_count": "global", + }, + pattern_to_invalidate_extra=[ # broad list wipes + "user_{owner_id}_widgets:*", + "widget_search:*", + ], ) -async def update_user_profile(request: Request, user_id: int, profile_data: ProfileUpdate): - await crud_users.update(db=db, id=user_id, object=profile_data) - return {"message": "Profile updated"} +async def update_widget(request: Request, widget_id: int, owner_id: int, ...) -> dict[str, Any]: + return await widget_service.update(widget_id, values, db) +``` + +The TTL is your safety net — even if you forget an invalidation, the cache self-heals within 15 minutes. + +## Cache Aside (Service-Layer Caching) + +The decorator covers route-level caching. 
For caching inside services or background tasks, use the provider API directly: + +```python +from src.infrastructure.cache import get, set, delete + +KEY_TTL = 1800 # 30 minutes + + +async def get_user_score(user_id: int, db: AsyncSession) -> int: + cache_key = f"user_score:{user_id}" + + # Try the cache first + cached = await get(key=cache_key) + if cached is not None: + return int(cached) + + # Miss — compute and store + score = await _compute_user_score(user_id, db) + await set(key=cache_key, value=score, expiration=KEY_TTL) + return score + + +async def invalidate_user_score(user_id: int) -> None: + await delete(key=f"user_score:{user_id}") +``` + +Conventions: + +- **Always use the same key format** in both the read and the invalidate path — copy/paste mistakes here are the most common cause of "cache won't update" +- **Compute first, write second.** Never `set` a value before you've successfully computed it; you'd cache an error. +- **Use the same TTL across reads and refreshes** so behavior is predictable. + +## Cache Stampede Mitigation + +When a hot cache key expires, every concurrent request can hit the database before any of them writes the new value back — a stampede. Mitigations the boilerplate's stack supports: + +### Slightly Randomized TTLs + +Pick TTLs in a range, not a single value, so a thousand keys created at the same time don't expire in lockstep: + +```python +import random + +ttl = 1800 + random.randint(-60, 60) # 30 min ± 1 min +await set(key=cache_key, value=payload, expiration=ttl) +``` + +This is enough for most workloads. 
+ +### Refresh Ahead of Expiration + +Inside the service, decide based on a "soft" TTL whether to recompute opportunistically: + +```python +SOFT_TTL = 1500 # 25 min — recompute eagerly past this +HARD_TTL = 1800 # 30 min — fail-open beyond this + +async def get_payload(user_id: int) -> dict: + cache_key = f"user_payload:{user_id}" + payload = await get(key=cache_key) + + if payload is not None and payload.get("computed_at", 0) > time.time() - SOFT_TTL: + return payload # fresh enough + + fresh = await _compute(user_id) + fresh["computed_at"] = time.time() + await set(key=cache_key, value=fresh, expiration=HARD_TTL) + return fresh +``` + +Past the soft TTL, the next request triggers a recompute even though the cache is still warm — the next concurrent request still gets the fresh value. This is enough to prevent stampedes for moderately hot keys. + +For genuinely hot keys (top trending list with 10k req/s), reach for a distributed lock (`SET key value NX EX 30`) inside the recompute path. The boilerplate doesn't ship one, but Redis primitives are sufficient. + +## Cache Warming + +Cache warming proactively populates the cache so the first user request after a deploy isn't a cold miss. Two reasonable places to do it in the boilerplate: + +### At Application Startup (in the lifespan) + +The boilerplate's `lifespan_factory` (in `infrastructure/app_factory.py`) is where the cache is initialized. Warming sits naturally just after that point — but only for genuinely **small** datasets (reference tables, tier definitions, top-N aggregates). Don't pull a million rows into Redis on every boot. 
+ +The pattern, in your own `interfaces/main.py` setup: + +```python +from contextlib import asynccontextmanager +from fastapi import FastAPI + +from src.infrastructure.app_factory import lifespan_factory +from src.infrastructure.cache import set +from src.infrastructure.config.settings import get_settings + + +@asynccontextmanager +async def lifespan(app: FastAPI): + settings = get_settings() + # Run the boilerplate's default lifespan first + base_lifespan = lifespan_factory(settings) + async with base_lifespan(app): + await _warm_reference_data() + yield + + +async def _warm_reference_data(): + # Small, slow-changing data — safe to warm at boot. + tiers = await _load_all_tiers() + for tier in tiers: + await set(key=f"tier:{tier['id']}", value=tier, expiration=86400) +``` + +Wire it by passing `lifespan=lifespan` to `create_application()`. + +### As a Taskiq Task + +For larger or periodic warming, use a Taskiq task on a schedule. See [Background Tasks](../background-tasks/index.md) for the worker setup; the warming logic is the same — fetch data, call `set()`. + +```python +# backend/src/modules/cache/tasks.py +from ...infrastructure.cache import set +from ...infrastructure.taskiq import default_broker + + +@default_broker.task(task_name="warm_top_widgets") +async def warm_top_widgets() -> None: + top = await _query_top_widgets(limit=100) + await set(key="top_widgets", value=top, expiration=600) ``` -**Pattern Examples:** +Schedule it to run every 5 minutes (or whatever's shorter than the TTL) and the cache is always warm. 
+ +## Negative Caching + +When a lookup misses the database too, cache the miss for a short window so subsequent requests don't re-hit the database: + ```python -# User-specific patterns -"user_{user_id}_posts_*" # All paginated post lists for user -"user_{user_id}_*_cache" # All cached data for user -"*_following_{user_id}" # All caches tracking this user's followers +from src.infrastructure.cache import get, set + +NEGATIVE_TTL = 60 # 1 minute — keep negative caches very short +SENTINEL = "__NOT_FOUND__" + + +async def get_widget(widget_id: int, db: AsyncSession) -> dict | None: + cache_key = f"widget:{widget_id}" + cached = await get(key=cache_key) -# Content patterns -"posts_category_{category_id}_*" # All posts in category -"comments_post_{post_id}_*" # All comments for post -"search_*_{query}" # All search results for query + if cached == SENTINEL: + return None + if cached is not None: + return cached -# Time-based patterns -"daily_stats_*" # All daily statistics -"hourly_*" # All hourly data -"temp_*" # Temporary cache entries + result = await widget_service.get_by_id(widget_id, db) + if result is None: + await set(key=cache_key, value=SENTINEL, expiration=NEGATIVE_TTL) + return None + + await set(key=cache_key, value=result, expiration=600) + return result ``` -## Cache Warming Strategies +Keep negative TTLs **much shorter** than positive ones — the row will appear eventually and you don't want users to keep getting 404s for a minute after creation. -Cache warming proactively loads data into cache to avoid cache misses during peak usage. +## Per-User vs Global Caches -### Application Startup Warming +The single biggest mistake when adding `@cache` to an endpoint that returns user-specific data is keying only by resource ID. 
Two concrete problems: ```python -# core/startup.py -async def warm_critical_caches(): - """Warm up critical caches during application startup.""" - - logger.info("Starting cache warming...") - - # Warm up reference data - await warm_reference_data() - - # Warm up popular content - await warm_popular_content() - - # Warm up user session data for active users - await warm_active_user_data() - - logger.info("Cache warming completed") - -async def warm_reference_data(): - """Warm up reference data that rarely changes.""" - - # Countries, currencies, timezones, etc. - reference_data = await crud_reference.get_all_countries() - for country in reference_data: - cache_key = f"country:{country['code']}" - await cache.client.set(cache_key, json.dumps(country), ex=86400) # 24 hours - - # Categories - categories = await crud_categories.get_all() - await cache.client.set("all_categories", json.dumps(categories), ex=3600) - -async def warm_popular_content(): - """Warm up frequently accessed content.""" - - # Most viewed posts - popular_posts = await crud_posts.get_popular(limit=100) - for post in popular_posts: - cache_key = f"post_cache:{post['id']}" - await cache.client.set(cache_key, json.dumps(post), ex=1800) - - # Trending topics - trending = await crud_posts.get_trending_topics(limit=50) - await cache.client.set("trending_topics", json.dumps(trending), ex=600) - -async def warm_active_user_data(): - """Warm up data for recently active users.""" - - # Get users active in last 24 hours - active_users = await crud_users.get_recently_active(hours=24) - - for user in active_users: - # Warm user profile - profile_key = f"user_profile:{user['id']}" - await cache.client.set(profile_key, json.dumps(user), ex=3600) - - # Warm user's recent posts - user_posts = await crud_posts.get_user_posts(user['id'], limit=10) - posts_key = f"user_{user['id']}_posts:page_1" - await cache.client.set(posts_key, json.dumps(user_posts), ex=1800) - -# Add to startup events -@app.on_event("startup") 
-async def startup_event(): - await create_redis_cache_pool() - await warm_critical_caches() -``` - -These cache strategies provide a comprehensive approach to building performant, consistent caching systems that scale with your application's needs while maintaining data integrity. \ No newline at end of file +# WRONG — every user gets user 1's data +@router.get("/me/dashboard") +@cache(key_prefix="dashboard", resource_id_name="user_id") +async def my_dashboard(request: Request, user_id: int, ...): + ... +``` + +Multiple users hit `dashboard:1` (the user_id of the first cached request) and see each other's data. Two fixes: + +```python +# Include user in the prefix +@cache(key_prefix="dashboard_for_user_{user_id}", resource_id_name="user_id") +# → dashboard_for_user_5:5 ← key includes user + +# Or just don't cache personalized responses +# (often the right call — Redis hits add latency for hot per-user data anyway) +``` + +## Picking TTLs + +Default is one hour (`3600`). Override per route based on staleness tolerance: + +| Data shape | Suggested TTL | +|-----------------------------------------------|----------------------------| +| Static reference data (tier list, countries) | 24 hours (`86400`) | +| User profile / public objects | 5–30 minutes (`300`–`1800`)| +| Paginated list endpoints | 1–5 minutes (`60`–`300`) | +| Search results | 5–15 minutes (`300`–`900`) | +| Frequently changing dashboards | 30–60 seconds | +| Negative caches (404 lookups) | 30–120 seconds | + +When in doubt, start short. It's cheap to raise a TTL once you trust the invalidation paths — much harder to debug stale-data complaints from a 24-hour cache. + +## Operational Notes + +### Read your keys in production + +```bash +redis-cli -h $CACHE_REDIS_HOST KEYS 'widget:*' +redis-cli -h $CACHE_REDIS_HOST TTL widget:42 +redis-cli -h $CACHE_REDIS_HOST GET widget:42 +``` + +If you can't tell from the key alone what's cached and how it's invalidated, your prefix is too short. 
+ +### Watch for fail-open behavior + +The decorator catches Redis errors and falls through to the handler. That's good for availability but means you can have a "cache is down" outage that looks like a "DB is slow" outage on dashboards. Watch the logs for: + +``` +Cache backend not available: +``` + +Alert on the rate of those, not just on Redis being unreachable. + +### Don't cache personal data without thinking + +If your handler returns different bodies depending on auth state, headers, or query params, those have to be in the key. The decorator only sees what you pass in `key_prefix` placeholders and `resource_id_name`. + +## Anti-Patterns to Avoid + +- **Caching mutation responses.** The decorator only caches GETs; if you find yourself wanting to cache a POST/PATCH response, you probably want to cache the underlying GET that's about to refresh anyway. +- **Reaching into the cache for state that's not derived from the database.** Cached state must be reconstructable. If losing the cache loses real data, you needed a DB row, not a cache key. +- **Mixing TTLs across paginated pages.** `widgets:page_1` expiring an hour before `widgets:page_2` produces inconsistent pagination. Use the same TTL across the entire prefix family. +- **Pattern invalidation on every mutation.** Pattern scans get expensive at scale. Reach for them only when you genuinely need to wipe many keys at once. 
+ +## Key Files + +| Component | Location | +|-----------------------|-------------------------------------------------------| +| Decorator | `backend/src/infrastructure/cache/decorator.py` | +| Provider API | `backend/src/infrastructure/cache/provider.py` | +| Backends | `backend/src/infrastructure/cache/backends/` | +| Lifespan integration | `backend/src/infrastructure/app_factory.py` | + +## Next Steps + +- **[Redis Cache](redis-cache.md)** — Decorator parameters and provider API reference +- **[Client Cache](client-cache.md)** — `Cache-Control` headers for browser caching +- **[Background Tasks](../background-tasks/index.md)** — Scheduling cache warming jobs with Taskiq diff --git a/docs/user-guide/caching/client-cache.md b/docs/user-guide/caching/client-cache.md index 7096e78b..e1229562 100644 --- a/docs/user-guide/caching/client-cache.md +++ b/docs/user-guide/caching/client-cache.md @@ -1,515 +1,192 @@ # Client Cache -Client-side caching leverages HTTP cache headers to instruct browsers and CDNs to cache responses locally. This reduces server load and improves user experience by serving cached content directly from the client. +Client-side caching uses HTTP `Cache-Control` headers to tell browsers (and intermediate CDNs / proxies) when they're allowed to reuse a response without coming back to the server. The boilerplate ships a small middleware that sets sensible defaults — an explicit "don't cache" for the API, and a configurable `max-age` for everything else. -## Understanding Client Caching +## What's Built In -Client caching works by setting HTTP headers that tell browsers, proxies, and CDNs how long they should cache responses. When implemented correctly, subsequent requests for the same resource are served instantly from the local cache. 
- -### Benefits of Client Caching - -**Reduced Latency**: Instant response from local cache eliminates network round trips -**Lower Server Load**: Fewer requests reach your server infrastructure -**Bandwidth Savings**: Cached responses don't consume network bandwidth -**Better User Experience**: Faster page loads and improved responsiveness -**Cost Reduction**: Lower server resource usage and bandwidth costs - -## Cache-Control Headers - -The `Cache-Control` header is the primary mechanism for controlling client-side caching behavior. - -### Header Components - -```http -Cache-Control: public, max-age=3600, s-maxage=7200, must-revalidate +```text +infrastructure/middleware.py ClientCacheMiddleware +infrastructure/app_factory.py Wires it into the app at startup +infrastructure/config/settings.py CLIENT_CACHE_ENABLED, CLIENT_CACHE_MAX_AGE ``` -**Directive Breakdown:** - -- **`public`**: Response can be cached by any cache (browsers, CDNs, proxies) -- **`private`**: Response can only be cached by browsers, not shared caches -- **`max-age=3600`**: Cache for 3600 seconds (1 hour) in browsers -- **`s-maxage=7200`**: Cache for 7200 seconds (2 hours) in shared caches (CDNs) -- **`must-revalidate`**: Must check with server when cache expires -- **`no-cache`**: Must revalidate with server before using cached response -- **`no-store`**: Must not store any part of the response +That's the entire surface area. There's no per-route configuration, no path table, no ETag handling out of the box. The middleware is intentionally tiny — anything more nuanced you handle in your route handlers. 
-### Common Cache Patterns +## How It Works ```python -# Static assets (images, CSS, JS) -"Cache-Control: public, max-age=31536000, immutable" # 1 year - -# API data that changes rarely -"Cache-Control: public, max-age=3600" # 1 hour - -# User-specific data -"Cache-Control: private, max-age=1800" # 30 minutes, browser only - -# Real-time data -"Cache-Control: no-cache, must-revalidate" # Always validate - -# Sensitive data -"Cache-Control: no-store, no-cache, must-revalidate" # Never cache -``` - -## Middleware Implementation - -The boilerplate includes middleware that automatically adds cache headers to responses. - -### ClientCacheMiddleware - -```python -# middleware/client_cache_middleware.py -from fastapi import FastAPI, Request, Response -from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint - +# infrastructure/middleware.py class ClientCacheMiddleware(BaseHTTPMiddleware): - """Middleware to set Cache-Control headers for client-side caching.""" - - def __init__(self, app: FastAPI, max_age: int = 60) -> None: + def __init__(self, app: ASGIApp, max_age: int = 60) -> None: super().__init__(app) self.max_age = max_age - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: - response: Response = await call_next(request) - response.headers["Cache-Control"] = f"public, max-age={self.max_age}" + async def dispatch(self, request, call_next): + response = await call_next(request) + if request.url.path.startswith("/api/"): + response.headers["Cache-Control"] = "private, no-cache, no-store, must-revalidate" + else: + response.headers["Cache-Control"] = f"public, max-age={self.max_age}" return response ``` -### Adding Middleware to Application +Two rules: -```python -# main.py -from fastapi import FastAPI -from app.middleware.client_cache_middleware import ClientCacheMiddleware +| Path | `Cache-Control` value | +|-------------------------|--------------------------------------------------------| +| Starts 
with `/api/` | `private, no-cache, no-store, must-revalidate` | +| Anything else | `public, max-age={CLIENT_CACHE_MAX_AGE}` | -app = FastAPI() +The reasoning: -# Add client caching middleware -app.add_middleware( - ClientCacheMiddleware, - max_age=300 # 5 minutes default cache -) -``` +- **`/api/*`** is dynamic, often authenticated, and frequently personalized. Caching at the browser or CDN would leak data between users and serve stale state. Default is hard "don't cache." +- **Non-API paths** (static assets, the admin UI's static files, anything else mounted at the root) tend to be safe to cache for a minute or so by default — long enough to reduce repeat requests, short enough to recover quickly from a deploy. -### Custom Middleware Configuration +## Configuration -```python -class AdvancedClientCacheMiddleware(BaseHTTPMiddleware): - """Advanced client cache middleware with path-specific configurations.""" - - def __init__( - self, - app: FastAPI, - default_max_age: int = 300, - path_configs: dict[str, dict] = None - ): - super().__init__(app) - self.default_max_age = default_max_age - self.path_configs = path_configs or {} - - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: - response = await call_next(request) - - # Get path-specific configuration - cache_config = self._get_cache_config(request.url.path) - - # Set cache headers based on configuration - if cache_config.get("no_cache", False): - response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" - response.headers["Pragma"] = "no-cache" - response.headers["Expires"] = "0" - else: - max_age = cache_config.get("max_age", self.default_max_age) - visibility = "private" if cache_config.get("private", False) else "public" - - cache_control = f"{visibility}, max-age={max_age}" - - if cache_config.get("must_revalidate", False): - cache_control += ", must-revalidate" - - if cache_config.get("immutable", False): - cache_control += ", immutable" - - 
response.headers["Cache-Control"] = cache_control - - return response - - def _get_cache_config(self, path: str) -> dict: - """Get cache configuration for a specific path.""" - for pattern, config in self.path_configs.items(): - if path.startswith(pattern): - return config - return {} - -# Usage with path-specific configurations -app.add_middleware( - AdvancedClientCacheMiddleware, - default_max_age=300, - path_configs={ - "/api/v1/static/": {"max_age": 31536000, "immutable": True}, # 1 year for static assets - "/api/v1/auth/": {"no_cache": True}, # No cache for auth endpoints - "/api/v1/users/me": {"private": True, "max_age": 900}, # 15 min private cache for user data - "/api/v1/public/": {"max_age": 1800}, # 30 min for public data - } -) +```env +# Enables the middleware. Set to false to skip the Cache-Control header entirely. +CLIENT_CACHE_ENABLED=true + +# max-age (seconds) used for non-API paths. +CLIENT_CACHE_MAX_AGE=60 ``` -## Manual Cache Control +The middleware is added to the FastAPI app only when **both** `CACHE_ENABLED` and `CLIENT_CACHE_ENABLED` are true (`infrastructure/app_factory.py`). If you've already disabled the server-side cache, the client-cache middleware also goes away. + +When `CLIENT_CACHE_ENABLED=false`, no `Cache-Control` header is set by middleware — your routes (or your reverse proxy) are responsible for it. -Set cache headers manually in specific endpoints for fine-grained control. +## Overriding for a Specific Endpoint -### Response Header Manipulation +If you want a particular API endpoint to opt **into** browser caching, set the header in the handler. Middleware runs after the handler, so a header set in the route is overwritten — meaning you have to either set it via `Response` directly (and let the middleware overwrite anyway) **or** use a small route-level middleware. 
The simplest reliable pattern is to disable the global middleware in tests/docs and set headers explicitly in your routes: ```python from fastapi import APIRouter, Response router = APIRouter() -@router.get("/api/v1/static-data") -async def get_static_data(response: Response): - """Endpoint with long-term caching for static data.""" - # Set cache headers for static data - response.headers["Cache-Control"] = "public, max-age=86400, immutable" # 24 hours - response.headers["Last-Modified"] = "Wed, 21 Oct 2023 07:28:00 GMT" - response.headers["ETag"] = '"abc123"' - - return {"data": "static content that rarely changes"} - -@router.get("/api/v1/user-data") -async def get_user_data(response: Response, current_user: dict = Depends(get_current_user)): - """Endpoint with private caching for user-specific data.""" - # Private cache for user-specific data - response.headers["Cache-Control"] = "private, max-age=1800" # 30 minutes - response.headers["Vary"] = "Authorization" # Cache varies by auth header - - return {"user_id": current_user["id"], "preferences": "user data"} - -@router.get("/api/v1/real-time-data") -async def get_real_time_data(response: Response): - """Endpoint that should not be cached.""" - # Prevent caching for real-time data - response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" - response.headers["Pragma"] = "no-cache" - response.headers["Expires"] = "0" - - return {"timestamp": datetime.utcnow(), "live_data": "current status"} + +@router.get("/manifest.json") +async def manifest(response: Response) -> dict[str, str]: + response.headers["Cache-Control"] = "public, max-age=86400, immutable" + return {"name": "My App", "version": "1.0.0"} ``` -### Conditional Caching +Heads up: because the global middleware overwrites `Cache-Control` for `/api/*` paths, the snippet above only takes effect on **non-API** routes. For API routes you actually want to cache, either: -Implement conditional caching based on request parameters: +1. 
Mount them outside `/api/` (rare), or +2. Disable `CLIENT_CACHE_ENABLED` and set headers on a per-route basis. -```python -@router.get("/api/v1/posts") -async def get_posts( - response: Response, - page: int = 1, - per_page: int = 10, - category: str | None = None, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Conditional caching based on parameters.""" - - # Different cache strategies based on parameters - if category: - # Category-specific data changes less frequently - response.headers["Cache-Control"] = "public, max-age=1800" # 30 minutes - elif page == 1: - # First page cached more aggressively - response.headers["Cache-Control"] = "public, max-age=600" # 10 minutes - else: - # Other pages cached for shorter duration - response.headers["Cache-Control"] = "public, max-age=300" # 5 minutes - - # Add ETag for efficient revalidation - content_hash = hashlib.md5(f"{page}{per_page}{category}".encode()).hexdigest() - response.headers["ETag"] = f'"{content_hash}"' - - posts = await crud_posts.get_multi( - db=db, - offset=(page - 1) * per_page, - limit=per_page, - category=category - ) - - return {"posts": posts, "page": page, "per_page": per_page} -``` +In practice, almost no endpoint in a typical app benefits from browser caching of an API response — keep API caching server-side via the [`@cache` decorator](redis-cache.md) and let the browser fetch fresh. -## ETag Implementation +## A Quick Cache-Control Primer -ETags enable efficient cache validation by allowing clients to check if content has changed. +The directives the middleware uses (and the ones you'll most often add manually): -### ETag Generation +| Directive | Meaning | +|-------------------------|---------------------------------------------------------------------------------| +| `public` | Any cache (browser, CDN, proxy) may store the response | +| `private` | Only the end-user's browser may store it. 
CDNs / shared proxies must not |
+| `no-cache` | Caches may store, but must revalidate with the server before reuse |
+| `no-store` | Don't store at all — not in the browser, not on disk, not in a CDN |
+| `must-revalidate` | Once stale, the cache must check upstream before serving again |
+| `max-age=` | Cache is fresh for this many seconds |
+| `s-maxage=` | Same as `max-age`, but applies only to shared caches (CDNs) |
+| `immutable` | The body will never change — clients can skip revalidation entirely |
+| `stale-while-revalidate=` | After freshness expires, serve the stale copy for this long while updating |
-```python
-import hashlib
-from typing import Any
-
-def generate_etag(data: Any) -> str:
-    """Generate ETag from data content."""
-    content = json.dumps(data, sort_keys=True, default=str)
-    return hashlib.md5(content.encode()).hexdigest()
-
-@router.get("/api/v1/users/{user_id}")
-async def get_user(
-    request: Request,
-    response: Response,
-    user_id: int,
-    db: Annotated[AsyncSession, Depends(async_get_db)]
-):
-    """Endpoint with ETag support for efficient caching."""
-
-    user = await crud_users.get(db=db, id=user_id)
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-
-    # Generate ETag from user data
-    etag = generate_etag(user)
-
-    # Check if client has current version
-    if_none_match = request.headers.get("If-None-Match")
-    if if_none_match == f'"{etag}"':
-        # Content hasn't changed, return 304 Not Modified
-        response.status_code = 304
-        return Response(status_code=304)
-
-    # Set ETag and cache headers
-    response.headers["ETag"] = f'"{etag}"'
-    response.headers["Cache-Control"] = "private, max-age=1800, must-revalidate"
-
-    return user
-```
+The `/api/*` value (`private, no-cache, no-store, must-revalidate`) is paranoid on purpose: in combination, those directives forbid every form of caching the major browsers and CDNs implement. That's the right default for authenticated dynamic data.
-### Last-Modified Headers +## When to Reach for ETags -Use Last-Modified headers for time-based cache validation: +The middleware doesn't generate ETags or `Last-Modified` headers. If you want conditional requests (`304 Not Modified` on unchanged resources), you have to set those headers in the handler: ```python -@router.get("/api/v1/posts/{post_id}") -async def get_post( - request: Request, - response: Response, - post_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Endpoint with Last-Modified header support.""" - - post = await crud_posts.get(db=db, id=post_id) - if not post: - raise HTTPException(status_code=404, detail="Post not found") - - # Use post's updated_at timestamp - last_modified = post["updated_at"] - - # Check If-Modified-Since header - if_modified_since = request.headers.get("If-Modified-Since") - if if_modified_since: - client_time = datetime.strptime(if_modified_since, "%a, %d %b %Y %H:%M:%S GMT") - if last_modified <= client_time: - response.status_code = 304 - return Response(status_code=304) - - # Set Last-Modified header - response.headers["Last-Modified"] = last_modified.strftime("%a, %d %b %Y %H:%M:%S GMT") - response.headers["Cache-Control"] = "public, max-age=3600, must-revalidate" - - return post -``` - -## Cache Strategy by Content Type +import hashlib +from fastapi import APIRouter, Request, Response, status -Different types of content require different caching strategies. 
+router = APIRouter() -### Static Assets -```python -@router.get("/static/{file_path:path}") -async def serve_static(response: Response, file_path: str): - """Serve static files with aggressive caching.""" - - # Static assets can be cached for a long time - response.headers["Cache-Control"] = "public, max-age=31536000, immutable" # 1 year - response.headers["Vary"] = "Accept-Encoding" # Vary by compression - - # Add file-specific ETag based on file modification time - file_stat = os.stat(f"static/{file_path}") - etag = hashlib.md5(f"{file_path}{file_stat.st_mtime}".encode()).hexdigest() - response.headers["ETag"] = f'"{etag}"' - - return FileResponse(f"static/{file_path}") -``` +@router.get("/manifest.json") +async def manifest(request: Request, response: Response) -> dict[str, str] | Response: + payload = {"name": "My App", "version": "1.0.0"} + body = str(payload).encode() + etag = f'"{hashlib.md5(body).hexdigest()}"' -### API Responses + if request.headers.get("if-none-match") == etag: + return Response(status_code=status.HTTP_304_NOT_MODIFIED) -```python -# Reference data (rarely changes) -@router.get("/api/v1/countries") -async def get_countries(response: Response, db: Annotated[AsyncSession, Depends(async_get_db)]): - response.headers["Cache-Control"] = "public, max-age=86400" # 24 hours - return await crud_countries.get_all(db=db) - -# User-generated content (moderate changes) -@router.get("/api/v1/posts") -async def get_posts(response: Response, db: Annotated[AsyncSession, Depends(async_get_db)]): - response.headers["Cache-Control"] = "public, max-age=1800" # 30 minutes - return await crud_posts.get_multi(db=db, is_deleted=False) - -# Personal data (private caching only) -@router.get("/api/v1/users/me/notifications") -async def get_notifications( - response: Response, - current_user: dict = Depends(get_current_user), - db: Annotated[AsyncSession, Depends(async_get_db)] -): - response.headers["Cache-Control"] = "private, max-age=300" # 5 minutes - 
response.headers["Vary"] = "Authorization" - return await crud_notifications.get_user_notifications(db=db, user_id=current_user["id"]) - -# Real-time data (no caching) -@router.get("/api/v1/system/status") -async def get_system_status(response: Response): - response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate" - return {"status": "online", "timestamp": datetime.utcnow()} + response.headers["ETag"] = etag + response.headers["Cache-Control"] = "public, max-age=300" + return payload ``` -## Vary Header Usage +ETags are most useful for: -The `Vary` header tells caches which request headers affect the response, enabling proper cache key generation. +- Larger static-ish payloads (manifests, configs, generated PDFs) +- Public endpoints with high read volume but low write volume +- Anything where the body is computed but stable across many requests -### Common Vary Patterns +For typical CRUD endpoints they're rarely worth the complexity — `@cache` server-side plus a short `max-age` for genuinely static assets covers most cases. 
-```python -# Cache varies by authorization (user-specific content) -response.headers["Vary"] = "Authorization" - -# Cache varies by accepted language -response.headers["Vary"] = "Accept-Language" - -# Cache varies by compression support -response.headers["Vary"] = "Accept-Encoding" - -# Multiple varying headers -response.headers["Vary"] = "Authorization, Accept-Language, Accept-Encoding" - -# Example implementation -@router.get("/api/v1/dashboard") -async def get_dashboard( - request: Request, - response: Response, - current_user: dict = Depends(get_current_user) -): - """Dashboard content that varies by user and language.""" - - # Content varies by user (Authorization) and language preference - response.headers["Vary"] = "Authorization, Accept-Language" - response.headers["Cache-Control"] = "private, max-age=900" # 15 minutes - - language = request.headers.get("Accept-Language", "en") - - dashboard_data = await generate_dashboard( - user_id=current_user["id"], - language=language - ) - - return dashboard_data -``` +## Reverse Proxy / CDN Considerations -## CDN Integration +If you serve the app behind a reverse proxy or CDN (Nginx, Caddy, CloudFront, Cloudflare), the proxy will see and obey the middleware's `Cache-Control` headers: -Configure cache headers for optimal CDN performance. +- `/api/*` responses won't be cached at the edge — every request reaches the origin +- Non-API responses are eligible for shared caching for `CLIENT_CACHE_MAX_AGE` seconds -### CDN-Specific Headers +If you need different behavior at the edge (longer CDN TTL but short browser TTL, for example), set both `s-maxage` and `max-age` from your handlers, or strip the middleware's header at the proxy and replace it. 
-```python -@router.get("/api/v1/public-content") -async def get_public_content(response: Response): - """Content optimized for CDN caching.""" - - # Different cache times for browser vs CDN - response.headers["Cache-Control"] = "public, max-age=300, s-maxage=3600" # 5 min browser, 1 hour CDN - - # CDN-specific headers (CloudFlare example) - response.headers["CF-Cache-Tag"] = "public-content,api-v1" # Cache tags for purging - response.headers["CF-Edge-Cache"] = "max-age=86400" # Edge cache for 24 hours - - return await get_public_content_data() +## Disabling the Middleware + +```env +CLIENT_CACHE_ENABLED=false ``` -### Cache Purging +After restart, no `Cache-Control` header is set by the boilerplate. Your routes and proxy take full control. This is the right move when: -Implement cache purging for content updates: +- You have a reverse proxy / CDN already managing cache headers +- You're doing per-route caching strategies that would be undone by the middleware +- You're debugging a caching-related bug and want a clean baseline -```python -@router.put("/api/v1/posts/{post_id}") -async def update_post( - response: Response, - post_id: int, - post_data: PostUpdate, - current_user: dict = Depends(get_current_user), - db: Annotated[AsyncSession, Depends(async_get_db)] -): - """Update post and invalidate related caches.""" - - # Update the post - updated_post = await crud_posts.update(db=db, id=post_id, object=post_data) - if not updated_post: - raise HTTPException(status_code=404, detail="Post not found") - - # Set headers to indicate cache invalidation is needed - response.headers["Cache-Control"] = "no-cache" - response.headers["X-Cache-Purge"] = f"post-{post_id},user-{current_user['id']}-posts" - - # In production, trigger CDN purge here - # await purge_cdn_cache([f"post-{post_id}", f"user-{current_user['id']}-posts"]) - - return updated_post -``` +## Troubleshooting -## Best Practices +### "My API response is still being cached by the browser" -### Cache Duration 
Guidelines +Confirm the response actually carries the no-cache header: -```python -# Choose appropriate cache durations based on content characteristics: +```bash +curl -I http://localhost:8000/api/v1/users/me \ + -H "Cookie: session_id=..." +# look for: Cache-Control: private, no-cache, no-store, must-revalidate +``` -# Static assets (CSS, JS, images with versioning) -max_age = 31536000 # 1 year +If the header is missing, check that `CLIENT_CACHE_ENABLED=true` and `CACHE_ENABLED=true`. Both must be true for the middleware to mount. -# API reference data (countries, categories) -max_age = 86400 # 24 hours +### "I want to cache an API response but the middleware overrides it" -# User-generated content (posts, comments) -max_age = 1800 # 30 minutes +The middleware overwrites `Cache-Control` for any `/api/*` path. Options: -# User-specific data (profiles, preferences) -max_age = 900 # 15 minutes +- Cache server-side with the [`@cache` decorator](redis-cache.md) — almost always what you actually want +- Set `CLIENT_CACHE_ENABLED=false` and manage `Cache-Control` per-route +- Route the endpoint outside `/api/*` if it really is a static asset -# Search results -max_age = 600 # 10 minutes +### "Static assets aren't being cached aggressively enough" -# Real-time data (live scores, chat) -max_age = 0 # No caching -``` +Raise `CLIENT_CACHE_MAX_AGE`, or set per-asset headers in the handler / proxy. Browsers will use whichever value the server returns most recently, so updating the env var and redeploying takes effect for new requests immediately. 
-### Security Considerations +## Key Files -```python -# Never cache sensitive data -@router.get("/api/v1/admin/secrets") -async def get_secrets(response: Response): - response.headers["Cache-Control"] = "no-store, no-cache, must-revalidate, private" - response.headers["Pragma"] = "no-cache" - response.headers["Expires"] = "0" - return {"secret": "sensitive_data"} - -# Use private caching for user-specific content -@router.get("/api/v1/users/me/private-data") -async def get_private_data(response: Response): - response.headers["Cache-Control"] = "private, max-age=300, must-revalidate" - response.headers["Vary"] = "Authorization" - return {"private": "user_data"} -``` +| Component | Location | +|-----------------------|-------------------------------------------------------| +| Middleware | `backend/src/infrastructure/middleware.py` | +| Wiring | `backend/src/infrastructure/app_factory.py` | +| Settings | `backend/src/infrastructure/config/settings.py` (`CacheSettings`) | + +## Next Steps -Client-side caching, when properly implemented, provides significant performance improvements while maintaining security and data freshness through intelligent cache control strategies. \ No newline at end of file +- **[Redis Cache](redis-cache.md)** — Server-side caching with the `@cache` decorator +- **[Cache Strategies](cache-strategies.md)** — Patterns for keys, related-key invalidation, cache-aside flows +- **[Environment Variables](../configuration/environment-variables.md#cache)** — Full settings reference diff --git a/docs/user-guide/caching/index.md b/docs/user-guide/caching/index.md index 45ffed0e..8bcbaa3c 100644 --- a/docs/user-guide/caching/index.md +++ b/docs/user-guide/caching/index.md @@ -1,77 +1,134 @@ # Caching -The boilerplate includes a comprehensive caching system built on Redis that improves performance through server-side caching and client-side cache control. This section covers the complete caching implementation. 
+The boilerplate ships a flexible caching system supporting **Redis or Memcached** server-side, plus configurable client-side cache headers. Same decorator and provider API regardless of the backend. ## Overview -The caching system provides multiple layers of optimization: +Three layers, used together as needed: -- **Server-Side Caching**: Redis-based caching with automatic invalidation -- **Client-Side Caching**: HTTP cache headers for browser optimization -- **Cache Invalidation**: Smart invalidation strategies for data consistency +- **`@cache` decorator** — Caches GET endpoints, automatically invalidates on PUT/PATCH/POST/DELETE +- **Provider API** — Direct cache operations (`get`, `set`, `delete`, etc.) for non-route code +- **Client-side cache headers** — `Cache-Control` headers added by middleware for browser caching ## Quick Example ```python -from app.core.utils.cache import cache - -@router.get("/posts/{post_id}") -@cache(key_prefix="post_cache", expiration=3600) -async def get_post(request: Request, post_id: int): - # Cached for 1 hour, automatic invalidation on updates - return await crud_posts.get(db=db, id=post_id) +from typing import Annotated, Any +from fastapi import APIRouter, Depends, Request +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.cache import cache +from ...infrastructure.database.session import async_session +from .schemas import WidgetRead +from .service import WidgetService, get_widget_service + +router = APIRouter() + + +@router.get("/{widget_id}", response_model=WidgetRead) +@cache(key_prefix="widget", resource_id_name="widget_id", expiration=600) +async def get_widget( + request: Request, + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> dict[str, Any]: + return await widget_service.get_by_id(widget_id, db) ``` +The decorator caches the result for 600 seconds. 
PUT/PATCH/POST/DELETE on the same `widget_id` invalidate the cache automatically. The first parameter must be `request: Request` — the decorator inspects the HTTP method. + ## Architecture -### Server-Side Caching -- **Redis Integration**: Connection pooling and async operations -- **Decorator-Based**: Simple `@cache` decorator for endpoints -- **Smart Invalidation**: Automatic cache clearing on data changes -- **Pattern Matching**: Bulk invalidation using Redis patterns +```text +HTTP Request + ↓ +@cache decorator (decorator.py) + ↓ (cache miss) +APIRouter handler + ↓ +service / FastCRUD (your code) + ↓ +PostgreSQL +``` -### Client-Side Caching -- **HTTP Headers**: Cache-Control headers for browser caching -- **Middleware**: Automatic header injection -- **Configurable TTL**: Customizable cache duration +When the cache hits, the handler doesn't run at all — the cached value is returned directly. When it misses (or for mutations), the handler runs and the cache is updated or invalidated as configured. 
-## Key Features +## What's Included -**Automatic Cache Management** -- Caches GET requests automatically -- Invalidates cache on PUT/POST/DELETE operations -- Supports complex invalidation patterns +| Component | Purpose | Location | +|-----------|---------|----------| +| `@cache` decorator | Endpoint-level caching with auto-invalidation | `infrastructure/cache/decorator.py` | +| Provider API | Direct cache ops (`get`, `set`, `delete`, …) | `infrastructure/cache/provider.py` | +| Redis backend | Pattern matching, persistence, rich types | `infrastructure/cache/backends/redis.py` | +| Memcached backend | Lightweight, key/value only | `infrastructure/cache/backends/memcached.py` | +| `ClientCacheMiddleware` | Adds `Cache-Control` headers | `infrastructure/middleware.py` | -**Flexible Configuration** -- Per-endpoint expiration times -- Custom cache key generation -- Environment-specific Redis settings +## Configuration -**Performance Optimization** -- Connection pooling for Redis -- Efficient key pattern matching -- Minimal overhead for cache operations +```env +CACHE_ENABLED=true +CACHE_BACKEND=redis # or "memcached" +DEFAULT_CACHE_EXPIRATION=3600 + +# Redis backend +CACHE_REDIS_HOST=redis # use "localhost" without Docker +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +CACHE_REDIS_PASSWORD= +CACHE_REDIS_POOL_SIZE=10 + +# Memcached backend (only when CACHE_BACKEND=memcached) +CACHE_MEMCACHED_HOST=localhost +CACHE_MEMCACHED_PORT=11211 +CACHE_MEMCACHED_POOL_SIZE=10 + +# Client-side cache headers +CLIENT_CACHE_ENABLED=true +CLIENT_CACHE_MAX_AGE=60 # seconds +``` -## Getting Started +When `CACHE_ENABLED=false`, the decorator becomes a no-op — useful in tests. -1. **[Redis Cache](redis-cache.md)** - Server-side caching with Redis -2. **[Client Cache](client-cache.md)** - Browser caching with HTTP headers -3. 
**[Cache Strategies](cache-strategies.md)** - Invalidation patterns and best practices +See [Environment Variables](../configuration/environment-variables.md#cache) for the full reference. -Each section provides detailed implementation examples and configuration options for building a robust caching layer. +## Picking a Backend -## Configuration +Both backends work with the decorator and provider. They have different strengths: -Basic Redis configuration in your environment: +| Feature | Redis | Memcached | +|---------|-------|-----------| +| Pattern-based deletion (`delete_pattern`) | Yes | No (raises `PatternMatchingNotSupportedError`) | +| Optional persistence (AOF / RDB) | Yes | No | +| Rich data structures (lists, sets, hashes) | Yes | Key/value only | +| Memory efficiency | Good | Excellent | -```bash -# Redis Cache Settings -REDIS_CACHE_HOST=localhost -REDIS_CACHE_PORT=6379 -``` +**Pick Redis** if you need pattern-based invalidation (`pattern_to_invalidate_extra` on the decorator, or `delete_pattern` directly), persistence across restarts, or you're already running Redis for sessions / rate limits / Taskiq. + +**Pick Memcached** if you have an existing Memcached deployment or you specifically want simpler key/value semantics. + +The boilerplate's defaults run Redis everywhere because it doubles as the session and rate-limit backend. + +## Graceful Degradation + +If the cache backend becomes unavailable, the decorator catches the error and falls through to the underlying handler. Your endpoints still work, just slower. This fail-open behavior is intentional — cached data is reproducible from the database, so cache failures shouldn't take the API down. + +## Sub-pages + +1. **[Redis Cache](redis-cache.md)** — Decorator usage, provider API, invalidation patterns +2. **[Client Cache](client-cache.md)** — `Cache-Control` headers and the client-cache middleware +3. 
**[Cache Strategies](cache-strategies.md)** — Patterns for cache key naming, related-key invalidation, and cache-aside flows + +## Key Files -The caching system automatically handles connection pooling and provides efficient cache operations for your FastAPI endpoints. +| Component | Location | +|-----------|----------| +| Decorator | `backend/src/infrastructure/cache/decorator.py` | +| Provider API | `backend/src/infrastructure/cache/provider.py` | +| Backends | `backend/src/infrastructure/cache/backends/` | +| Settings | `backend/src/infrastructure/config/settings.py` (`CacheSettings`) | +| Client-cache middleware | `backend/src/infrastructure/middleware.py` | ## Next Steps -Start with **[Redis Cache](redis-cache.md)** to understand the core server-side caching implementation, then explore client-side caching and advanced invalidation strategies. \ No newline at end of file +Start with [Redis Cache](redis-cache.md) for the decorator and provider patterns, then look at [Cache Strategies](cache-strategies.md) for invalidation tactics. diff --git a/docs/user-guide/caching/redis-cache.md b/docs/user-guide/caching/redis-cache.md index e9e60922..fd5421cc 100644 --- a/docs/user-guide/caching/redis-cache.md +++ b/docs/user-guide/caching/redis-cache.md @@ -1,359 +1,329 @@ -# Redis Cache +# Server-Side Cache (Redis or Memcached) -Redis-based server-side caching provides fast, in-memory storage for API responses. The boilerplate includes a sophisticated caching decorator that automatically handles cache storage, retrieval, and invalidation. +Server-side caching stores responses keyed by request shape so subsequent identical requests skip the database. This page covers the `@cache` decorator and the provider API. -## Understanding Redis Caching +The same code works against Redis or Memcached — pick via `CACHE_BACKEND`. The "Redis" label on this page is historical; everything below works for both backends unless explicitly called out. 
-Redis serves as a high-performance cache layer between your API and database. When properly implemented, it can reduce response times from hundreds of milliseconds to single-digit milliseconds by serving data directly from memory. +## The `@cache` Decorator -### Why Redis? - -**Performance**: In-memory storage provides sub-millisecond data access -**Scalability**: Handles thousands of concurrent connections efficiently -**Persistence**: Optional data persistence for cache warm-up after restarts -**Atomic Operations**: Thread-safe operations for concurrent applications -**Pattern Matching**: Advanced key pattern operations for bulk cache invalidation - -## Cache Decorator - -The `@cache` decorator provides a simple interface for adding caching to any FastAPI endpoint. +The decorator handles caching for GET endpoints and invalidation for mutations. ### Basic Usage ```python -from fastapi import APIRouter, Request, Depends -from sqlalchemy.orm import Session -from app.core.utils.cache import cache -from app.core.db.database import get_db +from typing import Annotated, Any +from fastapi import APIRouter, Depends, Request +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.cache import cache +from ...infrastructure.database.session import async_session +from .schemas import WidgetRead +from .service import WidgetService, get_widget_service router = APIRouter() -@router.get("/posts/{post_id}") -@cache(key_prefix="post_cache", expiration=3600) -async def get_post(request: Request, post_id: int, db: Session = Depends(get_db)): - # This function's result will be cached for 1 hour - post = await crud_posts.get(db=db, id=post_id) - return post + +@router.get("/{widget_id}", response_model=WidgetRead) +@cache(key_prefix="widget", resource_id_name="widget_id", expiration=600) +async def get_widget( + request: Request, + widget_id: int, + db: Annotated[AsyncSession, Depends(async_session)], + widget_service: Annotated[WidgetService, Depends(get_widget_service)], +) -> 
dict[str, Any]: + return await widget_service.get_by_id(widget_id, db) ``` -**How It Works:** +The decorator builds the cache key as `widget:{widget_id}`. On cache hits, the handler doesn't run — the cached value is returned directly. + +!!! warning "`request: Request` is required" + The decorator inspects `request.method` to decide whether to read or invalidate. The first parameter of every decorated route must be `request: Request`. Without it the decorator raises an error. + +### How It Works -1. **Cache Check**: On GET requests, checks Redis for existing cached data -2. **Cache Miss**: If no cache exists, executes the function and stores the result -3. **Cache Hit**: Returns cached data directly, bypassing function execution -4. **Invalidation**: Automatically removes cache on non-GET requests (POST, PUT, DELETE) +1. **GET requests**: check the cache → return on hit, run the handler + cache the response on miss +2. **PUT/PATCH/POST/DELETE**: run the handler, then **delete** the cache key for the same `(key_prefix, resource_id)`. Optional extras (`to_invalidate_extra`, `pattern_to_invalidate_extra`) trigger additional invalidations +3. **Fail-open**: if the cache backend errors out, the decorator logs a warning and falls through to run the handler. Your endpoint stays available ### Decorator Parameters ```python @cache( - key_prefix: str, # Cache key prefix - resource_id_name: str = None, # Explicit resource ID parameter - expiration: int = 3600, # Cache TTL in seconds - resource_id_type: type | tuple[type, ...] = int, # Expected ID type - to_invalidate_extra: dict[str, str] = None, # Additional keys to invalidate - pattern_to_invalidate_extra: list[str] = None # Pattern-based invalidation + key_prefix: str, # required — cache namespace + resource_id_name: Any = None, # name of the parameter holding the resource ID + expiration: int = 3600, # TTL in seconds (default: 1 hour) + resource_id_type: type | tuple[type, ...] 
= int, # expected type when auto-inferring + to_invalidate_extra: dict[str, Any] | None = None, + pattern_to_invalidate_extra: list[str] | None = None, + backend_name: str | None = None, # if you've registered multiple backends ) ``` -#### Key Prefix +### `key_prefix` — Cache Namespace -The key prefix creates unique cache identifiers: +The prefix can use `{kwarg_name}` placeholders to interpolate route parameters: ```python -# Simple prefix -@cache(key_prefix="user_data") -# Generates keys like: "user_data:123" +@cache(key_prefix="widget", ...) +# → "widget:42" -# Dynamic prefix with placeholders -@cache(key_prefix="{username}_posts") -# Generates keys like: "johndoe_posts:456" +@cache(key_prefix="user_{username}_widgets", ...) +# → "user_johndoe_widgets:42" -# Complex prefix with multiple parameters -@cache(key_prefix="user_{user_id}_posts_page_{page}") -# Generates keys like: "user_123_posts_page_2:789" +@cache(key_prefix="user_{user_id}_widgets:page_{page}:size_{items_per_page}", ...) +# → "user_5_widgets:page_1:size_10:42" ``` -#### Resource ID Handling +The `{...}` placeholders are interpolated from the route handler's keyword arguments (path/query parameters and dependencies). -```python -# Automatic ID inference (looks for 'id' parameter) -@cache(key_prefix="post_cache") -async def get_post(request: Request, post_id: int): - # Uses post_id automatically - -# Explicit ID parameter -@cache(key_prefix="user_cache", resource_id_name="username") -async def get_user(request: Request, username: str): - # Uses username instead of looking for 'id' - -# Multiple ID types -@cache(key_prefix="search", resource_id_type=(int, str)) -async def search(request: Request, query: str, page: int): - # Accepts either string or int as resource ID -``` +### `resource_id_name` — Which Argument is the ID -### Advanced Caching Patterns - -#### Paginated Data Caching +The decorator appends `:{resource_id}` to the prefix. 
Resource ID resolution: ```python -@router.get("/users/{username}/posts") -@cache( - key_prefix="{username}_posts:page_{page}:items_per_page_{items_per_page}", - resource_id_name="username", - expiration=300 # 5 minutes for paginated data -) -async def get_user_posts( - request: Request, - username: str, - page: int = 1, - items_per_page: int = 10 -): - offset = compute_offset(page, items_per_page) - posts = await crud_posts.get_multi( - db=db, - offset=offset, - limit=items_per_page, - created_by_user_id=user_id - ) - return paginated_response(posts, page, items_per_page) -``` +# Explicit — recommended +@cache(key_prefix="widget", resource_id_name="widget_id", expiration=600) +async def get_widget(request: Request, widget_id: int, ...): ... -#### Hierarchical Data Caching +# Implicit — the decorator infers from the kwargs (looks for an int argument by default) +@cache(key_prefix="widget", expiration=600) +async def get_widget(request: Request, widget_id: int, ...): ... -```python -@router.get("/organizations/{org_id}/departments/{dept_id}/employees") -@cache( - key_prefix="org_{org_id}_dept_{dept_id}_employees", - resource_id_name="dept_id", - expiration=1800 # 30 minutes -) -async def get_department_employees( - request: Request, - org_id: int, - dept_id: int -): - employees = await crud_employees.get_multi( - db=db, - department_id=dept_id, - organization_id=org_id - ) - return employees +# String IDs — set resource_id_type +@cache(key_prefix="user", resource_id_name="username", resource_id_type=str) +async def get_user(request: Request, username: str, ...): ... ``` -## Cache Invalidation +If the decorator can't infer a resource ID, it logs a warning and skips caching for that request — the handler still runs normally. -Cache invalidation ensures data consistency when the underlying data changes. 
+## Invalidation -### Automatic Invalidation +### Automatic on Mutations -The cache decorator automatically invalidates cache entries on non-GET requests: +Any non-GET method on a route decorated with the same `(key_prefix, resource_id_name)` automatically invalidates that key: ```python -@router.put("/posts/{post_id}") -@cache(key_prefix="post_cache", resource_id_name="post_id") -async def update_post(request: Request, post_id: int, data: PostUpdate): - # Automatically invalidates "post_cache:123" when called with PUT/POST/DELETE - await crud_posts.update(db=db, id=post_id, object=data) - return {"message": "Post updated"} +@router.patch("/{widget_id}") +@cache(key_prefix="widget", resource_id_name="widget_id") +async def update_widget(request: Request, widget_id: int, ...) -> dict[str, Any]: + # PATCH automatically deletes "widget:{widget_id}" after the handler runs + return await widget_service.update(widget_id, values, db) ``` -### Extra Key Invalidation +The cache deletion happens **after** the handler returns successfully. If the handler raises, the cache isn't touched. + +### Invalidating Related Keys (`to_invalidate_extra`) -Invalidate related cache entries when data changes: +When mutating a resource, you often need to invalidate other caches that reference it. 
Use `to_invalidate_extra` — a dict of `{prefix: id_kwarg}`: ```python -@router.post("/posts") +@router.post("/") @cache( - key_prefix="new_post", - resource_id_name="user_id", + key_prefix="widget", + resource_id_name="widget_id", to_invalidate_extra={ - "user_posts": "{user_id}", # Invalidate user's post list - "latest_posts": "global", # Invalidate global latest posts - "user_stats": "{user_id}" # Invalidate user statistics - } + "user_widgets": "owner_id", # also invalidate "user_widgets:{owner_id}" + "widget_count": "global", # invalidate "widget_count:global" + }, ) -async def create_post(request: Request, post: PostCreate, user_id: int): - # Creating a post invalidates related cached data - new_post = await crud_posts.create(db=db, object=post) - return new_post +async def create_widget( + request: Request, + widget: WidgetCreate, + owner_id: int, + ..., +) -> dict[str, Any]: + return await widget_service.create(widget, owner_id, db) ``` -### Pattern-Based Invalidation +The `id_kwarg` value can be a literal (like `"global"`) or a placeholder reference (`"owner_id"` resolves from the route's kwargs). + +!!! note "Only on mutations" + `to_invalidate_extra` and `pattern_to_invalidate_extra` are not allowed on GET routes — the decorator raises `InvalidRequestError` if you try. Cache invalidation only happens on PUT/PATCH/POST/DELETE. 
-Use Redis pattern matching for bulk invalidation: +### Pattern-Based Invalidation (`pattern_to_invalidate_extra`) + +For bulk wipes, use Redis pattern matching: ```python -@router.put("/users/{user_id}/profile") +@router.delete("/{widget_id}") @cache( - key_prefix="user_profile", - resource_id_name="user_id", + key_prefix="widget", + resource_id_name="widget_id", pattern_to_invalidate_extra=[ - "user_{user_id}_*", # All user-related caches - "*_user_{user_id}_*", # Caches that include this user - "search_results_*" # All search result caches - ] + "user_{owner_id}_widgets:*", # all paginated lists for this user + "widget_search:*", # all search result caches + ], ) -async def update_user_profile(request: Request, user_id: int, data: UserUpdate): - # Invalidates all matching cache patterns - await crud_users.update(db=db, id=user_id, object=data) - return {"message": "Profile updated"} +async def delete_widget( + request: Request, + widget_id: int, + owner_id: int, + ..., +) -> None: + await widget_service.delete(widget_id, db) ``` -**Pattern Examples:** +**Memcached doesn't support patterns.** When `CACHE_BACKEND=memcached`, `pattern_to_invalidate_extra` raises `PatternMatchingNotSupportedError` (logged at error level). The non-pattern delete still happens. 
-- `user_*` - All keys starting with "user_" -- `*_posts` - All keys ending with "_posts" -- `user_*_posts_*` - Complex patterns with wildcards -- `temp_*` - Temporary cache entries +## The Provider API -## Configuration +For cache operations outside of routes (background jobs, services, scripts), use the provider API directly: -### Redis Settings +```python +from src.infrastructure.cache import ( + cache_provider, + clear, + delete, + delete_pattern, + exists, + get, + set, +) -Configure Redis connection in your environment settings: -```python -# core/config.py -class RedisCacheSettings(BaseSettings): - REDIS_CACHE_HOST: str = config("REDIS_CACHE_HOST", default="localhost") - REDIS_CACHE_PORT: int = config("REDIS_CACHE_PORT", default=6379) - REDIS_CACHE_PASSWORD: str = config("REDIS_CACHE_PASSWORD", default="") - REDIS_CACHE_DB: int = config("REDIS_CACHE_DB", default=0) - REDIS_CACHE_URL: str = f"redis://:{REDIS_CACHE_PASSWORD}@{REDIS_CACHE_HOST}:{REDIS_CACHE_PORT}/{REDIS_CACHE_DB}" -``` +# Store a value (any JSON-serializable type) +await set(key="config:site_name", value="My App", expiration=3600) + +# Retrieve it +value = await get(key="config:site_name") -### Environment Variables +# Existence check +if await exists(key="config:site_name"): + ... -```bash -# Basic Configuration -REDIS_CACHE_HOST=localhost -REDIS_CACHE_PORT=6379 +# Delete a key +await delete(key="config:site_name") -# Production Configuration -REDIS_CACHE_HOST=redis.production.com -REDIS_CACHE_PORT=6379 -REDIS_CACHE_PASSWORD=your-secure-password -REDIS_CACHE_DB=0 +# Delete by pattern (Redis only) +await delete_pattern(pattern="user:42:*") -# Docker Compose -REDIS_CACHE_HOST=redis -REDIS_CACHE_PORT=6379 +# Clear everything +await clear() ``` -### Connection Pool Setup +Values are serialized via `fastapi.encoders.jsonable_encoder` automatically — you can store dicts, lists, and any JSON-compatible structure. 
-The boilerplate automatically configures Redis connection pooling: +## Cache Key Conventions + +The decorator generates keys as: -```python -# core/setup.py -async def create_redis_cache_pool() -> None: - """Initialize Redis connection pool for caching.""" - cache.pool = redis.ConnectionPool.from_url( - settings.REDIS_CACHE_URL, - max_connections=20, # Maximum connections in pool - retry_on_timeout=True, # Retry on connection timeout - socket_timeout=5.0, # Socket timeout in seconds - health_check_interval=30 # Health check frequency - ) - cache.client = redis.Redis.from_pool(cache.pool) +``` +{formatted_key_prefix}:{resource_id} ``` -### Cache Client Usage +A few patterns the codebase uses: -Direct Redis client access for custom caching logic: +| Pattern | Use case | +|---------|----------| +| `widget:42` | Single resource by id | +| `user_widgets:5` | List of a user's widgets | +| `user_{username}_widgets:page_{page}` | Paginated list scoped to a user | +| `search:{query_hash}` | Hashed search query | +| `analytics_{user_id}_30d:report` | Time-windowed analytics | -```python -from app.core.utils.cache import client - -async def custom_cache_operation(): - if client is None: - raise MissingClientError("Redis client not initialized") - - # Set custom cache entry - await client.set("custom_key", "custom_value", ex=3600) - - # Get cached value - cached_value = await client.get("custom_key") - - # Delete cache entry - await client.delete("custom_key") - - # Bulk operations - pipe = client.pipeline() - pipe.set("key1", "value1") - pipe.set("key2", "value2") - pipe.expire("key1", 3600) - await pipe.execute() +Pick prefixes that: + +1. **Are unique** — never let two unrelated caches collide on the same key +2. **Match how you'll invalidate** — if you'll wipe by user, include the user identifier +3. 
**Are predictable** — anyone debugging should be able to read the key and know what's in it + +## Configuration + +```env +CACHE_ENABLED=true +CACHE_BACKEND=redis # or "memcached" +DEFAULT_CACHE_EXPIRATION=3600 + +# Redis backend +CACHE_REDIS_HOST=redis # use "localhost" without Docker +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +CACHE_REDIS_PASSWORD= +CACHE_REDIS_CONNECT_TIMEOUT=5 +CACHE_REDIS_POOL_SIZE=10 ``` -## Performance Optimization +When `CACHE_ENABLED=false`, the decorator becomes a no-op (the handler runs every time). Use this in tests or when isolating performance issues. -### Connection Pooling +## Picking Expiration Times -Connection pooling prevents the overhead of creating new Redis connections for each request: +| Data shape | Suggested TTL | +|------------|---------------| +| Static reference data (e.g. country list, tier list) | 24 hours (`86400`) | +| User profile / public objects | 5–30 minutes (`300`–`1800`) | +| Paginated list endpoints | 1–5 minutes (`60`–`300`) | +| Search results | 5–15 minutes (`300`–`900`) | +| Frequently changing dashboards | 30–60 seconds | -```python -# Benefits of connection pooling: -# - Reuses existing connections -# - Handles connection failures gracefully -# - Provides connection health checks -# - Supports concurrent operations - -# Pool configuration -redis.ConnectionPool.from_url( - settings.REDIS_CACHE_URL, - max_connections=20, # Adjust based on expected load - retry_on_timeout=True, # Handle network issues - socket_keepalive=True, # Keep connections alive - socket_keepalive_options={} -) -``` +Default is 1 hour (`3600`). Override per route based on staleness tolerance. -### Cache Key Generation +## Real Examples -The cache decorator automatically generates keys using this pattern: +The boilerplate doesn't currently use `@cache` on its built-in routes (the existing endpoints are admin/list operations where the data churns enough that caching isn't a clear win). 
Add `@cache` to your own modules where it pays off — typically: read-heavy GETs on rarely-changing data. -```python -# Decorator generates: "{formatted_key_prefix}:{resource_id}" -@cache(key_prefix="post_cache", resource_id_name="post_id") -# Generates: "post_cache:123" +## Performance Tips -@cache(key_prefix="{username}_posts:page_{page}") -# Generates: "johndoe_posts:page_1:456" (where 456 is the resource_id) +### Use `schema_to_select` Together with Caching -# The system handles key formatting automatically - you just provide the prefix template -``` +When the underlying CRUD call uses `schema_to_select=WidgetRead`, the cached payload is the trimmed dict — smaller cache values, faster serialization on hit. -**What you control:** -- `key_prefix` template with placeholders like `{username}`, `{page}` -- `resource_id_name` to specify which parameter to use as the ID -- The decorator handles the rest +### Don't Cache Personalized Responses Globally -**Generated key examples from the boilerplate:** -```python -# From posts.py -"{username}_posts:page_{page}:items_per_page_{items_per_page}" → "john_posts:page_1:items_per_page_10:789" -"{username}_post_cache" → "john_post_cache:123" -``` +If your handler returns different data per user but the cache key only includes the resource ID, **users see each other's data**. Either: -### Expiration Strategies +- Include `user_id` in the prefix: `key_prefix="widget_for_user_{user_id}"` +- Don't cache it -Choose appropriate expiration times based on data characteristics: +### Cache the Response, Not the Computation -```python -# Static reference data (rarely changes) -@cache(key_prefix="countries", expiration=86400) # 24 hours +The decorator caches the route handler's return value. If you need to cache an expensive sub-computation but not the whole response, use the provider API directly inside your service. + +### Watch Pool Saturation + +Default `CACHE_REDIS_POOL_SIZE=10` is enough for typical workloads. 
If you have very high concurrency on cached endpoints, raise it. Watch the application logs for `redis.exceptions.ConnectionError` — that often means pool exhaustion. -# User-generated content (changes moderately) -@cache(key_prefix="user_posts", expiration=1800) # 30 minutes +## Graceful Degradation -# Real-time data (changes frequently) -@cache(key_prefix="live_stats", expiration=60) # 1 minute +If the cache backend is unreachable, the decorator catches the error and falls through to run the handler. Your endpoint keeps working at non-cached speed. This fail-open behavior is intentional — cached data is reproducible from the database, so cache outages shouldn't take the API down. -# Search results (can be stale) -@cache(key_prefix="search", expiration=3600) # 1 hour +You'll see a warning in the logs: + +``` +Cache backend not available: ``` -This comprehensive Redis caching system provides high-performance data access while maintaining data consistency through intelligent invalidation strategies. \ No newline at end of file +That's your signal to investigate the cache infrastructure. + +## Troubleshooting + +### Decorator never reads from cache + +Check that `request: Request` is the first parameter of the decorated function. Without it, the decorator can't determine the HTTP method and can't decide whether to cache or invalidate. + +### Pattern invalidation fails on Memcached + +`PatternMatchingNotSupportedError` is expected — Memcached doesn't support pattern operations. Either switch to Redis or invalidate keys explicitly via `to_invalidate_extra`. + +### Cached data is stale after a mutation + +The mutation route needs the **same `(key_prefix, resource_id_name)`** as the GET route. If your `PATCH /widgets/{widget_id}` uses `key_prefix="widget"` and your `GET /widgets/{widget_id}` uses `key_prefix="widget_cache"`, they won't talk to each other. + +### Cache returns the wrong user's data + +You're keying by resource ID without including the user. 
See "Don't Cache Personalized Responses Globally" above. + +## Key Files + +| Component | Location | +|-----------|----------| +| Decorator | `backend/src/infrastructure/cache/decorator.py` | +| Provider API | `backend/src/infrastructure/cache/provider.py` | +| Redis backend | `backend/src/infrastructure/cache/backends/redis.py` | +| Memcached backend | `backend/src/infrastructure/cache/backends/memcached.py` | +| Settings | `backend/src/infrastructure/config/settings.py` (`CacheSettings`) | + +## Next Steps + +- **[Client Cache](client-cache.md)** — `Cache-Control` headers for browser caching +- **[Cache Strategies](cache-strategies.md)** — Patterns for keys, related-key invalidation, cache-aside flows +- **[Environment Variables](../configuration/environment-variables.md#cache)** — Full settings reference diff --git a/docs/user-guide/configuration/docker-setup.md b/docs/user-guide/configuration/docker-setup.md index fed2dac6..f7bcf91f 100644 --- a/docs/user-guide/configuration/docker-setup.md +++ b/docs/user-guide/configuration/docker-setup.md @@ -1,555 +1,348 @@ # Docker Setup -Learn how to configure and run the FastAPI Boilerplate using Docker Compose. The project includes a complete containerized setup with PostgreSQL, Redis, background workers, and optional services. +This page walks through running the boilerplate in containers. The Python project lives at `backend/`, so all Docker operations happen from there. -## Quick Start +!!! info "docker-compose.yml status" + The repository ships a `backend/Dockerfile` (multi-stage). A canonical `backend/docker-compose.yml` is on the way — until then, the **Recommended Compose File** below is what to drop in at `backend/docker-compose.yml` to get the `docker compose up` flow running. -The fastest way to get started is with the setup script: +## Quick Start ```bash -./setup.py +cd backend +cp .env.example .env +# edit .env (set SECRET_KEY, change default DB password, etc.) 
+docker compose up ``` -This script helps you choose between three deployment configurations: - -- **Local development** (`./setup.py local`) - Uvicorn with auto-reload -- **Staging** (`./setup.py staging`) - Gunicorn with workers -- **Production** (`./setup.py production`) - NGINX + Gunicorn +## Dockerfile Architecture -Each option copies the appropriate `Dockerfile`, `docker-compose.yml`, and `.env.example` files from the `scripts/` folder. +`backend/Dockerfile` uses **five stages** built from `python:3.11-slim`: -## Docker Compose Architecture +| Stage | Purpose | +|-------|---------| +| `requirements-stage` | Exports pinned requirements from `uv.lock` into `requirements-prod.txt` and `requirements-dev.txt`. Uses the official `astral-sh/uv` image to do this reliably. | +| `base` | Installs system deps (gcc), production Python deps, and copies `src/` into the image. Sets `PYTHONPATH=/app/src`. | +| `dev` | Adds dev requirements and `tests/`, runs as a non-root `appuser`, starts with `fastapi dev interfaces/main.py --host 0.0.0.0 --port 8000`. | +| `migrate` | Adds `migrations/` and `alembic.ini`. Default command is `alembic upgrade head`. Useful as a one-off job before the prod app starts. | +| `prod` | Same as base, runs as non-root, starts with `fastapi run interfaces/main.py --host 0.0.0.0 --port 8000 --workers $WORKERS` (defaults to 1). 
| -The boilerplate includes these core services: +You select a stage with `--target` when building: -```yaml -services: - web: # FastAPI application (uvicorn or gunicorn) - worker: # ARQ background task worker - db: # PostgreSQL 13 database - redis: # Redis Alpine for caching/queues - # Optional services (commented out by default): - # pgadmin: # Database administration - # nginx: # Reverse proxy - # create_superuser: # One-time superuser creation - # create_tier: # One-time tier creation +```bash +docker build --target dev -t fastapi-boilerplate:dev backend +docker build --target prod -t fastapi-boilerplate:prod backend +docker build --target migrate -t fastapi-boilerplate:migrate backend ``` -## Basic Docker Compose +## Recommended Compose File -### Main Configuration - -The main `docker-compose.yml` includes: +Save this as `backend/docker-compose.yml`. It brings up Postgres, Redis, and the FastAPI app in dev mode: ```yaml -version: '3.8' - services: - web: + app: build: context: . dockerfile: Dockerfile - # Development mode (reload enabled) - command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - # Production mode (uncomment for production) - # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 + target: dev env_file: - - ./src/.env + - .env ports: - "8000:8000" - depends_on: - - db - - redis volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - worker: - build: - context: . 
- dockerfile: Dockerfile - command: arq app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env + - ./src:/app/src # live code reload + - ./tests:/app/src/tests depends_on: - db - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env db: - image: postgres:13 - env_file: - - ./src/.env + image: postgres:17-alpine + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-postgres} volumes: - postgres-data:/var/lib/postgresql/data - expose: - - "5432" + ports: + - "5432:5432" redis: - image: redis:alpine + image: redis:7-alpine volumes: - redis-data:/data - expose: - - "6379" + ports: + - "6379:6379" volumes: postgres-data: redis-data: ``` -### Environment File Loading +### Matching `.env` for Compose -All services automatically load environment variables from `./src/.env`: +When the app talks to the other services in the Compose network, it uses **service names** as hostnames: -```yaml -env_file: - - ./src/.env +```env +# In backend/.env +POSTGRES_SERVER=db +CACHE_REDIS_HOST=redis +RATE_LIMITER_REDIS_HOST=redis +TASKIQ_REDIS_HOST=redis ``` -The Docker services use these environment variables: +If you also use the host machine to reach Postgres/Redis directly (e.g. for a local dev tool), keep `localhost` working by exposing those ports as the example does (`5432:5432`, `6379:6379`). -- `POSTGRES_USER`, `POSTGRES_PASSWORD`, `POSTGRES_DB` for database -- `REDIS_*_HOST` variables automatically resolve to service names -- All application settings from your `.env` file +## Service Reference -## Service Details +### `app` — FastAPI Application -### Web Service (FastAPI Application) +Built from the `dev` Dockerfile stage. Runs `fastapi dev`, which auto-reloads on code changes. The volume mount on `./src` makes the reload pick up your edits live. 
-The web service runs your FastAPI application: +To switch to production mode, change `target: dev` → `target: prod` and drop the volume mounts. -```yaml -web: - build: - context: . - dockerfile: Dockerfile - # Development: uvicorn with reload - command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - # Production: gunicorn with multiple workers (commented out) - # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 - env_file: - - ./src/.env - ports: - - "8000:8000" # Direct access in development - volumes: - - ./src/app:/code/app # Live code reloading - - ./src/.env:/code/.env -``` +### `db` — PostgreSQL 17 -**Key Features:** +Postgres 17 (alpine for size). Reads `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` from the environment, and persists data in the named volume `postgres-data`. -- **Development mode**: Uses uvicorn with `--reload` for automatic code reloading -- **Production mode**: Switch to gunicorn with multiple workers (commented out) -- **Live reloading**: Source code mounted as volume for development -- **Port exposure**: Direct access on port 8000 (can be disabled for nginx) +### `redis` — Redis 7 -### Worker Service (Background Tasks) +Used for cache (`CACHE_REDIS_DB=0`), rate limiting (`RATE_LIMITER_REDIS_DB=1`), sessions, and the Taskiq broker (`TASKIQ_REDIS_DB=3`). The boilerplate uses different DB numbers so they don't interfere. -Handles background job processing with ARQ: +## Optional Services -```yaml -worker: - build: - context: . - dockerfile: Dockerfile - command: arq app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env -``` +Add these to your `docker-compose.yml` as needed. 
-**Features:** -- Runs ARQ worker for background job processing -- Shares the same codebase and environment as web service -- Automatically connects to Redis for job queues -- Live code reloading in development +### Taskiq Worker -### Database Service (PostgreSQL 13) +To process background tasks, add a worker service: ```yaml -db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - expose: - - "5432" # Internal network only + worker: + build: + context: . + dockerfile: Dockerfile + target: dev + env_file: + - .env + command: taskiq worker infrastructure.taskiq.worker:default_broker + volumes: + - ./src:/app/src + depends_on: + - db + - redis ``` -**Configuration:** -- Uses environment variables: `POSTGRES_USER`, `POSTGRES_PASSWORD`, `POSTGRES_DB` -- Data persisted in named volume `postgres-data` -- Only exposed to internal Docker network (no external port) -- To enable external access, uncomment the ports section +Scale workers with `docker compose up --scale worker=3`. -### Redis Service +### Migrations Job + +Run Alembic migrations before the app starts: ```yaml -redis: - image: redis:alpine - volumes: - - redis-data:/data - expose: - - "6379" # Internal network only + migrate: + build: + context: . 
+ dockerfile: Dockerfile + target: migrate + env_file: + - .env + depends_on: + - db ``` -**Features:** -- Lightweight Alpine Linux image -- Data persistence with named volume -- Used for caching, job queues, and rate limiting -- Internal network access only - -## Optional Services +```bash +docker compose run --rm migrate +``` -### Database Administration (pgAdmin) +### Initial Setup Job -Uncomment to enable web-based database management: +Create the first admin user and default tier on a fresh DB: ```yaml -pgadmin: - container_name: pgadmin4 - image: dpage/pgadmin4:latest - restart: always - ports: - - "5050:80" - volumes: - - pgadmin-data:/var/lib/pgadmin - env_file: - - ./src/.env - depends_on: - - db + setup: + build: + context: . + dockerfile: Dockerfile + target: dev + env_file: + - .env + command: python -m scripts.setup_initial_data + depends_on: + - db ``` -**Usage:** -- Access at `http://localhost:5050` -- Requires `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD` in `.env` -- Connect to database using service name `db` and port `5432` +```bash +docker compose run --rm setup +``` -### Reverse Proxy (Nginx) +### pgAdmin -Uncomment for production-style reverse proxy: +If you want a web UI for the database, add: ```yaml -nginx: - image: nginx:latest - ports: - - "80:80" - volumes: - - ./default.conf:/etc/nginx/conf.d/default.conf - depends_on: - - web -``` - -**Configuration:** -The included `default.conf` provides: - -```nginx -server { - listen 80; - - location / { - proxy_pass http://web:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} + pgadmin: + image: dpage/pgadmin4:latest + restart: unless-stopped + environment: + PGADMIN_DEFAULT_EMAIL: admin@example.com + PGADMIN_DEFAULT_PASSWORD: admin + ports: + - "5050:80" + depends_on: + - db ``` -**When using nginx:** - -1. Uncomment the nginx service -2. 
Comment out the `ports` section in the web service -3. Uncomment `expose: ["8000"]` in the web service +Visit <http://localhost:5050>, log in, and add a server with hostname `db`, port `5432`, user `postgres` (or whatever you set in `.env`). -### Initialization Services +### Memcached (alternative cache backend) -#### Create First Superuser +If you prefer Memcached over Redis: ```yaml -create_superuser: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - - web - command: python -m src.scripts.create_first_superuser - volumes: - - ./src:/code/src + memcached: + image: memcached:1.6-alpine + ports: + - "11211:11211" ``` -#### Create First Tier +And in `.env`: ```yaml -create_tier: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - - web - command: python -m src.scripts.create_first_tier - volumes: - - ./src:/code/src +```env +CACHE_BACKEND=memcached +RATE_LIMITER_BACKEND=memcached +CACHE_MEMCACHED_HOST=memcached +RATE_LIMITER_MEMCACHED_HOST=memcached ``` -**Usage:** - -- These are one-time setup services -- Uncomment when you need to initialize data -- Run once, then comment out again - -## Dockerfile Details - -The project uses a multi-stage Dockerfile with `uv` for fast Python package management: - -### Builder Stage - -```dockerfile -FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS builder -ENV UV_COMPILE_BYTECODE=1 -ENV UV_LINK_MODE=copy -WORKDIR /app -# Install dependencies (cached layer) -RUN --mount=type=cache,target=/root/.cache/uv \ --mount=type=bind,source=uv.lock,target=uv.lock \ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ uv sync --locked --no-install-project -# Copy and install project -COPY . 
/app -RUN --mount=type=cache,target=/root/.cache/uv \ - uv sync --locked --no-editable +```yaml + rabbitmq: + image: rabbitmq:3.13-management-alpine + environment: + RABBITMQ_DEFAULT_USER: ${TASKIQ_RABBITMQ_USER:-guest} + RABBITMQ_DEFAULT_PASS: ${TASKIQ_RABBITMQ_PASSWORD:-guest} + ports: + - "5672:5672" + - "15672:15672" # management UI ``` -### Final Stage - -```dockerfile -FROM python:3.11-slim-bookworm - -# Create non-root user for security -RUN groupadd --gid 1000 app \ - && useradd --uid 1000 --gid app --shell /bin/bash --create-home app +In `.env`: -# Copy virtual environment from builder -COPY --from=builder --chown=app:app /app/.venv /app/.venv - -ENV PATH="/app/.venv/bin:$PATH" -USER app -WORKDIR /code - -# Default command (can be overridden) -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] +```env +TASKIQ_BROKER_TYPE=rabbitmq +TASKIQ_RABBITMQ_HOST=rabbitmq ``` -**Security Features:** - -- Non-root user execution -- Multi-stage build for smaller final image -- Cached dependency installation - -## Common Docker Commands - -### Development Workflow +## Common Commands ```bash -# Start all services +cd backend + +# Bring everything up (foreground, attached logs) docker compose up -# Start in background +# Detached docker compose up -d -# Rebuild and start (after code changes) +# Rebuild after dependency changes docker compose up --build -# View logs -docker compose logs -f web -docker compose logs -f worker +# Logs for a specific service +docker compose logs -f app -# Stop services -docker compose down +# Open a shell inside the app container +docker compose exec app bash -# Stop and remove volumes (reset data) -docker compose down -v -``` - -### Service Management - -```bash -# Start specific services -docker compose up web db redis - -# Scale workers -docker compose up --scale worker=3 - -# Execute commands in running containers -docker compose exec web bash +# Run a one-off command +docker compose exec app uv run alembic 
upgrade head docker compose exec db psql -U postgres docker compose exec redis redis-cli -# View service status -docker compose ps -``` - -### Production Mode - -To switch to production mode: - -1. **Enable Gunicorn:** - ```yaml - # Comment out uvicorn line - # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - # Uncomment gunicorn line - command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 - ``` - -2. **Enable Nginx** (optional): - ```yaml - # Uncomment nginx service - nginx: - image: nginx:latest - ports: - - "80:80" - - # In web service, comment out ports and uncomment expose - # ports: - # - "8000:8000" - expose: - - "8000" - ``` - -3. **Remove development volumes:** - ```yaml - # Remove or comment out for production - # volumes: - # - ./src/app:/code/app - # - ./src/.env:/code/.env - ``` - -## Environment Configuration - -### Service Communication - -Services communicate using service names: +# Stop everything +docker compose down -```yaml -# In your .env file for Docker -POSTGRES_SERVER=db # Not localhost -REDIS_CACHE_HOST=redis # Not localhost -REDIS_QUEUE_HOST=redis -REDIS_RATE_LIMIT_HOST=redis +# Stop and wipe volumes (⚠️ deletes data) +docker compose down -v ``` -### Port Management +## Production-Style Setup -**Development (default):** -- Web: `localhost:8000` (direct access) -- Database: `localhost:5432` (uncomment ports to enable) -- Redis: `localhost:6379` (uncomment ports to enable) -- pgAdmin: `localhost:5050` (if enabled) +For a more production-like local stack: -**Production with Nginx:** -- Web: `localhost:80` (through nginx) -- Database: Internal only -- Redis: Internal only +1. **Use the `prod` stage**: change `target: dev` to `target: prod` in the `app` service. +2. **Drop dev volume mounts**: remove `./src:/app/src` so the image is the source of truth. +3. **Run migrations as a separate job** (the `migrate` service above) before the app starts. +4. 
**Bump worker count**: set `WORKERS=4` in `.env` (the `prod` command reads it). +5. **Add a reverse proxy** if you need TLS — Caddy or Traefik are simpler to configure than nginx for single-host setups. ## Troubleshooting -### Common Issues +### Container won't start -**Container won't start:** ```bash -# Check logs -docker compose logs web - -# Rebuild image -docker compose build --no-cache web - -# Check environment file -docker compose exec web env | grep POSTGRES +docker compose logs app +docker compose build --no-cache app ``` -**Database connection issues:** +### Database connection refused + ```bash -# Check if db service is running +# Is the db service up? docker compose ps db -# Test connection from web container -docker compose exec web ping db +# Can the app container resolve "db"? +docker compose exec app python -c "import socket; print(socket.gethostbyname('db'))" -# Check database logs +# Inspect db logs docker compose logs db ``` -**Port conflicts:** +### Code changes not picking up + +Make sure you have the `./src:/app/src` volume mount in the `app` service, and that `target: dev` is set (the `dev` stage uses `fastapi dev` which has reload enabled). The `prod` stage does **not** auto-reload. 
+ +### Port already in use + ```bash -# Check what's using the port lsof -i :8000 - -# Use different ports +# or change the host-side port in compose: ports: - - "8001:8000" # Use port 8001 instead + - "8080:8000" ``` -### Development vs Production - -**Development features:** - -- Live code reloading with volume mounts -- Direct port access -- uvicorn with `--reload` -- Exposed database/redis ports for debugging +### Resetting everything -**Production optimizations:** - -- No volume mounts (code baked into image) -- Nginx reverse proxy -- Gunicorn with multiple workers -- Internal service networking only -- Resource limits and health checks +```bash +cd backend +docker compose down -v # wipes volumes +docker compose build --no-cache +docker compose up +``` ## Best Practices ### Development -- Use volume mounts for live code reloading -- Enable direct port access for debugging -- Use uvicorn with reload for fast development -- Enable optional services (pgAdmin) as needed +- Use `target: dev` for live reload +- Mount `./src` as a volume so edits don't require rebuilds +- Expose Postgres/Redis ports for easy local debugging +- Keep `.env` out of version control (it's already in `.gitignore`) ### Production -- Switch to gunicorn with multiple workers -- Use nginx for reverse proxy and load balancing -- Remove volume mounts and bake code into images -- Use internal networking only -- Set resource limits and health checks +- Use `target: prod` and remove dev volume mounts +- Run the `migrate` stage as a separate job before launching the app +- Set `ENVIRONMENT=production` to enable the security validator +- Run as the non-root `appuser` (already set up in the Dockerfile) +- Pin image tags (`postgres:17-alpine`, not `postgres:latest`) ### Security -- Containers run as non-root user -- Use internal networking for service communication -- Don't expose database/redis ports externally -- Use Docker secrets for sensitive data in production - -### Monitoring -- Use `docker 
compose logs` to monitor services -- Set up health checks for all services -- Monitor resource usage with `docker stats` -- Use structured logging for better observability - -The Docker setup provides everything you need for both development and production. Start with the default configuration and customize as your needs grow! \ No newline at end of file +- Containers run as non-root in dev/prod stages +- Don't expose the Postgres/Redis ports to public networks in production +- Set strong `POSTGRES_PASSWORD`, Redis passwords (`CACHE_REDIS_PASSWORD`, etc.) and `SECRET_KEY` before deploying + +## See Also + +- **[Environment Variables](environment-variables.md)** — Full env var reference +- **[Settings Classes](settings-classes.md)** — How env vars become Python settings +- **[Production](../production.md)** — Production deployment guide diff --git a/docs/user-guide/configuration/environment-specific.md b/docs/user-guide/configuration/environment-specific.md index 92640702..b2f83697 100644 --- a/docs/user-guide/configuration/environment-specific.md +++ b/docs/user-guide/configuration/environment-specific.md @@ -1,679 +1,299 @@ # Environment-Specific Configuration -Learn how to configure your FastAPI application for different environments (development, staging, production) with appropriate security, performance, and monitoring settings. +The boilerplate adapts its behavior based on the `ENVIRONMENT` variable. This page covers the recommended settings for each environment and the behaviors the codebase already changes for you. 
-## Environment Types +## Supported Environments -The boilerplate supports three environment types: +```env +# Options: production, staging, development, local +ENVIRONMENT=development +``` -- **`local`** - Development environment with full debugging -- **`staging`** - Pre-production testing environment -- **`production`** - Production environment with security hardening +| Value | Intended Use | +|-------|--------------| +| `development` | Local dev with verbose logging and DEBUG-level output | +| `local` | Equivalent to `development` for default config (used by tests / CI) | +| `staging` | Pre-production testing — structured logs, INFO level | +| `production` | Live deployment — JSON logs, security validator on, docs gated | -Set the environment type with: +## What the Codebase Does for You -```env -ENVIRONMENT="local" # or "staging" or "production" -``` +The boilerplate already changes its own behavior based on `ENVIRONMENT`. You don't need to write conditional code for these: -## Development Environment +| Behavior | development / local | staging | production | +|----------|---------------------|---------|------------| +| **Logging style** | Detailed text, color console | Structured key/value | JSON | +| **Default log level** | DEBUG (when `LOG_DEVELOPMENT_VERBOSE=true`) | INFO | WARNING (when `LOG_PRODUCTION_OPTIMIZE=true`) | +| **Noisy library loggers** | Normal level | Normal level | Quieted (urllib3, sqlalchemy, redis, etc.) | +| **Security validator** | Skipped | Skipped | Runs at startup if `PRODUCTION_SECURITY_VALIDATION_ENABLED=true` (default) | +| **Docs at `/docs`** | Available | Available | Disabled unless `ENABLE_DOCS_IN_PRODUCTION=true` | -### Local Development Settings +The logging behavior is driven by `src/infrastructure/logging/config.py`. The security validator lives in `src/infrastructure/security/`. -Create `src/.env.development`: +## Development + +For day-to-day local development. 
```env -# ------------- environment ------------- -ENVIRONMENT="local" +ENVIRONMENT=development DEBUG=true -# ------------- app settings ------------- -APP_NAME="MyApp (Development)" -APP_VERSION="0.1.0-dev" +# App metadata +APP_NAME=MyApp (Development) +VERSION=0.1.0-dev -# ------------- database ------------- -POSTGRES_USER="dev_user" -POSTGRES_PASSWORD="dev_password" -POSTGRES_SERVER="localhost" +# Database (local Postgres or Docker Compose) +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_DB=myapp_dev +POSTGRES_SERVER=localhost # or "db" with Docker Compose POSTGRES_PORT=5432 -POSTGRES_DB="myapp_dev" - -# ------------- security ------------- -SECRET_KEY="dev-secret-key-not-for-production-use" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=60 # Longer for development -REFRESH_TOKEN_EXPIRE_DAYS=30 # Longer for development - -# ------------- redis ------------- -REDIS_CACHE_HOST="localhost" -REDIS_CACHE_PORT=6379 -REDIS_QUEUE_HOST="localhost" -REDIS_QUEUE_PORT=6379 -REDIS_RATE_LIMIT_HOST="localhost" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- caching ------------- -CLIENT_CACHE_MAX_AGE=0 # Disable caching for development - -# ------------- rate limiting ------------- -DEFAULT_RATE_LIMIT_LIMIT=1000 # Higher limits for development -DEFAULT_RATE_LIMIT_PERIOD=3600 - -# ------------- admin ------------- -ADMIN_NAME="Dev Admin" -ADMIN_EMAIL="admin@localhost" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="admin123" - -# ------------- tier ------------- -TIER_NAME="dev_tier" - -# ------------- logging ------------- -DATABASE_ECHO=true # Log all SQL queries -``` -### Development Features +# Security — keep a placeholder, never reuse for staging/prod +SECRET_KEY=insecure-dev-key-replace-me -```python -# Development-specific features -if settings.ENVIRONMENT == "local": - # Enable API documentation - app.openapi_url = "/openapi.json" - app.docs_url = "/docs" - app.redoc_url = "/redoc" -``` +# Sessions — relax cookie security for plain HTTP 
+SESSION_SECURE_COOKIES=false +CSRF_ENABLED=false # often easier when testing with curl + +# Cache / Rate limiter / Taskiq — point at localhost +CACHE_REDIS_HOST=localhost +RATE_LIMITER_REDIS_HOST=localhost +TASKIQ_REDIS_HOST=localhost + +# Looser limits while iterating +DEFAULT_RATE_LIMIT_LIMIT=1000 +DEFAULT_RATE_LIMIT_PERIOD=60 -### Docker Development Override - -`docker-compose.override.yml`: - -```yaml -version: '3.8' - -services: - web: - environment: - - ENVIRONMENT=local - - DEBUG=true - - DATABASE_ECHO=true - volumes: - - ./src:/code/src:cached - command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - ports: - - "8000:8000" - - db: - environment: - - POSTGRES_DB=myapp_dev - ports: - - "5432:5432" - - redis: - ports: - - "6379:6379" - - # Development tools - adminer: - image: adminer - ports: - - "8080:8080" - depends_on: - - db +# Admin user (used by setup_initial_data on first run) +ADMIN_NAME=Dev Admin +ADMIN_EMAIL=admin@localhost +ADMIN_USERNAME=admin +ADMIN_PASSWORD=admin123 + +# Logging +LOG_DEVELOPMENT_VERBOSE=true # gives you DEBUG-level console output ``` -## Staging Environment +!!! tip "Why disable CSRF in dev?" + The session cookie is HTTP-only and a CSRF token is also returned. Browser-based clients should send both. For curl/Postman testing, setting `CSRF_ENABLED=false` removes one moving piece. Re-enable it when testing the real frontend flow. -### Staging Settings +## Staging -Create `src/.env.staging`: +A pre-production rehearsal — same code paths as production, separate data, useful for catching environment-specific issues. ```env -# ------------- environment ------------- -ENVIRONMENT="staging" +ENVIRONMENT=staging DEBUG=false -# ------------- app settings ------------- -APP_NAME="MyApp (Staging)" -APP_VERSION="0.1.0-staging" +APP_NAME=MyApp (Staging) +VERSION=0.1.0-staging -# ------------- database ------------- -POSTGRES_USER="staging_user" -POSTGRES_PASSWORD="complex_staging_password_123!" 
-POSTGRES_SERVER="staging-db.example.com" +POSTGRES_USER=staging_user +POSTGRES_PASSWORD=use-a-strong-password +POSTGRES_SERVER=staging-db.example.com POSTGRES_PORT=5432 -POSTGRES_DB="myapp_staging" - -# ------------- security ------------- -SECRET_KEY="staging-secret-key-different-from-production" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 -CORS_ORIGINS=["https://staging.example.com"] -CORS_METHODS=["GET","POST","PUT","DELETE"] - -# ------------- redis ------------- -REDIS_CACHE_HOST="staging-redis.example.com" -REDIS_CACHE_PORT=6379 -REDIS_QUEUE_HOST="staging-redis.example.com" -REDIS_QUEUE_PORT=6379 -REDIS_RATE_LIMIT_HOST="staging-redis.example.com" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- caching ------------- -CLIENT_CACHE_MAX_AGE=300 # 5 minutes - -# ------------- rate limiting ------------- -DEFAULT_RATE_LIMIT_LIMIT=100 -DEFAULT_RATE_LIMIT_PERIOD=3600 +POSTGRES_DB=myapp_staging -# ------------- admin ------------- -ADMIN_NAME="Staging Admin" -ADMIN_EMAIL="admin@staging.example.com" -ADMIN_USERNAME="staging_admin" -ADMIN_PASSWORD="secure_staging_password_456!" 
+# Real key, distinct from prod +SECRET_KEY= -# ------------- tier ------------- -TIER_NAME="staging_tier" +# Lock cookies and CSRF down +SESSION_SECURE_COOKIES=true +CSRF_ENABLED=true -# ------------- logging ------------- -DATABASE_ECHO=false -``` +# Restrict CORS to staging domains +CORS_ORIGINS=https://staging.example.com +CORS_ALLOW_METHODS=GET,POST,PUT,DELETE,PATCH +CORS_ALLOW_HEADERS=Authorization,Content-Type -### Staging Features +# Real Redis hostnames — use separate DBs for cache/rate limit/taskiq +CACHE_REDIS_HOST=staging-redis.example.com +CACHE_REDIS_PASSWORD= +RATE_LIMITER_REDIS_HOST=staging-redis.example.com +RATE_LIMITER_REDIS_PASSWORD= +TASKIQ_REDIS_HOST=staging-redis.example.com +TASKIQ_REDIS_PASSWORD= -```python -# Staging-specific features -if settings.ENVIRONMENT == "staging": - # API docs available to superusers only - @app.get("/docs", include_in_schema=False) - async def custom_swagger_ui(current_user: User = Depends(get_current_superuser)): - return get_swagger_ui_html(openapi_url="/openapi.json") -``` +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 -### Docker Staging Configuration - -`docker-compose.staging.yml`: - -```yaml -version: '3.8' - -services: - web: - environment: - - ENVIRONMENT=staging - - DEBUG=false - deploy: - replicas: 2 - resources: - limits: - memory: 1G - reservations: - memory: 512M - restart: always - - db: - environment: - - POSTGRES_DB=myapp_staging - volumes: - - postgres_staging_data:/var/lib/postgresql/data - restart: always - - redis: - restart: always - - worker: - deploy: - replicas: 2 - restart: always - -volumes: - postgres_staging_data: -``` +ADMIN_NAME=Staging Admin +ADMIN_EMAIL=admin@staging.example.com +ADMIN_USERNAME=staging_admin +ADMIN_PASSWORD= -## Production Environment +# Logging — staging picks INFO automatically; can opt into file output +LOG_FILE_ENABLED=true +LOG_FILE_PATH=logs/app.log +``` -### Production Settings +## Production -Create `src/.env.production`: +Live traffic. 
Treat every setting as security-relevant. ```env -# ------------- environment ------------- -ENVIRONMENT="production" +ENVIRONMENT=production DEBUG=false -# ------------- app settings ------------- -APP_NAME="MyApp" -APP_VERSION="1.0.0" -CONTACT_NAME="Support Team" -CONTACT_EMAIL="support@example.com" - -# ------------- database ------------- -POSTGRES_USER="prod_user" -POSTGRES_PASSWORD="ultra_secure_production_password_789!" -POSTGRES_SERVER="prod-db.example.com" -POSTGRES_PORT=5433 # Custom port for security -POSTGRES_DB="myapp_production" - -# ------------- security ------------- -SECRET_KEY="ultra-secure-production-key-generated-with-openssl-rand-hex-32" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=15 # Shorter for security -REFRESH_TOKEN_EXPIRE_DAYS=3 # Shorter for security -CORS_ORIGINS=["https://example.com","https://www.example.com"] -CORS_METHODS=["GET","POST","PUT","DELETE"] -CORS_HEADERS=["Authorization","Content-Type"] - -# ------------- redis ------------- -REDIS_CACHE_HOST="prod-redis.example.com" -REDIS_CACHE_PORT=6380 # Custom port for security -REDIS_QUEUE_HOST="prod-redis.example.com" -REDIS_QUEUE_PORT=6380 -REDIS_RATE_LIMIT_HOST="prod-redis.example.com" -REDIS_RATE_LIMIT_PORT=6380 - -# ------------- caching ------------- -CLIENT_CACHE_MAX_AGE=3600 # 1 hour - -# ------------- rate limiting ------------- -DEFAULT_RATE_LIMIT_LIMIT=100 -DEFAULT_RATE_LIMIT_PERIOD=3600 - -# ------------- admin ------------- -ADMIN_NAME="System Administrator" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="sysadmin" -ADMIN_PASSWORD="extremely_secure_admin_password_with_symbols_#$%!" 
+APP_NAME=MyApp +VERSION=1.0.0 +CONTACT_NAME=Support Team +CONTACT_EMAIL=support@example.com -# ------------- tier ------------- -TIER_NAME="production_tier" +POSTGRES_USER=prod_user +POSTGRES_PASSWORD= +POSTGRES_SERVER=prod-db.example.com +POSTGRES_PORT=5432 +POSTGRES_DB=myapp_production + +# Generated with: python -c "import secrets; print(secrets.token_urlsafe(64))" +SECRET_KEY= + +# Production security validator is on by default +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=true + +# Sessions +SESSION_SECURE_COOKIES=true +SESSION_TIMEOUT_MINUTES=30 +SESSION_BACKEND=redis +CSRF_ENABLED=true +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 + +# Strict CORS +CORS_ORIGINS=https://example.com,https://www.example.com +CORS_ALLOW_METHODS=GET,POST,PUT,DELETE,PATCH +CORS_ALLOW_HEADERS=Authorization,Content-Type + +# Docs gated off by default in production +ENABLE_DOCS_IN_PRODUCTION=false + +# Real Redis with passwords; separate DBs per concern +CACHE_REDIS_HOST=prod-redis.example.com +CACHE_REDIS_PASSWORD= +CACHE_REDIS_DB=0 +RATE_LIMITER_REDIS_HOST=prod-redis.example.com +RATE_LIMITER_REDIS_PASSWORD= +RATE_LIMITER_REDIS_DB=1 +TASKIQ_REDIS_HOST=prod-redis.example.com +TASKIQ_REDIS_PASSWORD= +TASKIQ_REDIS_DB=3 -# ------------- logging ------------- -DATABASE_ECHO=false +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 + +# Admin (used by setup_initial_data only on first deploy) +ADMIN_NAME=System Administrator +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=sysadmin +ADMIN_PASSWORD= + +# Logging — production picks JSON automatically; LOG_PRODUCTION_OPTIMIZE quiets noisy libs +LOG_PRODUCTION_OPTIMIZE=true +LOG_FILE_ENABLED=true +LOG_FILE_PATH=/var/log/app/app.log ``` -### Production Security Features - -```python -# Production-specific features -if settings.ENVIRONMENT == "production": - # Disable API documentation - app.openapi_url = None - app.docs_url = None - app.redoc_url = None - - # Add security headers - 
@app.middleware("http") - async def add_security_headers(request: Request, call_next): - response = await call_next(request) - response.headers["X-Content-Type-Options"] = "nosniff" - response.headers["X-Frame-Options"] = "DENY" - response.headers["X-XSS-Protection"] = "1; mode=block" - response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains" - return response -``` +!!! danger "Production Security Validator" + With `ENVIRONMENT=production` and `PRODUCTION_SECURITY_VALIDATION_ENABLED=true` (both default), the app refuses to start if it finds insecure settings — e.g. the placeholder `SECRET_KEY`, `DEBUG=true`, `CORS_ORIGINS=*`. Set `PRODUCTION_SECURITY_STRICT_MODE=true` to make it stricter still. -### Docker Production Configuration - -`docker-compose.prod.yml`: - -```yaml -version: '3.8' - -services: - web: - environment: - - ENVIRONMENT=production - - DEBUG=false - deploy: - replicas: 3 - resources: - limits: - memory: 2G - cpus: '1' - reservations: - memory: 1G - cpus: '0.5' - restart: always - ports: [] # No direct exposure - - nginx: - image: nginx:alpine - ports: - - "80:80" - - "443:443" - volumes: - - ./nginx/nginx.conf:/etc/nginx/nginx.conf - - ./nginx/ssl:/etc/nginx/ssl - - ./nginx/htpasswd:/etc/nginx/htpasswd - depends_on: - - web - restart: always - - db: - environment: - - POSTGRES_DB=myapp_production - volumes: - - postgres_prod_data:/var/lib/postgresql/data - ports: [] # No external access - deploy: - resources: - limits: - memory: 4G - reservations: - memory: 2G - restart: always - - redis: - volumes: - - redis_prod_data:/data - ports: [] # No external access - deploy: - resources: - limits: - memory: 1G - reservations: - memory: 512M - restart: always - - worker: - deploy: - replicas: 2 - resources: - limits: - memory: 1G - reservations: - memory: 512M - restart: always - -volumes: - postgres_prod_data: - redis_prod_data: -``` +## Detecting the Environment in Code -## Environment Detection - -### Runtime Environment 
Checks +`src/infrastructure/config/enums.py` defines `EnvironmentOption` so you don't have to compare against magic strings: ```python -# src/app/core/config.py -class Settings(BaseSettings): - @computed_field - @property - def IS_DEVELOPMENT(self) -> bool: - return self.ENVIRONMENT == "local" - - @computed_field - @property - def IS_PRODUCTION(self) -> bool: - return self.ENVIRONMENT == "production" - - @computed_field - @property - def IS_STAGING(self) -> bool: - return self.ENVIRONMENT == "staging" - - -# Use in application -if settings.IS_DEVELOPMENT: - # Development-only code - pass - -if settings.IS_PRODUCTION: - # Production-only code - pass -``` +from src.infrastructure.config.settings import EnvironmentOption, get_settings -### Environment-Specific Validation +settings = get_settings() -```python -@model_validator(mode="after") -def validate_environment_config(self) -> "Settings": - if self.ENVIRONMENT == "production": - # Production validation - if self.DEBUG: - raise ValueError("DEBUG must be False in production") - if len(self.SECRET_KEY) < 32: - raise ValueError("SECRET_KEY must be at least 32 characters in production") - if "dev" in self.SECRET_KEY.lower(): - raise ValueError("Production SECRET_KEY cannot contain 'dev'") - - if self.ENVIRONMENT == "local": - # Development warnings - if not self.DEBUG: - logger.warning("DEBUG is False in development environment") - - return self +if settings.ENVIRONMENT == EnvironmentOption.PRODUCTION: + # production-only code + ... ``` -## Configuration Management +For helpers, add small properties to your own settings class — but for most cases the above is enough. Avoid scattering environment branches in business logic; keep them at startup or in middleware. + +## Managing Multiple Environments -### Environment File Templates +### One `.env` per environment -Create template files for each environment: +The simplest approach. Keep `.env.development`, `.env.staging`, `.env.production` *outside* version control (e.g. 
in a secrets manager) and symlink the active one: ```bash -# Create environment templates -cp src/.env.example src/.env.development -cp src/.env.example src/.env.staging -cp src/.env.example src/.env.production - -# Use environment-specific files -ln -sf .env.development src/.env # For development -ln -sf .env.staging src/.env # For staging -ln -sf .env.production src/.env # For production +# Switch environments locally +ln -sf .env.staging backend/.env ``` -### Configuration Validation +For staging/production you'd more typically: -```python -# src/scripts/validate_config.py -import asyncio -from src.app.core.config import settings -from src.app.core.db.database import async_get_db - - -async def validate_configuration(): - """Validate configuration for current environment.""" - print(f"Validating configuration for {settings.ENVIRONMENT} environment...") - - # Basic settings validation - assert settings.APP_NAME, "APP_NAME is required" - assert settings.SECRET_KEY, "SECRET_KEY is required" - assert len(settings.SECRET_KEY) >= 32, "SECRET_KEY must be at least 32 characters" - - # Environment-specific validation - if settings.ENVIRONMENT == "production": - assert not settings.DEBUG, "DEBUG must be False in production" - assert "dev" not in settings.SECRET_KEY.lower(), "Production SECRET_KEY invalid" - assert settings.POSTGRES_PORT != 5432, "Use custom PostgreSQL port in production" - - # Test database connection - try: - db = await anext(async_get_db()) - print("✓ Database connection successful") - await db.close() - except Exception as e: - print(f"✗ Database connection failed: {e}") - return False - - print("✓ Configuration validation passed") - return True - - -if __name__ == "__main__": - asyncio.run(validate_configuration()) -``` +- Pull secrets from a manager (AWS Secrets Manager, Vault, Doppler, 1Password) at deploy time +- Render the `.env` file from CI, or set env vars directly on the runtime (Kubernetes, ECS, systemd unit, etc.) 
-### Environment Switching +### Docker Compose Overrides -```bash -#!/bin/bash -# scripts/switch_env.sh - -ENV=$1 - -if [ -z "$ENV" ]; then - echo "Usage: $0 " - exit 1 -fi - -case $ENV in - development) - ln -sf .env.development src/.env - echo "Switched to development environment" - ;; - staging) - ln -sf .env.staging src/.env - echo "Switched to staging environment" - ;; - production) - ln -sf .env.production src/.env - echo "Switched to production environment" - echo "WARNING: Make sure to review all settings before deployment!" - ;; - *) - echo "Invalid environment: $ENV" - echo "Valid options: development, staging, production" - exit 1 - ;; -esac - -# Validate configuration -python -c "from src.app.core.config import settings; print(f'Current environment: {settings.ENVIRONMENT}')" -``` +For Compose, use overlay files: -## Security Best Practices +```bash +# Development: docker-compose.yml + docker-compose.override.yml (auto-loaded) +docker compose up -### Environment-Specific Security +# Staging +docker compose -f docker-compose.yml -f docker-compose.staging.yml up -```python -# Different security levels per environment -SECURITY_CONFIGS = { - "local": { - "token_expire_minutes": 60, - "enable_cors_origins": ["*"], - "enable_docs": True, - "log_level": "DEBUG", - }, - "staging": { - "token_expire_minutes": 30, - "enable_cors_origins": ["https://staging.example.com"], - "enable_docs": True, # For testing - "log_level": "INFO", - }, - "production": { - "token_expire_minutes": 15, - "enable_cors_origins": ["https://example.com"], - "enable_docs": False, - "log_level": "WARNING", - }, -} - -config = SECURITY_CONFIGS[settings.ENVIRONMENT] +# Production +docker compose -f docker-compose.yml -f docker-compose.production.yml up ``` -### Secrets Management - -```bash -# Use secrets management in production -# Instead of plain text environment variables -POSTGRES_PASSWORD_FILE="/run/secrets/postgres_password" -SECRET_KEY_FILE="/run/secrets/jwt_secret" - -# Docker 
secrets -services: - web: - secrets: - - postgres_password - - jwt_secret - environment: - - POSTGRES_PASSWORD_FILE=/run/secrets/postgres_password - - SECRET_KEY_FILE=/run/secrets/jwt_secret - -secrets: - postgres_password: - external: true - jwt_secret: - external: true -``` +The override files only need to specify what *changes* (target stage, removed dev volumes, scaling, secrets), not the full service definition. -## Monitoring and Logging +## Validating Configuration -### Environment-Specific Logging +Run a quick check that the app reads what you think: -```python -LOGGING_CONFIG = { - "local": { - "level": "DEBUG", - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s", - "handlers": ["console"], - }, - "staging": { - "level": "INFO", - "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s", - "handlers": ["console", "file"], - }, - "production": { - "level": "WARNING", - "format": "%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s", - "handlers": ["file", "syslog"], - }, -} +```bash +cd backend +uv run python -c " +from src.infrastructure.config.settings import get_settings +s = get_settings() +print(f'env : {s.ENVIRONMENT}') +print(f'debug : {s.DEBUG}') +print(f'app : {s.APP_NAME} v{s.VERSION}') +print(f'db host : {s.POSTGRES_SERVER}:{s.POSTGRES_PORT}/{s.POSTGRES_DB}') +print(f'cache : {s.CACHE_BACKEND} -> {s.CACHE_REDIS_HOST}:{s.CACHE_REDIS_PORT}') +print(f'cors : {s.CORS_ORIGINS}') +print(f'sessions : secure={s.SESSION_SECURE_COOKIES} csrf={s.CSRF_ENABLED}') +" ``` -### Health Checks by Environment - -```python -@app.get("/health") -async def health_check(): - health_info = { - "status": "healthy", - "environment": settings.ENVIRONMENT, - "version": settings.APP_VERSION, - } - - # Add detailed info in non-production - if not settings.IS_PRODUCTION: - health_info.update( - { - "database": await check_database_health(), - "redis": await check_redis_health(), - "worker_queue": await check_worker_health(), - 
} - ) - - return health_info -``` +For production deployment specifically, the security validator runs at startup — if it fails, the app exits before binding the port. That's the strongest signal. ## Best Practices ### Security - -- Use different secret keys for each environment -- Disable debug mode in staging and production -- Use custom ports in production -- Implement proper CORS policies -- Remove API documentation in production +- Generate a fresh `SECRET_KEY` per environment (never reuse) +- Pull secrets from a manager, not files committed to git +- Always set `SESSION_SECURE_COOKIES=true` outside development +- Restrict `CORS_ORIGINS` to your real domains in staging/production +- Set Redis passwords for staging/production +- Leave `PRODUCTION_SECURITY_VALIDATION_ENABLED=true` in production ### Performance - -- Configure appropriate resource limits per environment -- Use caching in staging and production -- Set shorter token expiration in production -- Use connection pooling in production - -### Configuration - -- Keep environment files in version control (except production) -- Use validation to prevent misconfiguration -- Document all environment-specific settings -- Test configuration changes in staging first - -### Monitoring - -- Use appropriate log levels per environment -- Monitor different metrics in each environment -- Set up alerts for production only -- Use health checks for all environments - -Environment-specific configuration ensures your application runs securely and efficiently in each deployment stage. Start with development settings and progressively harden for production! 
+- Use Redis (not Memcached) when you need persistence or multi-DB separation +- Set distinct Redis DB numbers for cache/rate-limit/taskiq (defaults 0/1/3) +- Tune `POSTGRES_POOL_SIZE` for your workload (default 20) +- Increase `TASKIQ_WORKER_CONCURRENCY` if jobs are I/O-bound + +### Operations +- Keep environment-specific values in your secrets manager, not env files +- Document any custom env vars you add in `.env.example` +- Test deployments in staging before production +- Monitor the logs at startup — the security validator will tell you what's wrong + +## See Also + +- **[Environment Variables](environment-variables.md)** — Complete reference of every variable +- **[Settings Classes](settings-classes.md)** — How variables become Python settings +- **[Docker Setup](docker-setup.md)** — Compose configuration per environment +- **[Production](../production.md)** — Production deployment guide diff --git a/docs/user-guide/configuration/environment-variables.md b/docs/user-guide/configuration/environment-variables.md index 53fd3ca6..2dc664ac 100644 --- a/docs/user-guide/configuration/environment-variables.md +++ b/docs/user-guide/configuration/environment-variables.md @@ -1,668 +1,347 @@ -# Configuration Guide +# Environment Variables Reference -This guide covers all configuration options available in the FastAPI Boilerplate, including environment variables, settings classes, and advanced deployment configurations. +This page is the complete reference for every environment variable the boilerplate reads. The source of truth is `backend/.env.example` — this page mirrors it with descriptions. -## Configuration Overview +All variables are loaded from `backend/.env` at application startup via Pydantic `BaseSettings` classes in `src/infrastructure/config/settings.py`. 
-The boilerplate uses a layered configuration approach: - -- **Environment Variables** (`.env` file) - Primary configuration method -- **Settings Classes** (`src/app/core/config.py`) - Python-based configuration -- **Docker Configuration** (`docker-compose.yml`) - Container orchestration -- **Database Configuration** (`alembic.ini`) - Database migrations - -## Environment Variables Reference - -All configuration is managed through environment variables defined in the `.env` file located in the `src/` directory. - -### Application Settings - -Basic application metadata displayed in API documentation: +## Environment ```env -# ------------- app settings ------------- -APP_NAME="Your App Name" -APP_DESCRIPTION="Your app description here" -APP_VERSION="0.1.0" -CONTACT_NAME="Your Name" -CONTACT_EMAIL="your.email@example.com" -LICENSE_NAME="MIT" +# Options: development, staging, production, local +ENVIRONMENT=development ``` -**Variables Explained:** - -- `APP_NAME`: Displayed in API documentation and responses -- `APP_DESCRIPTION`: Shown in OpenAPI documentation -- `APP_VERSION`: API version for documentation and headers -- `CONTACT_NAME`: Contact information for API documentation -- `CONTACT_EMAIL`: Support email for API users -- `LICENSE_NAME`: License type for the API +| Variable | Default | Purpose | +|----------|---------|---------| +| `ENVIRONMENT` | `development` | Drives logging style, docs visibility, and security validation. See [Environment-Specific](environment-specific.md). 
| -### Database Configuration - -PostgreSQL database connection settings: +## Database ```env -# ------------- database ------------- -POSTGRES_USER="your_postgres_user" -POSTGRES_PASSWORD="your_secure_password" -POSTGRES_SERVER="localhost" +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_DB=postgres +POSTGRES_SERVER=db # use "localhost" without Docker POSTGRES_PORT=5432 -POSTGRES_DB="your_database_name" +POSTGRES_SYNC_PREFIX=postgresql:// +POSTGRES_ASYNC_PREFIX=postgresql+asyncpg:// +CREATE_TABLES_ON_STARTUP=true ``` -**Variables Explained:** +| Variable | Default | Purpose | +|----------|---------|---------| +| `POSTGRES_USER` | `postgres` | Database user | +| `POSTGRES_PASSWORD` | `postgres` | Database password | +| `POSTGRES_DB` | `postgres` | Database name | +| `POSTGRES_SERVER` | `localhost` | Hostname (use `db` for Compose) | +| `POSTGRES_PORT` | `5432` | TCP port | +| `POSTGRES_SYNC_PREFIX` | `postgresql://` | Driver prefix for sync code (Alembic) | +| `POSTGRES_ASYNC_PREFIX` | `postgresql+asyncpg://` | Driver prefix for async code (the app) | +| `CREATE_TABLES_ON_STARTUP` | `true` | Auto-create tables from models on startup | +| `POSTGRES_POOL_SIZE` | `20` | SQLAlchemy connection pool size | +| `POSTGRES_MAX_OVERFLOW` | `0` | Pool overflow connections | -- `POSTGRES_USER`: Database user with appropriate permissions -- `POSTGRES_PASSWORD`: Strong password for database access -- `POSTGRES_SERVER`: Hostname or IP of PostgreSQL server -- `POSTGRES_PORT`: PostgreSQL port (default: 5432) -- `POSTGRES_DB`: Name of the database to connect to +If you set `DATABASE_URL` directly, it overrides the constructed URL. 
-**Environment-Specific Values:** +## Cache ```env -# Local development -POSTGRES_SERVER="localhost" - -# Docker Compose -POSTGRES_SERVER="db" +CACHE_ENABLED=true +CACHE_BACKEND=redis # or "memcached" +DEFAULT_CACHE_EXPIRATION=3600 -# Production -POSTGRES_SERVER="your-prod-db-host.com" +# Client-side cache (Cache-Control headers) +CLIENT_CACHE_ENABLED=true +CLIENT_CACHE_MAX_AGE=60 ``` -### Security & Authentication - -JWT and password security configuration: +### Redis backend ```env -# ------------- crypt ------------- -SECRET_KEY="your-super-secret-key-here" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 +CACHE_REDIS_HOST=redis # use "localhost" without Docker +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +CACHE_REDIS_PASSWORD= +CACHE_REDIS_CONNECT_TIMEOUT=5 +CACHE_REDIS_POOL_SIZE=10 ``` -**Variables Explained:** - -- `SECRET_KEY`: Used for JWT token signing (generate with `openssl rand -hex 32`) -- `ALGORITHM`: JWT signing algorithm (HS256 recommended) -- `ACCESS_TOKEN_EXPIRE_MINUTES`: How long access tokens remain valid -- `REFRESH_TOKEN_EXPIRE_DAYS`: How long refresh tokens remain valid - -!!! danger "Security Warning" -Never use default values in production. 
Generate a strong secret key: -`bash openssl rand -hex 32 ` - -### Redis Configuration - -Redis is used for caching, job queues, and rate limiting: +### Memcached backend ```env -# ------------- redis cache ------------- -REDIS_CACHE_HOST="localhost" # Use "redis" for Docker Compose -REDIS_CACHE_PORT=6379 - -# ------------- redis queue ------------- -REDIS_QUEUE_HOST="localhost" # Use "redis" for Docker Compose -REDIS_QUEUE_PORT=6379 - -# ------------- redis rate limit ------------- -REDIS_RATE_LIMIT_HOST="localhost" # Use "redis" for Docker Compose -REDIS_RATE_LIMIT_PORT=6379 +CACHE_MEMCACHED_HOST=localhost +CACHE_MEMCACHED_PORT=11211 +CACHE_MEMCACHED_POOL_SIZE=10 +CACHE_MEMCACHED_CONNECT_TIMEOUT=5 ``` -**Best Practices:** - -- **Development**: Use the same Redis instance for all services -- **Production**: Use separate Redis instances for better isolation +## Rate Limiting ```env -# Production example with separate instances -REDIS_CACHE_HOST="cache.redis.example.com" -REDIS_QUEUE_HOST="queue.redis.example.com" -REDIS_RATE_LIMIT_HOST="ratelimit.redis.example.com" +RATE_LIMITER_ENABLED=true +RATE_LIMITER_BACKEND=redis # or "memcached" +RATE_LIMITER_FAIL_OPEN=true # allow requests when backend is unreachable +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 ``` -### Caching Settings - -Client-side and server-side caching configuration: +### Redis backend ```env -# ------------- redis client-side cache ------------- -CLIENT_CACHE_MAX_AGE=30 # seconds +RATE_LIMITER_REDIS_HOST=redis +RATE_LIMITER_REDIS_PORT=6379 +RATE_LIMITER_REDIS_DB=1 # separate DB from cache (DB 0) +RATE_LIMITER_REDIS_PASSWORD= +RATE_LIMITER_REDIS_CONNECT_TIMEOUT=5 +RATE_LIMITER_REDIS_POOL_SIZE=10 ``` -**Variables Explained:** - -- `CLIENT_CACHE_MAX_AGE`: How long browsers should cache responses - -### Rate Limiting - -Default rate limiting configuration: +### Memcached backend ```env -# ------------- default rate limit settings ------------- -DEFAULT_RATE_LIMIT_LIMIT=10 # requests per 
period -DEFAULT_RATE_LIMIT_PERIOD=3600 # period in seconds (1 hour) +RATE_LIMITER_MEMCACHED_HOST=localhost +RATE_LIMITER_MEMCACHED_PORT=11211 +RATE_LIMITER_MEMCACHED_POOL_SIZE=10 ``` -**Variables Explained:** - -- `DEFAULT_RATE_LIMIT_LIMIT`: Number of requests allowed per period -- `DEFAULT_RATE_LIMIT_PERIOD`: Time window in seconds - -### Admin User - -First superuser account configuration: +## Background Tasks (Taskiq) ```env -# ------------- admin ------------- -ADMIN_NAME="Admin User" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="secure_admin_password" +TASKIQ_ENABLED=true +TASKIQ_BROKER_TYPE=redis # or "rabbitmq" ``` -**Variables Explained:** - -- `ADMIN_NAME`: Display name for the admin user -- `ADMIN_EMAIL`: Email address for the admin account -- `ADMIN_USERNAME`: Username for admin login -- `ADMIN_PASSWORD`: Initial password (change after first login) - -### CORS Configuration - -Cross-Origin Resource Sharing (CORS) settings for frontend integration: +### Redis broker ```env -# ------------- CORS ------------- -CORS_ORIGINS=["*"] -CORS_METHODS=["*"] -CORS_HEADERS=["*"] +TASKIQ_REDIS_HOST=redis +TASKIQ_REDIS_PORT=6379 +TASKIQ_REDIS_DB=3 # separate DB from cache and rate limiter +TASKIQ_REDIS_PASSWORD= ``` -**Variables Explained:** - -- `CORS_ORIGINS`: Comma-separated list of allowed origins (e.g., `["https://app.com","https://www.app.com"]`) -- `CORS_METHODS`: Comma-separated list of allowed HTTP methods (e.g., `["GET","POST","PUT","DELETE"]`) -- `CORS_HEADERS`: Comma-separated list of allowed headers (e.g., `["Authorization","Content-Type"]`) - -**Environment-Specific Values:** +### RabbitMQ broker ```env -# Development - Allow all origins -CORS_ORIGINS=["*"] -CORS_METHODS=["*"] -CORS_HEADERS=["*"] - -# Production - Specific domains only -CORS_ORIGINS=["https://yourapp.com","https://www.yourapp.com"] -CORS_METHODS=["GET","POST","PUT","DELETE","PATCH"] -CORS_HEADERS=["Authorization","Content-Type","X-Requested-With"] 
+TASKIQ_RABBITMQ_HOST=localhost +TASKIQ_RABBITMQ_PORT=5672 +TASKIQ_RABBITMQ_USER=guest +TASKIQ_RABBITMQ_PASSWORD=guest +TASKIQ_RABBITMQ_VHOST=/ ``` -!!! danger "Security Warning" -Never use wildcard (`*`) for `CORS_ORIGINS` in production environments. Always specify exact allowed domains to prevent unauthorized cross-origin requests. - -### User Tiers - -Initial tier configuration: +### Worker tuning ```env -# ------------- first tier ------------- -TIER_NAME="free" +TASKIQ_WORKER_CONCURRENCY=2 +TASKIQ_MAX_TASKS_PER_WORKER=1000 ``` -**Variables Explained:** - -- `TIER_NAME`: Name of the default user tier +## Web Server -### Environment Type - -Controls API documentation visibility and behavior: +### CORS ```env -# ------------- environment ------------- -ENVIRONMENT="local" # local, staging, or production -``` - -**Environment Types:** - -- **local**: Full API docs available publicly at `/docs` -- **staging**: API docs available to superusers only -- **production**: API docs completely disabled - -## Docker Compose Configuration - -### Basic Setup - -Docker Compose automatically loads the `.env` file: - -```yaml -# In docker-compose.yml -services: - web: - env_file: - - ./src/.env -``` - -### Development Overrides - -Create `docker-compose.override.yml` for local customizations: - -```yaml -version: '3.8' -services: - web: - ports: - - "8001:8000" # Use different port - environment: - - DEBUG=true - volumes: - - ./custom-logs:/code/logs -``` - -### Service Configuration - -Understanding each Docker service: - -```yaml -services: - web: # FastAPI application - db: # PostgreSQL database - redis: # Redis for caching/queues - worker: # ARQ background task worker - nginx: # Reverse proxy (optional) -``` - -## Python Settings Classes - -Advanced configuration is handled in `src/app/core/config.py`: - -### Settings Composition - -The main `Settings` class inherits from multiple setting groups: - -```python -class Settings( - AppSettings, - PostgresSettings, - 
CryptSettings, - FirstUserSettings, - RedisCacheSettings, - ClientSideCacheSettings, - RedisQueueSettings, - RedisRateLimiterSettings, - DefaultRateLimitSettings, - EnvironmentSettings, - CORSSettings, -): - pass +CORS_ENABLED=true +CORS_ORIGINS=* # comma-separated list of origins +CORS_ALLOW_CREDENTIALS=true +CORS_ALLOW_METHODS=* +CORS_ALLOW_HEADERS=* ``` -### Adding Custom Settings - -Create your own settings group: +!!! danger "CORS in Production" + Never use `*` for `CORS_ORIGINS` in production. Specify exact domains: + ```env + CORS_ORIGINS=https://yourapp.com,https://www.yourapp.com + CORS_ALLOW_METHODS=GET,POST,PUT,DELETE,PATCH + CORS_ALLOW_HEADERS=Authorization,Content-Type + ``` -```python -class CustomSettings(BaseSettings): - CUSTOM_API_KEY: str = "" - CUSTOM_TIMEOUT: int = 30 - ENABLE_FEATURE_X: bool = False +### Compression - -# Add to main Settings class -class Settings( - AppSettings, - # ... other settings ... - CustomSettings, -): - pass -``` - -### Opting Out of Services - -Remove unused services by excluding their settings: - -```python -# Minimal setup without Redis services -class Settings( - AppSettings, - PostgresSettings, - CryptSettings, - FirstUserSettings, - # Removed: RedisCacheSettings - # Removed: RedisQueueSettings - # Removed: RedisRateLimiterSettings - EnvironmentSettings, -): - pass -``` - -## Database Configuration - -### Alembic Configuration - -Database migrations are configured in `src/alembic.ini`: - -```ini -[alembic] -script_location = migrations -sqlalchemy.url = postgresql://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s@%(POSTGRES_SERVER)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s +```env +GZIP_ENABLED=true +GZIP_MINIMUM_SIZE=1000 ``` -### Connection Pooling - -SQLAlchemy connection pool settings in `src/app/core/db/database.py`: +### API Docs -```python -engine = create_async_engine( - DATABASE_URL, - pool_size=20, # Number of connections to maintain - max_overflow=30, # Additional connections allowed - pool_timeout=30, # Seconds to 
wait for connection - pool_recycle=1800, # Seconds before connection refresh -) +```env +ENABLE_DOCS_IN_PRODUCTION=false # serve /docs even when ENVIRONMENT=production +OPENAPI_PREFIX= # path prefix for the OpenAPI schema ``` -### Database Best Practices +## Authentication & Security -**Connection Pool Sizing:** - -- Start with `pool_size=20`, `max_overflow=30` -- Monitor connection usage and adjust based on load -- Use connection pooling monitoring tools - -**Migration Strategy:** - -- Always backup database before running migrations -- Test migrations on staging environment first -- Use `alembic revision --autogenerate` for model changes - -## Security Configuration - -### JWT Token Configuration - -Customize JWT behavior in `src/app/core/security.py`: +```env +SECRET_KEY=insecure-secret-key-change-this-in-production -```python -def create_access_token(data: dict, expires_delta: timedelta = None): - to_encode = data.copy() - if expires_delta: - expire = datetime.utcnow() + expires_delta - else: - expire = datetime.utcnow() + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES) +# Production security validation (enabled by default in production) +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=false ``` -### CORS Configuration - -Customize Cross-Origin Resource Sharing in `src/app/core/setup.py`: +Generate a strong key: -```python -app.add_middleware( - CORSMiddleware, - allow_origins=["http://localhost:3000"], # Specify allowed origins - allow_credentials=True, - allow_methods=["GET", "POST"], # Specify allowed methods - allow_headers=["*"], -) +```bash +python -c "import secrets; print(secrets.token_urlsafe(64))" ``` -**Production CORS Settings:** +### Sessions -```python -# Never use wildcard (*) in production -allow_origins = (["https://yourapp.com", "https://www.yourapp.com"],) +```env +SESSION_TIMEOUT_MINUTES=30 +SESSION_CLEANUP_INTERVAL_MINUTES=15 +MAX_SESSIONS_PER_USER=5 +SESSION_SECURE_COOKIES=true +SESSION_BACKEND=redis 
+SESSION_COOKIE_MAX_AGE=86400 ``` -### Security Headers - -Add security headers middleware: - -```python -from starlette.middleware.base import BaseHTTPMiddleware +### CSRF - -class SecurityHeadersMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request, call_next): - response = await call_next(request) - response.headers["X-Frame-Options"] = "DENY" - response.headers["X-Content-Type-Options"] = "nosniff" - response.headers["X-XSS-Protection"] = "1; mode=block" - return response +```env +# Set false to disable CSRF validation in dev/test +CSRF_ENABLED=true ``` -## Logging Configuration - -### Basic Logging Setup - -Configure logging in `src/app/core/logger.py`: +### Login Rate Limiting -```python -import logging -from logging.handlers import RotatingFileHandler - -# Set log level -LOGGING_LEVEL = logging.INFO - -# Configure file rotation -file_handler = RotatingFileHandler("logs/app.log", maxBytes=10485760, backupCount=5) # 10MB # Keep 5 backup files +```env +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 ``` -### Structured Logging - -Use structured logging for better observability: +### OAuth -```python -import structlog - -structlog.configure( - processors=[ - structlog.stdlib.filter_by_level, - structlog.stdlib.add_logger_name, - structlog.stdlib.add_log_level, - structlog.processors.JSONRenderer(), - ], - logger_factory=structlog.stdlib.LoggerFactory(), -) -``` - -### Log Levels by Environment +```env +OAUTH_REDIRECT_BASE_URL=http://localhost:8000 -```python -# Environment-specific log levels -LOG_LEVELS = {"local": logging.DEBUG, "staging": logging.INFO, "production": logging.WARNING} +# Google OAuth (leave empty to disable) +OAUTH_GOOGLE_CLIENT_ID= +OAUTH_GOOGLE_CLIENT_SECRET= -LOGGING_LEVEL = LOG_LEVELS.get(settings.ENVIRONMENT, logging.INFO) +# GitHub OAuth (provider scaffolded; routes not yet wired) +OAUTH_GITHUB_CLIENT_ID= +OAUTH_GITHUB_CLIENT_SECRET= ``` -## Environment-Specific Configurations - -### Development (.env.development) +## Admin 
Interface (SQLAdmin) ```env -ENVIRONMENT="local" -POSTGRES_SERVER="localhost" -REDIS_CACHE_HOST="localhost" -SECRET_KEY="dev-secret-key-not-for-production" -ACCESS_TOKEN_EXPIRE_MINUTES=60 # Longer for development -DEBUG=true +ADMIN_ENABLED=true # enables /admin ``` -### Staging (.env.staging) +## Application Metadata ```env -ENVIRONMENT="staging" -POSTGRES_SERVER="staging-db.example.com" -REDIS_CACHE_HOST="staging-redis.example.com" -SECRET_KEY="staging-secret-key-different-from-prod" -ACCESS_TOKEN_EXPIRE_MINUTES=30 DEBUG=false +APP_NAME=FastAPI Boilerplate +APP_DESCRIPTION=Modular FastAPI starter +VERSION=0.18.0 +CONTACT_NAME=Support +CONTACT_EMAIL=support@example.com +LICENSE_NAME=MIT ``` -### Production (.env.production) +### API Settings (optional overrides) ```env -ENVIRONMENT="production" -POSTGRES_SERVER="prod-db.example.com" -REDIS_CACHE_HOST="prod-redis.example.com" -SECRET_KEY="ultra-secure-production-key-generated-with-openssl" -ACCESS_TOKEN_EXPIRE_MINUTES=15 -DEBUG=false -REDIS_CACHE_PORT=6380 # Custom port for security -POSTGRES_PORT=5433 # Custom port for security +# API_PREFIX=/api +# DOCS_URL=/docs +# REDOC_URL=/redoc ``` -## Advanced Configuration - -### Custom Middleware - -Add custom middleware in `src/app/core/setup.py`: +## Initial Setup -```python -def create_application(router, settings, **kwargs): - app = FastAPI(...) 
- - # Add custom middleware - app.add_middleware(CustomMiddleware, setting=value) - app.add_middleware(TimingMiddleware) - app.add_middleware(RequestIDMiddleware) - - return app -``` +These are read by `python -m scripts.setup_initial_data`: -### Feature Toggles - -Implement feature flags: - -```python -class FeatureSettings(BaseSettings): - ENABLE_ADVANCED_CACHING: bool = False - ENABLE_ANALYTICS: bool = True - ENABLE_EXPERIMENTAL_FEATURES: bool = False - ENABLE_API_VERSIONING: bool = True - - -# Use in endpoints -if settings.ENABLE_ADVANCED_CACHING: - # Advanced caching logic - pass -``` - -## Configuration Validation - -### Environment Validation - -Add validation to prevent misconfiguration: - -```python -def validate_settings(): - if not settings.SECRET_KEY: - raise ValueError("SECRET_KEY must be set") - - if settings.ENVIRONMENT == "production": - if settings.SECRET_KEY == "dev-secret-key": - raise ValueError("Production must use secure SECRET_KEY") - - if settings.DEBUG: - raise ValueError("DEBUG must be False in production") +```env +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=your-secure-password ``` -### Runtime Checks +The default tier name is also configurable (defaults to `free`): -Add validation to application startup: - -```python -@app.on_event("startup") -async def startup_event(): - validate_settings() - await check_database_connection() - await check_redis_connection() - logger.info(f"Application started in {settings.ENVIRONMENT} mode") +```env +DEFAULT_TIER_NAME=free ``` -## Configuration Troubleshooting - -### Common Issues +## Logging -**Environment Variables Not Loading:** +```env +LOG_LEVEL=INFO +LOG_FORMAT=structured # simple | detailed | structured | json +LOG_CONSOLE_ENABLED=true +LOG_FILE_ENABLED=false +LOG_FILE_PATH=logs/app.log +LOG_FILE_MAX_SIZE=10485760 # 10 MB +LOG_FILE_BACKUP_COUNT=5 +LOG_CORRELATION_ID=true +LOG_STRUCTURED_CONTEXT=true +LOG_PERFORMANCE_METRICS=false 
+LOG_SQL_QUERIES=false +LOG_INCLUDE_STACKTRACE=true +LOG_DEVELOPMENT_VERBOSE=true +LOG_PRODUCTION_OPTIMIZE=true +``` + +## Production Security Checklist + +Before deploying to production: + +1. Generate a strong `SECRET_KEY` (at least 64 bytes of entropy) +2. Use unique passwords for the database and every Redis instance +3. Use separate Redis databases for each service (`CACHE_REDIS_DB=0`, `RATE_LIMITER_REDIS_DB=1`, `TASKIQ_REDIS_DB=3`) +4. Restrict `CORS_ORIGINS` to your real domains (no `*`) +5. Set strong admin credentials (`ADMIN_USERNAME`, `ADMIN_PASSWORD`) +6. Review session timeouts for your security posture +7. Set `ENVIRONMENT=production` to enable the security validator +8. If using RabbitMQ, replace the `guest/guest` defaults + +## Troubleshooting + +### Variables Not Loading ```bash -# Check file location and permissions -ls -la src/.env +# Check the file location +ls -la backend/.env -# Check file format (no spaces around =) -cat src/.env | grep "=" | head -5 +# Make sure there are no spaces around = +grep "=" backend/.env | head -5 -# Verify environment loading in Python -python -c "from src.app.core.config import settings; print(settings.APP_NAME)" +# Verify what Python sees +cd backend +uv run python -c "from src.infrastructure.config.settings import get_settings; s = get_settings(); print(s.APP_NAME, s.ENVIRONMENT)" ``` -**Database Connection Failed:** +### Database Connection Failed ```bash -# Test connection manually -psql -h localhost -U postgres -d myapp +# Linux +sudo systemctl status postgresql +psql -h localhost -U postgres -d postgres -# Check if PostgreSQL is running -systemctl status postgresql -# or on macOS +# macOS brew services list | grep postgresql ``` -**Redis Connection Failed:** +### Redis Connection Failed ```bash -# Test Redis connection -redis-cli -h localhost -p 6379 ping - -# Check Redis status -systemctl status redis -# or on macOS -brew services list | grep redis -``` - -### Configuration Testing - -Test your 
configuration with a simple script: - -```python -# test_config.py -import asyncio -from src.app.core.config import settings -from src.app.core.db.database import async_get_db - +redis-cli -h localhost -p 6379 ping # should print PONG -async def test_config(): - print(f"App: {settings.APP_NAME}") - print(f"Environment: {settings.ENVIRONMENT}") +# Linux +sudo systemctl status redis-server - # Test database - try: - db = await anext(async_get_db()) - print("✓ Database connection successful") - await db.close() - except Exception as e: - print(f"✗ Database connection failed: {e}") - - # Test Redis (if enabled) - try: - from src.app.core.utils.cache import redis_client - - await redis_client.ping() - print("✓ Redis connection successful") - except Exception as e: - print(f"✗ Redis connection failed: {e}") - - -if __name__ == "__main__": - asyncio.run(test_config()) +# macOS +brew services list | grep redis ``` -Run with: +## See Also -```bash -uv run python test_config.py -``` +- **[Settings Classes](settings-classes.md)** — How env vars are turned into Python settings +- **[Docker Setup](docker-setup.md)** — Compose configuration +- **[Environment-Specific](environment-specific.md)** — Recommended values per environment diff --git a/docs/user-guide/configuration/index.md b/docs/user-guide/configuration/index.md index ad825d67..68788f59 100644 --- a/docs/user-guide/configuration/index.md +++ b/docs/user-guide/configuration/index.md @@ -1,6 +1,6 @@ # Configuration -Learn how to configure your FastAPI Boilerplate application for different environments and use cases. Everything is configured through environment variables and Python settings classes. +Learn how to configure your FastAPI Boilerplate application for different environments. Configuration is driven by environment variables and validated by Python settings classes. 
## What You'll Learn @@ -11,301 +11,272 @@ Learn how to configure your FastAPI Boilerplate application for different enviro ## Quick Start -The boilerplate uses environment variables as the primary configuration method: - ```bash -# Copy the example file -cp src/.env.example src/.env - -# Edit with your values -nano src/.env +cd backend +cp .env.example .env +$EDITOR .env ``` -Essential variables to set: +Essential variables: ```env # Application -APP_NAME="My FastAPI App" -SECRET_KEY="your-super-secret-key-here" +APP_NAME=My FastAPI App +SECRET_KEY=your-super-secret-key-here # Database -POSTGRES_USER="your_user" -POSTGRES_PASSWORD="your_password" -POSTGRES_DB="your_database" - -# Admin Account -ADMIN_EMAIL="admin@example.com" -ADMIN_PASSWORD="secure_password" +POSTGRES_USER=your_user +POSTGRES_PASSWORD=your_password +POSTGRES_DB=your_database + +# Admin Account (used by setup_initial_data) +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=secure_password ``` ## Configuration Architecture -The configuration system has three layers: - ``` -Environment Variables (.env files) +Environment Variables (.env file) ↓ -Settings Classes (Python validation) +Settings Classes (Pydantic BaseSettings) ↓ -Application Configuration (Runtime) +Application Code (via get_settings()) ``` ### Layer 1: Environment Variables -Primary configuration through `.env` files: + +Primary configuration through `backend/.env`: + ```env -POSTGRES_USER="myuser" -POSTGRES_PASSWORD="mypassword" -REDIS_CACHE_HOST="localhost" -SECRET_KEY="your-secret-key" +POSTGRES_USER=myuser +POSTGRES_PASSWORD=mypassword +CACHE_REDIS_HOST=localhost +SECRET_KEY=your-secret-key ``` ### Layer 2: Settings Classes -Python classes that validate and structure configuration: + +Pydantic `BaseSettings` classes in `src/infrastructure/config/settings.py` validate and structure config: + ```python -class PostgresSettings(BaseSettings): - POSTGRES_USER: str - POSTGRES_PASSWORD: str = 
Field(min_length=8) - POSTGRES_SERVER: str = "localhost" - POSTGRES_PORT: int = 5432 - POSTGRES_DB: str +class DatabaseSettings(BaseSettings): + POSTGRES_USER: str = config("POSTGRES_USER", default="postgres") + POSTGRES_PASSWORD: str = config("POSTGRES_PASSWORD", default="postgres") + POSTGRES_SERVER: str = config("POSTGRES_SERVER", default="localhost") + POSTGRES_PORT: int = config("POSTGRES_PORT", default=5432) + POSTGRES_DB: str = config("POSTGRES_DB", default="postgres") ``` +A single composite `Settings` class combines them all. + ### Layer 3: Application Use -Configuration injected throughout the application: + +Pull settings anywhere in the app via `get_settings()`: + ```python -from app.core.config import settings +from src.infrastructure.config.settings import get_settings -# Use anywhere in your code -DATABASE_URL = f"postgresql+asyncpg://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_SERVER}:{settings.POSTGRES_PORT}/{settings.POSTGRES_DB}" +settings = get_settings() +print(settings.DATABASE_URL) ``` ## Key Configuration Areas -### Security Settings +### Application Settings + ```env -SECRET_KEY="your-super-secret-key-here" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 +APP_NAME=Your App Name +VERSION=1.0.0 +ENVIRONMENT=development # development | staging | production | local +DEBUG=false ``` -### Database Configuration +### Database + ```env -POSTGRES_USER="your_user" -POSTGRES_PASSWORD="your_password" -POSTGRES_SERVER="localhost" +POSTGRES_USER=your_user +POSTGRES_PASSWORD=your_password +POSTGRES_SERVER=localhost # use "db" with Docker Compose POSTGRES_PORT=5432 -POSTGRES_DB="your_database" +POSTGRES_DB=your_database +CREATE_TABLES_ON_STARTUP=true ``` -### Redis Services +### Security & Sessions + ```env -# Cache -REDIS_CACHE_HOST="localhost" -REDIS_CACHE_PORT=6379 +SECRET_KEY=your-super-secret-key-here + +SESSION_TIMEOUT_MINUTES=30 +SESSION_SECURE_COOKIES=true +SESSION_BACKEND=redis 
+CSRF_ENABLED=true +LOGIN_MAX_ATTEMPTS=5 +LOGIN_WINDOW_MINUTES=15 +``` -# Background jobs -REDIS_QUEUE_HOST="localhost" -REDIS_QUEUE_PORT=6379 +### Cache (Redis or Memcached) -# Rate limiting -REDIS_RATE_LIMIT_HOST="localhost" -REDIS_RATE_LIMIT_PORT=6379 +```env +CACHE_ENABLED=true +CACHE_BACKEND=redis # or "memcached" +CACHE_REDIS_HOST=localhost # use "redis" with Docker Compose +CACHE_REDIS_PORT=6379 +CACHE_REDIS_DB=0 +DEFAULT_CACHE_EXPIRATION=3600 ``` -### Application Settings +### Background Tasks (Taskiq) + ```env -APP_NAME="Your App Name" -APP_VERSION="1.0.0" -ENVIRONMENT="local" # local, staging, production -DEBUG=true +TASKIQ_ENABLED=true +TASKIQ_BROKER_TYPE=redis # or "rabbitmq" +TASKIQ_REDIS_HOST=localhost # use "redis" with Docker Compose +TASKIQ_REDIS_PORT=6379 +TASKIQ_REDIS_DB=3 ``` ### Rate Limiting + ```env +RATE_LIMITER_ENABLED=true +RATE_LIMITER_BACKEND=redis +RATE_LIMITER_REDIS_HOST=localhost +RATE_LIMITER_REDIS_DB=1 DEFAULT_RATE_LIMIT_LIMIT=100 -DEFAULT_RATE_LIMIT_PERIOD=3600 # 1 hour in seconds +DEFAULT_RATE_LIMIT_PERIOD=60 ``` -### Admin User +### Admin User (Initial Setup) + +Read by `python -m scripts.setup_initial_data` on first run: + ```env -ADMIN_NAME="Admin User" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="secure_password" +ADMIN_NAME=Admin User +ADMIN_EMAIL=admin@example.com +ADMIN_USERNAME=admin +ADMIN_PASSWORD=secure_password ``` ## Environment-Specific Configurations ### Development + ```env -ENVIRONMENT="local" +ENVIRONMENT=development DEBUG=true -POSTGRES_SERVER="localhost" -REDIS_CACHE_HOST="localhost" -ACCESS_TOKEN_EXPIRE_MINUTES=60 # Longer for development +POSTGRES_SERVER=localhost +CACHE_REDIS_HOST=localhost +TASKIQ_REDIS_HOST=localhost +RATE_LIMITER_REDIS_HOST=localhost ``` ### Staging + ```env -ENVIRONMENT="staging" +ENVIRONMENT=staging DEBUG=false -POSTGRES_SERVER="staging-db.example.com" -REDIS_CACHE_HOST="staging-redis.example.com" -ACCESS_TOKEN_EXPIRE_MINUTES=30 
+POSTGRES_SERVER=staging-db.example.com +CACHE_REDIS_HOST=staging-redis.example.com +SESSION_SECURE_COOKIES=true ``` ### Production + ```env -ENVIRONMENT="production" +ENVIRONMENT=production DEBUG=false -POSTGRES_SERVER="prod-db.example.com" -REDIS_CACHE_HOST="prod-redis.example.com" -ACCESS_TOKEN_EXPIRE_MINUTES=15 -# Use custom ports for security -POSTGRES_PORT=5433 -REDIS_CACHE_PORT=6380 +POSTGRES_SERVER=prod-db.example.com +CACHE_REDIS_HOST=prod-redis.example.com +PRODUCTION_SECURITY_VALIDATION_ENABLED=true +PRODUCTION_SECURITY_STRICT_MODE=true +ENABLE_DOCS_IN_PRODUCTION=false ``` ## Docker Configuration -### Basic Setup -Docker Compose automatically loads your `.env` file: +Docker Compose loads variables from `.env` automatically. With Compose, services reach each other by service name: -```yaml -services: - web: - env_file: - - ./src/.env - environment: - - DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB} +```env +POSTGRES_SERVER=db +CACHE_REDIS_HOST=redis +RATE_LIMITER_REDIS_HOST=redis +TASKIQ_REDIS_HOST=redis ``` ### Service Overview + ```yaml services: - web: # FastAPI application - db: # PostgreSQL database - redis: # Redis for caching/queues - worker: # Background task worker + web: # FastAPI application + db: # PostgreSQL + redis: # Cache, rate limiting, sessions, taskiq broker ``` +To run a Taskiq worker, add a worker service to your Compose file with the command `taskiq worker infrastructure.taskiq.worker:default_broker`. + ## Common Configuration Patterns -### Feature Flags -```python -# In settings class -class FeatureSettings(BaseSettings): - ENABLE_CACHING: bool = True - ENABLE_ANALYTICS: bool = False - ENABLE_BACKGROUND_JOBS: bool = True +### Feature Toggles -# Use in code -if settings.ENABLE_CACHING: - cache_result = await get_from_cache(key) -``` +The boilerplate already exposes toggles like `CACHE_ENABLED`, `RATE_LIMITER_ENABLED`, `TASKIQ_ENABLED`, `ADMIN_ENABLED`, and `CSRF_ENABLED`. 
You can add your own in a settings class: -### Environment Detection ```python -@app.get("/docs", include_in_schema=False) -async def custom_swagger_ui(): - if settings.ENVIRONMENT == "production": - raise HTTPException(404, "Documentation not available") - return get_swagger_ui_html(openapi_url="/openapi.json") +class FeatureSettings(BaseSettings): + ENABLE_ANALYTICS: bool = config("ENABLE_ANALYTICS", default=False, cast=bool) ``` -### Health Checks -```python -@app.get("/health") -async def health_check(): - return { - "status": "healthy", - "environment": settings.ENVIRONMENT, - "version": settings.APP_VERSION, - "database": await check_database_health(), - "redis": await check_redis_health() - } -``` +Then use it: -## Quick Configuration Tasks +```python +from src.infrastructure.config.settings import get_settings -### Generate Secret Key -```bash -# Generate a secure secret key -openssl rand -hex 32 +if get_settings().ENABLE_ANALYTICS: + track_event(...) ``` -### Test Configuration +### Environment Detection + ```python -# test_config.py -from app.core.config import settings +from src.infrastructure.config.settings import EnvironmentOption, get_settings -print(f"App: {settings.APP_NAME}") -print(f"Environment: {settings.ENVIRONMENT}") -print(f"Database: {settings.POSTGRES_DB}") +if get_settings().ENVIRONMENT == EnvironmentOption.PRODUCTION: + ... 
``` -### Environment File Templates -```bash -# Development -cp src/.env.example src/.env.development - -# Staging -cp src/.env.example src/.env.staging +### Generate Secret Key -# Production -cp src/.env.example src/.env.production +```bash +python -c "import secrets; print(secrets.token_urlsafe(64))" ``` ## Best Practices ### Security -- Never commit `.env` files to version control -- Use different secret keys for each environment +- Never commit `.env` to version control +- Use a unique strong `SECRET_KEY` per environment - Disable debug mode in production -- Use secure passwords and keys +- Set `ENVIRONMENT=production` to enable the security validator +- Restrict `CORS_ORIGINS` to specific domains ### Performance -- Configure appropriate connection pool sizes -- Set reasonable token expiration times -- Use Redis for caching in production -- Configure proper rate limits +- Tune `POSTGRES_POOL_SIZE` and `POSTGRES_MAX_OVERFLOW` for your workload +- Use separate Redis databases for cache (`CACHE_REDIS_DB=0`), rate limiting (`RATE_LIMITER_REDIS_DB=1`), and taskiq (`TASKIQ_REDIS_DB=3`) +- Set sensible `SESSION_TIMEOUT_MINUTES` and `MAX_SESSIONS_PER_USER` ### Maintenance -- Document all custom environment variables -- Use validation in settings classes -- Test configurations in staging first -- Monitor configuration changes - -### Testing -- Use separate test environment variables -- Mock external services in tests -- Validate configuration on startup -- Test with different environment combinations +- Document custom environment variables in `.env.example` +- Add validation in settings classes (Pydantic types catch most issues) +- Test configurations in staging before production ## Getting Started -Follow this path to configure your application: - -### 1. **[Environment Variables](environment-variables.md)** - Start here -Learn about all available environment variables, their purposes, and recommended values for different environments. - -### 2. 
**[Settings Classes](settings-classes.md)** - Validation layer -Understand how Python settings classes validate and structure your configuration with type hints and validation rules. - -### 3. **[Docker Setup](docker-setup.md)** - Container configuration -Configure Docker Compose services, networking, and environment-specific overrides. - -### 4. **[Environment-Specific](environment-specific.md)** - Deployment configs -Set up configuration for development, staging, and production environments with best practices. - -## What's Next - -Each guide provides practical examples and copy-paste configurations: - -1. **[Environment Variables](environment-variables.md)** - Complete reference and examples -2. **[Settings Classes](settings-classes.md)** - Custom validation and organization -3. **[Docker Setup](docker-setup.md)** - Service configuration and overrides -4. **[Environment-Specific](environment-specific.md)** - Production-ready configurations +1. **[Environment Variables](environment-variables.md)** - Complete reference of every variable +2. **[Settings Classes](settings-classes.md)** - How config is organized in Python +3. **[Docker Setup](docker-setup.md)** - Compose files and overrides +4. **[Environment-Specific](environment-specific.md)** - Per-environment best practices -The boilerplate provides sensible defaults - just customize what you need! \ No newline at end of file +The boilerplate ships with sensible defaults — only override what you need. diff --git a/docs/user-guide/configuration/settings-classes.md b/docs/user-guide/configuration/settings-classes.md index 277ef8af..aaf795b4 100644 --- a/docs/user-guide/configuration/settings-classes.md +++ b/docs/user-guide/configuration/settings-classes.md @@ -1,553 +1,302 @@ # Settings Classes -Learn how Python settings classes validate, structure, and organize your application configuration. The boilerplate uses Pydantic's `BaseSettings` for type-safe configuration management. 
+Settings live in `backend/src/infrastructure/config/settings.py` and are organized as Pydantic `BaseSettings` classes — each class groups related variables, and a single `Settings` class composes them all. Defaults come from `backend/.env` via Starlette's `Config()` loader. ## Settings Architecture -The main `Settings` class inherits from multiple specialized setting groups: - ```python -# src/app/core/config.py +# src/infrastructure/config/settings.py +from pydantic_settings import BaseSettings +from starlette.config import Config + +config = Config(env_path) # reads backend/.env + + class Settings( - AppSettings, - PostgresSettings, - CryptSettings, - FirstUserSettings, - RedisCacheSettings, - ClientSideCacheSettings, - RedisQueueSettings, - RedisRateLimiterSettings, - DefaultRateLimitSettings, EnvironmentSettings, + DatabaseSettings, + CacheSettings, + RateLimiterSettings, CORSSettings, + CompressionSettings, + APIDocSettings, + AuthSettings, + APISettings, + AppSettings, + AdminSettings, + SQLAdminSettings, + SecuritySettings, + LoggingSettings, + TaskiqSettings, ): + """Main settings class that combines all setting categories.""" + pass -# Single instance used throughout the app settings = Settings() -``` -## Built-in Settings Groups -### Application Settings +def get_settings() -> Settings: + return settings +``` -Basic app metadata and configuration: +Anywhere in the app: ```python -class AppSettings(BaseSettings): - APP_NAME: str = "FastAPI" - APP_DESCRIPTION: str = "A FastAPI project" - APP_VERSION: str = "0.1.0" - CONTACT_NAME: str = "Your Name" - CONTACT_EMAIL: str = "your.email@example.com" - LICENSE_NAME: str = "MIT" +from src.infrastructure.config.settings import get_settings + +settings = get_settings() +print(settings.APP_NAME) ``` -### Database Settings +## Built-in Settings Groups -PostgreSQL connection configuration: +The actual classes that ship with the boilerplate, all in `src/infrastructure/config/settings.py`: + +| Class | Covers | 
+|-------|--------| +| `EnvironmentSettings` | `ENVIRONMENT` (production/staging/development/local) | +| `DatabaseSettings` | All `POSTGRES_*` vars + `DATABASE_URL` computed property | +| `CacheSettings` | `CACHE_*` (Redis + Memcached + client-side) | +| `RateLimiterSettings` | `RATE_LIMITER_*` (Redis + Memcached + defaults) | +| `CORSSettings` | `CORS_*` | +| `CompressionSettings` | `GZIP_*` | +| `APIDocSettings` | `ENABLE_DOCS_IN_PRODUCTION`, `OPENAPI_PREFIX` | +| `AuthSettings` | `SECRET_KEY`, `SESSION_*`, `CSRF_*`, `LOGIN_*`, `OAUTH_*` | +| `APISettings` | API path overrides (`API_PREFIX`, `DOCS_URL`, `REDOC_URL`) | +| `AppSettings` | `APP_NAME`, `APP_DESCRIPTION`, `VERSION`, `DEBUG`, contact info | +| `AdminSettings` | `ADMIN_NAME`, `ADMIN_EMAIL`, `ADMIN_USERNAME`, `ADMIN_PASSWORD`, `DEFAULT_TIER_NAME` | +| `SQLAdminSettings` | `ADMIN_ENABLED` | +| `SecuritySettings` | `PRODUCTION_SECURITY_VALIDATION_ENABLED`, `PRODUCTION_SECURITY_STRICT_MODE` | +| `LoggingSettings` | All `LOG_*` | +| `TaskiqSettings` | `TASKIQ_*` (Redis + RabbitMQ + worker tuning) | + +## Anatomy of a Settings Group + +A typical class: ```python -class PostgresSettings(BaseSettings): - POSTGRES_USER: str - POSTGRES_PASSWORD: str - POSTGRES_SERVER: str = "localhost" - POSTGRES_PORT: int = 5432 - POSTGRES_DB: str - - @computed_field +class DatabaseSettings(BaseSettings): + """Database-related settings.""" + + POSTGRES_USER: str = config("POSTGRES_USER", default="postgres") + POSTGRES_PASSWORD: str = config("POSTGRES_PASSWORD", default="postgres") + POSTGRES_SERVER: str = config("POSTGRES_SERVER", default="localhost") + POSTGRES_PORT: int = config("POSTGRES_PORT", default=5432) + POSTGRES_DB: str = config("POSTGRES_DB", default="postgres") + POSTGRES_ASYNC_PREFIX: str = config("POSTGRES_ASYNC_PREFIX", default="postgresql+asyncpg://") + CREATE_TABLES_ON_STARTUP: bool = config("CREATE_TABLES_ON_STARTUP", default=True, cast=bool) + POSTGRES_POOL_SIZE: int = config("POSTGRES_POOL_SIZE", default=20, 
cast=int) + @property def DATABASE_URL(self) -> str: + """Construct the full database URL. + + Falls back to assembling from POSTGRES_* if DATABASE_URL is not set. + """ + direct_url = config("DATABASE_URL", default=None) + if direct_url: + return direct_url return ( - f"postgresql+asyncpg://{self.POSTGRES_USER}:" + f"{self.POSTGRES_ASYNC_PREFIX}{self.POSTGRES_USER}:" f"{self.POSTGRES_PASSWORD}@{self.POSTGRES_SERVER}:" f"{self.POSTGRES_PORT}/{self.POSTGRES_DB}" ) ``` -### Security Settings - -JWT and authentication configuration: - -```python -class CryptSettings(BaseSettings): - SECRET_KEY: str - ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 - REFRESH_TOKEN_EXPIRE_DAYS: int = 7 - - @field_validator("SECRET_KEY") - @classmethod - def validate_secret_key(cls, v: str) -> str: - if len(v) < 32: - raise ValueError("SECRET_KEY must be at least 32 characters") - return v -``` - -### Redis Settings - -Separate Redis instances for different services: - -```python -class RedisCacheSettings(BaseSettings): - REDIS_CACHE_HOST: str = "localhost" - REDIS_CACHE_PORT: int = 6379 - - -class RedisQueueSettings(BaseSettings): - REDIS_QUEUE_HOST: str = "localhost" - REDIS_QUEUE_PORT: int = 6379 - - -class RedisRateLimiterSettings(BaseSettings): - REDIS_RATE_LIMIT_HOST: str = "localhost" - REDIS_RATE_LIMIT_PORT: int = 6379 -``` - -### Rate Limiting Settings +Key points: -Default rate limiting configuration: +- Each field uses `config("VAR_NAME", default=..., cast=...)`. The `Config()` instance is initialized with `backend/.env` so values are loaded at import time. +- For typed conversion, pass `cast=int`, `cast=bool`, etc. +- Use `@property` for derived values (like `DATABASE_URL`) — no need for `@computed_field` since callers always go through `get_settings()`. 
-```python -class DefaultRateLimitSettings(BaseSettings): - DEFAULT_RATE_LIMIT_LIMIT: int = 10 - DEFAULT_RATE_LIMIT_PERIOD: int = 3600 # 1 hour -``` +## Adding Custom Settings -### Admin User Settings - -First superuser account creation: +### Basic Custom Group ```python -class FirstUserSettings(BaseSettings): - ADMIN_NAME: str = "Admin" - ADMIN_EMAIL: str - ADMIN_USERNAME: str = "admin" - ADMIN_PASSWORD: str - - @field_validator("ADMIN_EMAIL") - @classmethod - def validate_admin_email(cls, v: str) -> str: - if "@" not in v: - raise ValueError("ADMIN_EMAIL must be a valid email") - return v -``` - -## Creating Custom Settings +# backend/src/infrastructure/config/settings.py -### Basic Custom Settings +class StorageSettings(BaseSettings): + """File-storage settings.""" -Add your own settings group: + STORAGE_BACKEND: str = config("STORAGE_BACKEND", default="local") # "local" or "s3" + LOCAL_STORAGE_PATH: str = config("LOCAL_STORAGE_PATH", default="./uploads") -```python -class CustomSettings(BaseSettings): - CUSTOM_API_KEY: str = "" - CUSTOM_TIMEOUT: int = 30 - ENABLE_FEATURE_X: bool = False - MAX_UPLOAD_SIZE: int = 10485760 # 10MB + AWS_ACCESS_KEY_ID: str = config("AWS_ACCESS_KEY_ID", default="") + AWS_SECRET_ACCESS_KEY: str = config("AWS_SECRET_ACCESS_KEY", default="") + AWS_BUCKET_NAME: str = config("AWS_BUCKET_NAME", default="") + AWS_REGION: str = config("AWS_REGION", default="us-east-1") - @field_validator("MAX_UPLOAD_SIZE") - @classmethod - def validate_upload_size(cls, v: int) -> int: - if v < 1024: # 1KB minimum - raise ValueError("MAX_UPLOAD_SIZE must be at least 1KB") - if v > 104857600: # 100MB maximum - raise ValueError("MAX_UPLOAD_SIZE cannot exceed 100MB") - return v + MAX_UPLOAD_SIZE_BYTES: int = config("MAX_UPLOAD_SIZE_BYTES", default=10_485_760, cast=int) -# Add to main Settings class class Settings( - AppSettings, - PostgresSettings, - # ... other settings ... 
- CustomSettings, # Add your custom settings + EnvironmentSettings, + DatabaseSettings, + # ...existing groups... + StorageSettings, # add yours ): pass ``` -### Advanced Custom Settings +Then add the matching variables to `backend/.env.example` so they're discoverable. -Settings with complex validation and computed fields: +### Computed / Derived Values ```python -class EmailSettings(BaseSettings): - SMTP_HOST: str = "" - SMTP_PORT: int = 587 - SMTP_USERNAME: str = "" - SMTP_PASSWORD: str = "" - SMTP_USE_TLS: bool = True - EMAIL_FROM: str = "" - EMAIL_FROM_NAME: str = "" - - @computed_field +class StorageSettings(BaseSettings): + STORAGE_BACKEND: str = config("STORAGE_BACKEND", default="local") + AWS_ACCESS_KEY_ID: str = config("AWS_ACCESS_KEY_ID", default="") + AWS_SECRET_ACCESS_KEY: str = config("AWS_SECRET_ACCESS_KEY", default="") + AWS_BUCKET_NAME: str = config("AWS_BUCKET_NAME", default="") + @property - def EMAIL_ENABLED(self) -> bool: - return bool(self.SMTP_HOST and self.SMTP_USERNAME) + def s3_configured(self) -> bool: + return bool(self.AWS_ACCESS_KEY_ID and self.AWS_SECRET_ACCESS_KEY and self.AWS_BUCKET_NAME) - @model_validator(mode="after") - def validate_email_config(self) -> "EmailSettings": - if self.SMTP_HOST and not self.EMAIL_FROM: - raise ValueError("EMAIL_FROM required when SMTP_HOST is set") - if self.SMTP_USERNAME and not self.SMTP_PASSWORD: - raise ValueError("SMTP_PASSWORD required when SMTP_USERNAME is set") - return self + @property + def storage_enabled(self) -> bool: + if self.STORAGE_BACKEND == "local": + return True + if self.STORAGE_BACKEND == "s3": + return self.s3_configured + return False ``` -### Feature Flag Settings +### Validation -Organize feature toggles: +For richer validation, switch a field's value to use Pydantic validators: ```python -class FeatureSettings(BaseSettings): - # Core features - ENABLE_CACHING: bool = True - ENABLE_RATE_LIMITING: bool = True - ENABLE_BACKGROUND_JOBS: bool = True - - # Optional features - 
ENABLE_ANALYTICS: bool = False - ENABLE_EMAIL_NOTIFICATIONS: bool = False - ENABLE_FILE_UPLOADS: bool = False +from pydantic import field_validator, model_validator - # Experimental features - ENABLE_EXPERIMENTAL_API: bool = False - ENABLE_BETA_FEATURES: bool = False - - @model_validator(mode="after") - def validate_feature_dependencies(self) -> "FeatureSettings": - if self.ENABLE_EMAIL_NOTIFICATIONS and not self.ENABLE_BACKGROUND_JOBS: - raise ValueError("Email notifications require background jobs") - return self -``` -## Settings Validation - -### Field Validation - -Validate individual fields: - -```python -class DatabaseSettings(BaseSettings): - DB_POOL_SIZE: int = 20 - DB_MAX_OVERFLOW: int = 30 - DB_TIMEOUT: int = 30 - - @field_validator("DB_POOL_SIZE") - @classmethod - def validate_pool_size(cls, v: int) -> int: - if v < 1: - raise ValueError("Pool size must be at least 1") - if v > 100: - raise ValueError("Pool size should not exceed 100") - return v +class StorageSettings(BaseSettings): + STORAGE_BACKEND: str = config("STORAGE_BACKEND", default="local") + MAX_UPLOAD_SIZE_BYTES: int = config("MAX_UPLOAD_SIZE_BYTES", default=10_485_760, cast=int) - @field_validator("DB_TIMEOUT") + @field_validator("MAX_UPLOAD_SIZE_BYTES") @classmethod - def validate_timeout(cls, v: int) -> int: - if v < 5: - raise ValueError("Timeout must be at least 5 seconds") + def _check_upload_size(cls, v: int) -> int: + if v < 1024: + raise ValueError("MAX_UPLOAD_SIZE_BYTES must be at least 1KB") + if v > 100 * 1024 * 1024: + raise ValueError("MAX_UPLOAD_SIZE_BYTES cannot exceed 100MB") return v -``` - -### Model Validation - -Validate across multiple fields: - -```python -class SecuritySettings(BaseSettings): - ENABLE_HTTPS: bool = False - SSL_CERT_PATH: str = "" - SSL_KEY_PATH: str = "" - FORCE_SSL: bool = False @model_validator(mode="after") - def validate_ssl_config(self) -> "SecuritySettings": - if self.ENABLE_HTTPS: - if not self.SSL_CERT_PATH: - raise ValueError("SSL_CERT_PATH 
required when HTTPS enabled") - if not self.SSL_KEY_PATH: - raise ValueError("SSL_KEY_PATH required when HTTPS enabled") - - if self.FORCE_SSL and not self.ENABLE_HTTPS: - raise ValueError("Cannot force SSL without enabling HTTPS") - + def _check_backend(self) -> "StorageSettings": + if self.STORAGE_BACKEND not in ("local", "s3"): + raise ValueError(f"Unknown STORAGE_BACKEND: {self.STORAGE_BACKEND}") return self ``` -### Environment-Specific Validation +Validators run when `Settings()` is instantiated at startup, so misconfiguration fails fast. -Different validation rules per environment: +## Enums for Constrained Values -```python -class EnvironmentSettings(BaseSettings): - ENVIRONMENT: str = "local" - DEBUG: bool = True +For options with a fixed set of valid values, define a `StrEnum` in `src/infrastructure/config/enums.py` and use it as the default: - @model_validator(mode="after") - def validate_environment_config(self) -> "EnvironmentSettings": - if self.ENVIRONMENT == "production": - if self.DEBUG: - raise ValueError("DEBUG must be False in production") +```python +# enums.py +from enum import StrEnum - if self.ENVIRONMENT not in ["local", "staging", "production"]: - raise ValueError("ENVIRONMENT must be local, staging, or production") - return self -``` +class StorageBackend(StrEnum): + LOCAL = "local" + S3 = "s3" -## Computed Properties -### Dynamic Configuration +# settings.py +from .enums import StorageBackend -Create computed values from other settings: -```python class StorageSettings(BaseSettings): - STORAGE_TYPE: str = "local" # local, s3, gcs - - # Local storage - LOCAL_STORAGE_PATH: str = "./uploads" - - # S3 settings - AWS_ACCESS_KEY_ID: str = "" - AWS_SECRET_ACCESS_KEY: str = "" - AWS_BUCKET_NAME: str = "" - AWS_REGION: str = "us-east-1" - - @computed_field - @property - def STORAGE_ENABLED(self) -> bool: - if self.STORAGE_TYPE == "local": - return bool(self.LOCAL_STORAGE_PATH) - elif self.STORAGE_TYPE == "s3": - return bool(self.AWS_ACCESS_KEY_ID 
and self.AWS_SECRET_ACCESS_KEY and self.AWS_BUCKET_NAME) - return False - - @computed_field - @property - def STORAGE_CONFIG(self) -> dict: - if self.STORAGE_TYPE == "local": - return {"path": self.LOCAL_STORAGE_PATH} - elif self.STORAGE_TYPE == "s3": - return { - "bucket": self.AWS_BUCKET_NAME, - "region": self.AWS_REGION, - "credentials": { - "access_key": self.AWS_ACCESS_KEY_ID, - "secret_key": self.AWS_SECRET_ACCESS_KEY, - }, - } - return {} + STORAGE_BACKEND: str = config("STORAGE_BACKEND", default=StorageBackend.LOCAL.value) ``` -## Organizing Settings +The boilerplate already does this for `CacheBackend`, `LogFormat`, `LogLevel`, `SessionBackend`, `TaskiqBrokerType`, and `EnvironmentOption`. -### Service-Based Organization +## Removing Built-in Groups -Group settings by service or domain: +If you don't use a feature, drop the corresponding class from the `Settings` MRO: ```python -# Authentication service settings -class AuthSettings(BaseSettings): - JWT_SECRET_KEY: str - JWT_ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE: int = 30 - REFRESH_TOKEN_EXPIRE: int = 7200 - PASSWORD_MIN_LENGTH: int = 8 - - -# Notification service settings -class NotificationSettings(BaseSettings): - EMAIL_ENABLED: bool = False - SMS_ENABLED: bool = False - PUSH_ENABLED: bool = False - - # Email settings - SMTP_HOST: str = "" - SMTP_PORT: int = 587 - - # SMS settings (example with Twilio) - TWILIO_ACCOUNT_SID: str = "" - TWILIO_AUTH_TOKEN: str = "" - - -# Main settings class Settings( - AppSettings, + EnvironmentSettings, + DatabaseSettings, + CORSSettings, AuthSettings, - NotificationSettings, - # ... 
other settings -): - pass -``` - -### Conditional Settings Loading - -Load different settings based on environment: - -```python -class BaseAppSettings(BaseSettings): - APP_NAME: str = "FastAPI App" - DEBUG: bool = False - - -class DevelopmentSettings(BaseAppSettings): - DEBUG: bool = True - LOG_LEVEL: str = "DEBUG" - DATABASE_ECHO: bool = True - - -class ProductionSettings(BaseAppSettings): - DEBUG: bool = False - LOG_LEVEL: str = "WARNING" - DATABASE_ECHO: bool = False - - -def get_settings() -> BaseAppSettings: - environment = os.getenv("ENVIRONMENT", "local") - - if environment == "production": - return ProductionSettings() - else: - return DevelopmentSettings() - - -settings = get_settings() -``` - -## Removing Unused Services - -### Minimal Configuration - -Remove services you don't need: - -```python -# Minimal setup without Redis services -class MinimalSettings( + APISettings, AppSettings, - PostgresSettings, - CryptSettings, - FirstUserSettings, - # Removed: RedisCacheSettings - # Removed: RedisQueueSettings - # Removed: RedisRateLimiterSettings - EnvironmentSettings, + LoggingSettings, + # CacheSettings — removed + # RateLimiterSettings — removed + # TaskiqSettings — removed ): pass ``` -### Service Feature Flags +You'll also want to: -Use feature flags to conditionally enable services: - -```python -class ServiceSettings(BaseSettings): - ENABLE_REDIS: bool = True - ENABLE_CELERY: bool = True - ENABLE_MONITORING: bool = False - - -class ConditionalSettings( - AppSettings, - PostgresSettings, - CryptSettings, - ServiceSettings, -): - # Add Redis settings only if enabled - def __init__(self, **kwargs): - super().__init__(**kwargs) - - if self.ENABLE_REDIS: - # Dynamically add Redis settings - self.__class__ = type("ConditionalSettings", (self.__class__, RedisCacheSettings), {}) -``` +- Remove the now-orphan code that depends on those settings (e.g. 
cache decorator, taskiq broker, rate limiter middleware) +- Drop the corresponding env vars from `.env.example` +- Disable startup of those subsystems in `infrastructure/app_factory.py` ## Testing Settings -### Test Configuration - -Create separate settings for testing: +The test suite uses fixtures that override settings. The general pattern: ```python -class TestSettings(BaseSettings): - # Override database for testing - POSTGRES_DB: str = "test_database" - - # Disable external services - ENABLE_REDIS: bool = False - ENABLE_EMAIL: bool = False +import pytest +from src.infrastructure.config.settings import Settings - # Speed up tests - ACCESS_TOKEN_EXPIRE_MINUTES: int = 5 - # Test-specific settings - TEST_USER_EMAIL: str = "test@example.com" - TEST_USER_PASSWORD: str = "testpassword123" - - -# Use in tests @pytest.fixture -def test_settings(): - return TestSettings() +def test_settings(monkeypatch): + monkeypatch.setenv("ENVIRONMENT", "local") + monkeypatch.setenv("CACHE_ENABLED", "false") + monkeypatch.setenv("RATE_LIMITER_ENABLED", "false") + return Settings() ``` -### Settings Validation Testing - -Test your custom settings: +For one-off overrides without env vars, instantiate the relevant settings class directly with kwargs: ```python -def test_custom_settings_validation(): - # Test valid configuration - settings = CustomSettings(CUSTOM_API_KEY="test-key", CUSTOM_TIMEOUT=60, MAX_UPLOAD_SIZE=5242880) # 5MB - assert settings.CUSTOM_TIMEOUT == 60 - - # Test validation error - with pytest.raises(ValueError, match="MAX_UPLOAD_SIZE cannot exceed 100MB"): - CustomSettings(MAX_UPLOAD_SIZE=209715200) # 200MB - - -def test_settings_computed_fields(): - settings = StorageSettings( - STORAGE_TYPE="s3", - AWS_ACCESS_KEY_ID="test-key", - AWS_SECRET_ACCESS_KEY="test-secret", - AWS_BUCKET_NAME="test-bucket", - ) - - assert settings.STORAGE_ENABLED is True - assert settings.STORAGE_CONFIG["bucket"] == "test-bucket" +def test_storage_validation(): + with 
pytest.raises(ValueError, match="cannot exceed 100MB"): + StorageSettings(MAX_UPLOAD_SIZE_BYTES=200_000_000) ``` ## Best Practices ### Organization -- Group related settings in dedicated classes -- Use descriptive names for settings groups -- Keep validation logic close to the settings -- Document complex validation rules +- Group settings by **subsystem** (cache, auth, taskiq), not by environment +- Keep validation alongside the field it validates +- Add a one-line docstring per class so its purpose is obvious +- Mirror group names in `.env.example` section headers ### Security -- Validate sensitive settings like secret keys -- Never set default values for secrets in production -- Use computed fields to derive connection strings -- Separate test and production configurations +- Validate `SECRET_KEY` length / strength when `ENVIRONMENT=production` (the boilerplate already does this via the production security validator) +- Never set a real default for credentials — leave them blank and let the validator complain +- Use `@property` to derive connection strings rather than embedding them in env vars ### Performance -- Use `@computed_field` for expensive calculations -- Cache settings instances appropriately -- Avoid complex validation in hot paths -- Use model validators for cross-field validation +- The `Settings` instance is created once at import time and shared via `get_settings()` — don't instantiate it per-request +- Keep validators cheap; they run at startup but they also run if anyone re-instantiates `Settings` ### Testing -- Create separate test settings classes -- Test all validation rules -- Mock external service settings in tests -- Use dependency injection for settings in tests +- Use `monkeypatch.setenv(...)` to vary env vars per test +- Don't reach for the global `settings` in tests when you can pass an instance directly + +## See Also -The settings system provides type safety, validation, and organization for your application configuration. 
Start with the built-in settings and extend them as your application grows! +- **[Environment Variables](environment-variables.md)** — Full variable reference +- **[Docker Setup](docker-setup.md)** — How variables flow into Compose +- **[Environment-Specific](environment-specific.md)** — Recommended values per environment diff --git a/docs/user-guide/database/crud.md b/docs/user-guide/database/crud.md index 6b169ed1..38b0f6d3 100644 --- a/docs/user-guide/database/crud.md +++ b/docs/user-guide/database/crud.md @@ -1,492 +1,434 @@ # CRUD Operations -This guide covers all CRUD (Create, Read, Update, Delete) operations available in the FastAPI Boilerplate using FastCRUD, a powerful library that provides consistent and efficient database operations. +This guide covers the CRUD (Create, Read, Update, Delete) operations available in the boilerplate via [FastCRUD](https://benavlabs.github.io/fastcrud/). ## Overview -The boilerplate uses [FastCRUD](https://github.com/igorbenav/fastcrud) for all database operations. FastCRUD provides: +The boilerplate uses **FastCRUD** for all database access. 
It gives you: -- **Consistent API** across all models -- **Type safety** with generic type parameters -- **Automatic pagination** support -- **Advanced filtering** and joining capabilities -- **Soft delete** support -- **Optimized queries** with selective field loading +- A consistent async API across every model +- Automatic pagination helpers +- Built-in soft delete support (when the model has `SoftDeleteMixin`) +- Selective field loading via `schema_to_select` +- Joined queries for related data -## CRUD Class Structure +## Where CRUD Lives -Each model has a corresponding CRUD class that defines the available operations: +Each module owns its own FastCRUD instance, kept tiny and predictable: ```python -# src/app/crud/crud_users.py +# backend/src/modules/user/crud.py from fastcrud import FastCRUD -from app.models.user import User -from app.schemas.user import ( - UserCreateInternal, UserUpdate, UserUpdateInternal, - UserDelete, UserRead -) -CRUDUser = FastCRUD[ - User, # Model class - UserCreateInternal, # Create schema - UserUpdate, # Update schema - UserUpdateInternal, # Internal update schema - UserDelete, # Delete schema - UserRead # Read schema -] -crud_users = CRUDUser(User) +from .models import User + +crud_users: FastCRUD = FastCRUD(User) ``` -## Read Operations +```python +# backend/src/modules/tier/crud.py +from fastcrud import FastCRUD + +from .models import Tier -### Get Single Record +crud_tiers: FastCRUD = FastCRUD(Tier) +``` -Retrieve a single record by any field: +The CRUD instance is then imported by the module's `service.py`, which adds business logic on top — input validation, permission checks, password hashing, multi-step orchestration. 
```python -# Get user by ID -user = await crud_users.get(db=db, id=user_id) +# Typical service method (modules/user/service.py) +from .crud import crud_users +from .schemas import UserCreate, UserCreateInternal, UserRead -# Get user by username -user = await crud_users.get(db=db, username="john_doe") -# Get user by email -user = await crud_users.get(db=db, email="john@example.com") +async def create(self, user: UserCreate, db: AsyncSession) -> dict[str, Any]: + if await crud_users.exists(db=db, email=user.email): + raise UserExistsError("Email already registered") + if await crud_users.exists(db=db, username=user.username): + raise UserExistsError("Username already taken") -# Get with specific fields only -user = await crud_users.get( - db=db, - schema_to_select=UserRead, # Only select fields defined in UserRead - id=user_id, -) + payload = user.model_dump() + payload["hashed_password"] = get_password_hash(payload.pop("password")) + user_internal = UserCreateInternal(**payload) + + return await crud_users.create(db=db, object=user_internal, schema_to_select=UserRead) ``` -**Real usage from the codebase:** +## Read Operations + +### Get a Single Record ```python -# From src/app/api/v1/users.py -db_user = await crud_users.get( - db=db, +# By id +user = await crud_users.get(db=db, id=user_id) + +# By any indexed field +user = await crud_users.get(db=db, username="userson") +user = await crud_users.get(db=db, email="user.userson@example.com") + +# Restrict the returned shape with a Pydantic schema +user = await crud_users.get( + db=db, schema_to_select=UserRead, - username=username, + username=username, is_deleted=False, ) ``` ### Get Multiple Records -Retrieve multiple records with filtering and pagination: - ```python -# Get all users -users = await crud_users.get_multi(db=db) - -# Get with pagination -users = await crud_users.get_multi( - db=db, - offset=0, # Skip first 0 records - limit=10, # Return maximum 10 records -) - -# Get with filtering -active_users = 
await crud_users.get_multi( +# All non-deleted users, first 10 +result = await crud_users.get_multi( db=db, - is_deleted=False, # Filter condition - offset=compute_offset(page, items_per_page), - limit=items_per_page + is_deleted=False, + offset=0, + limit=10, ) ``` -**Pagination response structure:** +`get_multi` returns a dict shaped like: ```python { - "data": [ - {"id": 1, "username": "john", "email": "john@example.com"}, - {"id": 2, "username": "jane", "email": "jane@example.com"} - ], + "data": [...], "total_count": 25, - "has_more": true, - "page": 1, - "items_per_page": 10 } ``` -### Check Existence +For full paginated responses (`page` / `has_more` / `items_per_page`), wrap the result with `paginated_response()` — see [Pagination](#pagination). -Check if a record exists without fetching it: +### Filter Operators + +FastCRUD supports `__` operators on field names: ```python -# Check if user exists -user_exists = await crud_users.exists(db=db, email="john@example.com") -# Returns True or False +# Substring match +await crud_users.get_multi(db=db, username__icontains="john") + +# Range +await crud_users.get_multi(db=db, created_at__gt=cutoff_datetime) -# Check if username is available -username_taken = await crud_users.exists(db=db, username="john_doe") +# Set membership +await crud_users.get_multi(db=db, tier_id__in=[1, 2, 3]) ``` -**Real usage example:** +Available operators include `__contains`, `__icontains`, `__startswith`, `__endswith`, `__gt`, `__ge`, `__lt`, `__le`, `__in`, `__not_in`, and others. See the [FastCRUD docs](https://benavlabs.github.io/fastcrud/) for the full list. 
+ +### Check Existence ```python -# From src/app/api/v1/users.py - checking before creating -email_row = await crud_users.exists(db=db, email=user.email) -if email_row: - raise DuplicateValueException("Email is already registered") +if await crud_users.exists(db=db, email="user@example.com"): + raise UserExistsError("Email already registered") ``` -### Count Records +`exists()` is faster than `get()` when you only need a yes/no — it doesn't transfer the row. -Get count of records matching criteria: +### Count Records ```python -# Count all users -total_users = await crud_users.count(db=db) - -# Count active users -active_count = await crud_users.count(db=db, is_deleted=False) - -# Count by specific criteria -admin_count = await crud_users.count(db=db, is_superuser=True) +total = await crud_users.count(db=db) +admins = await crud_users.count(db=db, is_superuser=True) +active = await crud_users.count(db=db, is_deleted=False) ``` ## Create Operations -### Basic Creation - -Create new records using Pydantic schemas: - ```python -# Create user -user_data = UserCreateInternal( - username="john_doe", - email="john@example.com", - hashed_password="hashed_password_here" +user_internal = UserCreateInternal( + name="User Userson", + username="userson", + email="user.userson@example.com", + hashed_password=get_password_hash("Str1ngst!"), ) -created_user = await crud_users.create(db=db, object=user_data) +created = await crud_users.create(db=db, object=user_internal) ``` -**Real creation example:** +The pattern in service code: -```python -# From src/app/api/v1/users.py -user_internal_dict = user.model_dump() -user_internal_dict["hashed_password"] = get_password_hash(password=user_internal_dict["password"]) -del user_internal_dict["password"] +1. Validate the *external* schema (`UserCreate`) on input +2. Apply business rules (uniqueness check, password hashing, etc.) +3. Build the *internal* schema (`UserCreateInternal`) with the values you actually want to persist +4. 
Call `crud.create(db=db, object=internal_schema)` + +Pass `schema_to_select=UserRead` if you want the returned dict trimmed to the public shape: -user_internal = UserCreateInternal(**user_internal_dict) -created_user = await crud_users.create(db=db, object=user_internal) +```python +created = await crud_users.create(db=db, object=user_internal, schema_to_select=UserRead) ``` -### Create with Relationships +### Creating Records with Foreign Keys -When creating records with foreign keys: +For models that reference other rows, just include the FK column on the create schema: ```python -# Create post for a user -post_data = PostCreateInternal( - title="My First Post", - content="This is the content of my post", - created_by_user_id=user.id # Foreign key reference +new_rate_limit = RateLimitCreate( + tier_id=tier.id, + name="users_list", + path="/api/v1/users/", + limit=100, + period=60, ) - -created_post = await crud_posts.create(db=db, object=post_data) +await crud_rate_limits.create(db=db, object=new_rate_limit) ``` ## Update Operations -### Basic Updates - -Update records by any field: - ```python -# Update user by ID -update_data = UserUpdate(email="newemail@example.com") -await crud_users.update(db=db, object=update_data, id=user_id) - -# Update by username -await crud_users.update(db=db, object=update_data, username="john_doe") - -# Update multiple fields -update_data = UserUpdate( - email="newemail@example.com", - profile_image_url="https://newimage.com/photo.jpg" +# Update by id +await crud_users.update( + db=db, + object=UserUpdate(email="newemail@example.com"), + id=user_id, ) -await crud_users.update(db=db, object=update_data, id=user_id) + +# Update by any field +await crud_users.update(db=db, object=UserUpdate(name="New Name"), username=username) ``` -### Conditional Updates +Only fields set on the update schema are written — `*Update` schemas have every field as `Optional[T] = None`, and unset fields are skipped. 
-Update with validation: +### Common Pattern: Validate Before Update ```python -# From real endpoint - check before updating -if values.username != db_user.username: - existing_username = await crud_users.exists(db=db, username=values.username) - if existing_username: - raise DuplicateValueException("Username not available") +# Service method +if values.username and values.username != db_user["username"]: + if await crud_users.exists(db=db, username=values.username): + raise UserExistsError("Username not available") -await crud_users.update(db=db, object=values, username=username) +await crud_users.update(db=db, object=values, id=db_user["id"]) ``` -### Bulk Updates +### Bulk Update -Update multiple records at once: +`update()` accepts the same lookup args as `get_multi()` — pass non-id criteria to update many rows: ```python -# Update all users with specific criteria -update_data = {"is_active": False} -await crud_users.update(db=db, object=update_data, is_deleted=True) +# Reset profile_image_url for everyone in a deprecated tier +await crud_users.update( + db=db, + object=UserUpdate(profile_image_url="https://www.profileimageurl.com"), + tier_id=deprecated_tier_id, +) ``` ## Delete Operations -### Soft Delete - -For models with soft delete fields (like User, Post): +### Soft Delete (default for models with `SoftDeleteMixin`) ```python -# Soft delete - sets is_deleted=True, deleted_at=now() -await crud_users.delete(db=db, username="john_doe") +# Sets is_deleted=True and deleted_at=now() +await crud_users.delete(db=db, id=user_id) -# The record stays in the database but is marked as deleted -user = await crud_users.get(db=db, username="john_doe", is_deleted=True) +# The row stays — query it explicitly +soft_deleted = await crud_users.get(db=db, id=user_id, is_deleted=True) ``` ### Hard Delete -Permanently remove records from the database: - ```python -# Permanently delete from database -await crud_users.db_delete(db=db, username="john_doe") - -# The record is 
completely removed +# DELETE FROM user WHERE id = ? +await crud_users.db_delete(db=db, id=user_id) ``` -**Real deletion example:** +### Filtering Out Soft-Deleted Records -```python -# From src/app/api/v1/users.py -# Regular users get soft delete -await crud_users.delete(db=db, username=username) +Add `is_deleted=False` to your queries: -# Superusers can hard delete -await crud_users.db_delete(db=db, username=username) +```python +active_users = await crud_users.get_multi(db=db, is_deleted=False, limit=10) ``` -## Advanced Operations +## Joined Queries -### Joined Queries +For models with relationships (e.g. `User.tier`), the relationship loads automatically via `lazy="selectin"`. -Get data from multiple related tables: +For ad-hoc joins without a configured relationship, use `get_joined` / `get_multi_joined`: ```python -# Get posts with user information -posts_with_users = await crud_posts.get_multi_joined( +posts_with_authors = await crud_posts.get_multi_joined( db=db, join_model=User, join_on=Post.created_by_user_id == User.id, schema_to_select=PostRead, join_schema_to_select=UserRead, - join_prefix="user_" + join_prefix="author_", + offset=0, + limit=10, ) +# Each row: {..., "author_username": ..., "author_email": ...} ``` -Result structure: -```python -{ - "id": 1, - "title": "My Post", - "content": "Post content", - "user_id": 123, - "user_username": "john_doe", - "user_email": "john@example.com" -} -``` +The boilerplate also uses **`JoinConfig`** for more complex multi-join queries (see `UserService.get_rate_limits` for a real example with two joins). 
-### Custom Filtering +## Pagination -Advanced filtering with SQLAlchemy expressions: +The boilerplate uses FastCRUD's `paginated_response()` helper to turn a `get_multi` result into a public-shaped paginated response: ```python -from sqlalchemy import and_, or_ +from fastcrud import PaginatedListResponse, compute_offset, paginated_response + +@router.get("/", response_model=PaginatedListResponse[UserRead]) +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + result = await user_service.get_paginated( + skip=compute_offset(page, items_per_page), + limit=items_per_page, + db=db, + ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) +``` -# Complex filters -users = await crud_users.get_multi( - db=db, - filter_criteria=[ - and_( - User.is_deleted == False, - User.created_at > datetime(2024, 1, 1) - ) - ] -) +Response shape: + +```json +{ + "data": [{ "id": 1, "name": "...", "username": "..." }], + "total_count": 150, + "has_more": true, + "page": 1, + "items_per_page": 10 +} ``` -### Optimized Field Selection +## Selective Field Loading -Select only needed fields for better performance: +`schema_to_select` lets the database return only the columns the caller cares about. The result is a plain dict matching the schema fields: ```python -# Only select id and username -users = await crud_users.get_multi( +# Returns just id, name, username, email, profile_image_url, ... 
+result = await crud_users.get_multi( db=db, - schema_to_select=UserRead, # Use schema to define fields - limit=100 -) - -# Or specify fields directly -users = await crud_users.get_multi( - db=db, - schema_to_select=["id", "username", "email"], - limit=100 + schema_to_select=UserRead, + is_deleted=False, + limit=100, ) ``` -## Practical Examples - -### Complete CRUD Workflow +Use this when you want to avoid fetching `hashed_password` or other heavy fields you won't use. -Here's a complete example showing all CRUD operations: +## Complete Workflow Example ```python from sqlalchemy.ext.asyncio import AsyncSession -from app.crud.crud_users import crud_users -from app.schemas.user import UserCreateInternal, UserUpdate, UserRead -async def user_management_example(db: AsyncSession): +from src.modules.user.crud import crud_users +from src.modules.user.schemas import ( + UserCreateInternal, + UserRead, + UserUpdate, +) +from src.infrastructure.auth.utils import get_password_hash + + +async def user_lifecycle(db: AsyncSession) -> None: # 1. CREATE - user_data = UserCreateInternal( - username="demo_user", - email="demo@example.com", - hashed_password="hashed_password" + new_user = await crud_users.create( + db=db, + object=UserCreateInternal( + name="Demo User", + username="demo_user", + email="demo@example.com", + hashed_password=get_password_hash("Str1ngst!"), + ), + schema_to_select=UserRead, ) - new_user = await crud_users.create(db=db, object=user_data) - print(f"Created user: {new_user.id}") - + # 2. READ - user = await crud_users.get( - db=db, - id=new_user.id, - schema_to_select=UserRead + fetched = await crud_users.get( + db=db, + id=new_user["id"], + schema_to_select=UserRead, ) - print(f"Retrieved user: {user.username}") - - # 3. UPDATE - update_data = UserUpdate(email="updated@example.com") - await crud_users.update(db=db, object=update_data, id=new_user.id) - print("User updated") - - # 4. 
DELETE (soft delete) - await crud_users.delete(db=db, id=new_user.id) - print("User soft deleted") - - # 5. VERIFY DELETION - deleted_user = await crud_users.get(db=db, id=new_user.id, is_deleted=True) - print(f"User deleted at: {deleted_user.deleted_at}") -``` -### Pagination Helper + # 3. UPDATE + await crud_users.update( + db=db, + object=UserUpdate(name="Demo Userson"), + id=fetched["id"], + ) -Using FastCRUD's pagination utilities: + # 4. SOFT DELETE + await crud_users.delete(db=db, id=fetched["id"]) -```python -from fastcrud import compute_offset, paginated_response + # 5. FETCH SOFT-DELETED + soft_deleted = await crud_users.get(db=db, id=fetched["id"], is_deleted=True) + assert soft_deleted["deleted_at"] is not None +``` +## Error Handling -async def get_paginated_users( - db: AsyncSession, - page: int = 1, - items_per_page: int = 10 -): - users_data = await crud_users.get_multi( - db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - is_deleted=False, - schema_to_select=UserRead - ) - - return paginated_response( - crud_data=users_data, - page=page, - items_per_page=items_per_page - ) -``` +Domain errors live in `modules/common/exceptions.py` (`UserExistsError`, `UserNotFoundError`, `ResourceNotFoundError`, `PermissionDeniedError`, etc.). Routes catch them and translate to HTTP errors via `modules/common/utils/error_handler.handle_exception`. -### Error Handling +```python +async def create(self, user: UserCreate, db: AsyncSession) -> dict[str, Any]: + if await crud_users.exists(db=db, email=user.email): + raise UserExistsError("Email already registered") + # ... create user ... 
+``` -Proper error handling with CRUD operations: +The route then: ```python -from app.core.exceptions.http_exceptions import NotFoundException, DuplicateValueException - -async def safe_user_creation(db: AsyncSession, user_data: UserCreate): - # Check for duplicates - if await crud_users.exists(db=db, email=user_data.email): - raise DuplicateValueException("Email already registered") - - if await crud_users.exists(db=db, username=user_data.username): - raise DuplicateValueException("Username not available") - - # Create user - try: - user_internal = UserCreateInternal(**user_data.model_dump()) - created_user = await crud_users.create(db=db, object=user_internal) - return created_user - except Exception as e: - # Handle database errors - await db.rollback() - raise e +try: + return await user_service.create(user, db) +except Exception as e: + http_exc = handle_exception(e) + if http_exc: + raise http_exc + raise HTTPException(status_code=500, detail="An unexpected error occurred") ``` ## Performance Tips -### 1. Use Schema Selection +### Use `schema_to_select` -Always specify `schema_to_select` to avoid loading unnecessary data: +Avoid loading columns you won't read: ```python -# Good - only loads needed fields +# Good — only the public fields user = await crud_users.get(db=db, id=user_id, schema_to_select=UserRead) -# Avoid - loads all fields +# Avoid — pulls the password hash too user = await crud_users.get(db=db, id=user_id) ``` -### 2. Batch Operations - -For multiple operations, use transactions: +### Use `exists()` for existence checks ```python -async def batch_user_updates(db: AsyncSession, updates: List[dict]): - try: - for update in updates: - await crud_users.update(db=db, object=update["data"], id=update["id"]) - await db.commit() - except Exception: - await db.rollback() - raise -``` +# Good — boolean, no row transfer +if await crud_users.exists(db=db, email=email): + raise UserExistsError("Email taken") -### 3. 
Use Exists for Checks +# Avoid — fetches the entire row to check None +user = await crud_users.get(db=db, email=email) +if user: + raise UserExistsError("Email taken") +``` -Use `exists()` instead of `get()` when you only need to check existence: +### Use `count()` for counts ```python -# Good - faster, doesn't load data -if await crud_users.exists(db=db, email=email): - raise DuplicateValueException("Email taken") +# Good +total = await crud_users.count(db=db, is_deleted=False) -# Avoid - slower, loads unnecessary data -user = await crud_users.get(db=db, email=email) -if user: - raise DuplicateValueException("Email taken") +# Avoid +result = await crud_users.get_multi(db=db, is_deleted=False, limit=10000) +total = result["total_count"] # works, but transfers data ``` +### Pre-fetch related data in services + +If a route calls `crud_users.get` then `crud_tiers.get(tier_id)` separately, prefer using the existing `User.tier` relationship (auto-loaded with `selectin`) or a `get_joined` call, so the database only round-trips once. + ## Next Steps -- **[Database Migrations](migrations.md)** - Managing database schema changes -- **[API Development](../api/index.md)** - Using CRUD in API endpoints -- **[Caching](../caching/index.md)** - Optimizing CRUD with caching \ No newline at end of file +- **[Migrations](migrations.md)** — Manage schema changes with Alembic +- **[API Endpoints](../api/endpoints.md)** — Wire CRUD into FastAPI routes +- **[Caching](../caching/index.md)** — Cache CRUD results diff --git a/docs/user-guide/database/index.md b/docs/user-guide/database/index.md index aa941ba8..649528a9 100644 --- a/docs/user-guide/database/index.md +++ b/docs/user-guide/database/index.md @@ -4,232 +4,313 @@ Learn how to work with the database layer in the FastAPI Boilerplate. 
This section covers models, schemas, CRUD operations, and migrations. ## What You'll Learn -- **[Models](models.md)** - Define database tables with SQLAlchemy models -- **[Schemas](schemas.md)** - Validate and serialize data with Pydantic schemas -- **[CRUD Operations](crud.md)** - Perform database operations with FastCRUD -- **[Migrations](migrations.md)** - Manage database schema changes with Alembic +- **[Models](models.md)** - Define database tables with SQLAlchemy 2.0 +- **[Schemas](schemas.md)** - Validate and serialize data with Pydantic +- **[CRUD Operations](crud.md)** - Database access via FastCRUD +- **[Migrations](migrations.md)** - Manage schema changes with Alembic ## Quick Overview -The boilerplate uses a layered architecture that separates concerns: +The boilerplate splits the data layer across each feature module so a feature owns its full stack: ```python -# API Endpoint +# modules/user/routes.py — request comes in, validated by UserCreate @router.post("/", response_model=UserRead) -async def create_user(user_data: UserCreate, db: AsyncSession): - return await crud_users.create(db=db, object=user_data) - -# The layers work together: -# 1. UserCreate schema validates the input -# 2. crud_users handles the database operation -# 3. User model defines the database table -# 4. 
UserRead schema formats the response +async def create_user( + user: UserCreate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], +): + return await user_service.create(user, db) + +# modules/user/service.py — business logic, calls into FastCRUD +# modules/user/crud.py — crud_users = FastCRUD(User) +# modules/user/models.py — User SQLAlchemy model ``` ## Architecture -The database layer follows a clear separation: - -``` -API Request +```text +HTTP Request + ↓ +Pydantic Schema (modules/<module>/schemas.py) + ↓ +APIRouter (modules/<module>/routes.py) ↓ -Pydantic Schema (validation & serialization) +Service (modules/<module>/service.py) ↓ -CRUD Layer (business logic & database operations) +FastCRUD (modules/<module>/crud.py) ↓ -SQLAlchemy Model (database table definition) +SQLAlchemy Model (modules/<module>/models.py) ↓ -PostgreSQL Database +PostgreSQL ``` -## Key Features +The service layer holds business rules (permission checks, multi-step orchestration). FastCRUD handles the boilerplate query plumbing. The model defines the table. 
+ +## Key Components + +### SQLAlchemy 2.0 Models + +Models inherit from `Base` (a `DeclarativeBase` + `MappedAsDataclass` combination) and the relevant mixins: -### 🗄️ **SQLAlchemy 2.0 Models** -Modern async SQLAlchemy with type hints: ```python -class User(Base): +# modules/user/models.py +from sqlalchemy import String +from sqlalchemy.orm import Mapped, mapped_column + +from ...infrastructure.database.session import Base +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin + + +class User(Base, TimestampMixin, SoftDeleteMixin): __tablename__ = "user" - - id: Mapped[int] = mapped_column(primary_key=True) - username: Mapped[str] = mapped_column(String(50), unique=True) - email: Mapped[str] = mapped_column(String(100), unique=True) - created_at: Mapped[datetime] = mapped_column(default=datetime.utcnow) + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + username: Mapped[str] = mapped_column(String(20), unique=True, index=True) + email: Mapped[str] = mapped_column(String(50), unique=True, index=True) + hashed_password: Mapped[str] = mapped_column(String(100)) ``` -### ✅ **Pydantic Schemas** -Automatic validation and serialization: +Available mixins from `infrastructure/database/models.py`: + +- `TimestampMixin` — adds `created_at` and `updated_at` +- `SoftDeleteMixin` — adds `is_deleted` and `deleted_at` +- `UUIDMixin` — UUID primary key (alternative to integer ids) + +### Pydantic Schemas + +Schemas live alongside the model and split into request/response shapes: + ```python +# modules/user/schemas.py +from pydantic import BaseModel, EmailStr, Field + + class UserCreate(BaseModel): - username: str = Field(min_length=2, max_length=50) + name: str = Field(min_length=2, max_length=30) + username: str = Field(min_length=2, max_length=20) email: EmailStr password: str = Field(min_length=8) + class UserRead(BaseModel): id: int + name: str username: str - email: str - created_at: datetime - # Note: no password field in read schema 
+ email: EmailStr + # No hashed_password — schemas exclude sensitive fields ``` -### 🔧 **FastCRUD Operations** -Consistent database operations: +### FastCRUD Operations + +Each module exposes a thin FastCRUD wrapper: + +```python +# modules/user/crud.py +from fastcrud import FastCRUD +from .models import User + +crud_users: FastCRUD = FastCRUD(User) +``` + +Then in the service: + ```python +# modules/user/service.py +from .crud import crud_users + # Create user = await crud_users.create(db=db, object=user_create) -# Read +# Read one user = await crud_users.get(db=db, id=user_id) -users = await crud_users.get_multi(db=db, offset=0, limit=10) -# Update -user = await crud_users.update(db=db, object=user_update, id=user_id) +# Read many +result = await crud_users.get_multi(db=db, offset=0, limit=10) + +# Update +await crud_users.update(db=db, object=user_update, id=user_id) -# Delete (soft delete) +# Soft delete (sets is_deleted=True via the mixin) await crud_users.delete(db=db, id=user_id) + +# Hard delete +await crud_users.db_delete(db=db, id=user_id) ``` -### 🔄 **Database Migrations** -Track schema changes with Alembic: +### Database Migrations + +Run from `backend/`: + ```bash -# Generate migration -alembic revision --autogenerate -m "Add user table" +# Generate a migration from model changes +uv run alembic revision --autogenerate -m "Add user table" # Apply migrations -alembic upgrade head +uv run alembic upgrade head -# Rollback if needed -alembic downgrade -1 +# Roll back the most recent migration +uv run alembic downgrade -1 ``` ## Database Setup -The boilerplate is configured for PostgreSQL with async support: +The boilerplate uses async PostgreSQL via `asyncpg`. 
### Environment Configuration -```bash -# .env file -POSTGRES_USER=your_user -POSTGRES_PASSWORD=your_password -POSTGRES_SERVER=localhost + +```env +# backend/.env +POSTGRES_USER=postgres +POSTGRES_PASSWORD=postgres +POSTGRES_SERVER=localhost # or "db" for Docker Compose POSTGRES_PORT=5432 -POSTGRES_DB=your_database +POSTGRES_DB=postgres +POSTGRES_ASYNC_PREFIX=postgresql+asyncpg:// +POSTGRES_POOL_SIZE=20 +POSTGRES_MAX_OVERFLOW=0 +CREATE_TABLES_ON_STARTUP=true ``` +The `DATABASE_URL` property on `DatabaseSettings` is computed from these. If you set `DATABASE_URL` directly in the environment it overrides everything else. + ### Connection Management + +The session dependency lives in `infrastructure/database/session.py`: + +```python +from collections.abc import AsyncGenerator +from sqlalchemy.ext.asyncio import AsyncSession + + +async def async_session() -> AsyncGenerator[AsyncSession, None]: + async with local_session() as db: + yield db +``` + +Use it in routes via FastAPI's `Depends`: + ```python -# Database session dependency -async def async_get_db() -> AsyncIterator[AsyncSession]: - async with async_session_maker() as session: - yield session - -# Use in endpoints -@router.get("/users/") -async def get_users(db: Annotated[AsyncSession, Depends(async_get_db)]): - return await crud_users.get_multi(db=db) +from typing import Annotated +from fastapi import Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from ...infrastructure.database.session import async_session + + +@router.get("/") +async def list_users( + db: Annotated[AsyncSession, Depends(async_session)], +): + ... 
``` ## Included Models -The boilerplate includes four example models: +The boilerplate ships with these models (one per feature module): -### **User Model** - Authentication & user management -- Username, email, password (hashed) -- Soft delete support -- Tier-based access control +### `User` — `modules/user/models.py` +- Username, email, hashed password, full name, profile image +- OAuth fields: `oauth_provider`, `google_id`, `github_id` +- Foreign key to `tier` +- Mixins: `TimestampMixin`, `SoftDeleteMixin` +- Table name: **`user`** (singular) -### **Post Model** - Content with user relationships -- Title, content, creation metadata -- Foreign key to user (no SQLAlchemy relationships) -- Soft delete built-in +### `Tier` — `modules/tier/models.py` +- Just `name` and `description` — no pricing or business logic +- One-to-many relationship with users +- Mixins: `TimestampMixin`, `SoftDeleteMixin` +- Table name: **`tiers`** -### **Tier Model** - User subscription levels -- Name-based tiers (free, premium, etc.) -- Links to rate limiting system +### `RateLimit` — `modules/rate_limit/models.py` +- Per-tier rate limits keyed by API path +- Fields: `tier_id`, `name`, `path`, `limit`, `period` +- Mixins: `TimestampMixin`, `SoftDeleteMixin` +- Table name: **`rate_limits`** -### **Rate Limit Model** - API access control -- Path-specific rate limits per tier -- Configurable limits and time periods +### `APIKey`, `KeyUsage`, `KeyPermission` — `modules/api_keys/models.py` +- API key issuance with per-key permissions and usage tracking +- Table names: `api_keys`, `key_usage`, `key_permissions` ## Directory Structure +Each feature owns its data stack: + ```text -src/app/ -├── models/ # SQLAlchemy models (database tables) -│ ├── __init__.py -│ ├── user.py # User table definition -│ ├── post.py # Post table definition -│ └── ... 
-├── schemas/ # Pydantic schemas (validation) -│ ├── __init__.py -│ ├── user.py # User validation schemas -│ ├── post.py # Post validation schemas -│ └── ... -├── crud/ # Database operations -│ ├── __init__.py -│ ├── crud_users.py # User CRUD operations -│ ├── crud_posts.py # Post CRUD operations -│ └── ... -└── core/db/ # Database configuration - ├── database.py # Connection and session setup - └── models.py # Base classes and mixins +backend/src/ +├── infrastructure/ +│ └── database/ +│ ├── session.py # engine, async_session dep, Base class, create_tables +│ └── models.py # TimestampMixin, SoftDeleteMixin, UUIDMixin +└── modules/ + ├── user/ + │ ├── models.py # SQLAlchemy User + │ ├── schemas.py # Pydantic UserCreate/UserRead/UserUpdate + │ ├── crud.py # crud_users = FastCRUD(User) + │ ├── service.py # UserService (business rules) + │ └── routes.py # /api/v1/users endpoints + ├── tier/ + ├── rate_limit/ + └── api_keys/ ``` +The shared `Base` and mixins are in `infrastructure/database/`. Everything feature-specific is colocated under the module. 
+ ## Common Patterns ### Create with Validation + ```python -@router.post("/users/", response_model=UserRead) +@router.post("/", response_model=UserRead, status_code=201) async def create_user( - user_data: UserCreate, # Validates input automatically - db: Annotated[AsyncSession, Depends(async_get_db)] + user: UserCreate, + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], ): - # Check for duplicates - if await crud_users.exists(db=db, email=user_data.email): - raise DuplicateValueException("Email already exists") - - # Create user (password gets hashed automatically) - return await crud_users.create(db=db, object=user_data) + # service.create checks for duplicate username/email and hashes the password + return await user_service.create(user, db) ``` ### Query with Filters + ```python -# Get active users only -users = await crud_users.get_multi( +# Active users only (excludes soft-deleted) +result = await crud_users.get_multi( db=db, - is_active=True, is_deleted=False, offset=0, - limit=10 + limit=10, ) -# Search users -users = await crud_users.get_multi( +# Substring search +result = await crud_users.get_multi( db=db, - username__icontains="john", # Contains "john" - schema_to_select=UserRead + username__icontains="john", + schema_to_select=UserRead, ) ``` +FastCRUD supports `__` operators on field names (`__contains`, `__icontains`, `__gt`, `__lt`, `__in`, etc.). + ### Soft Delete Pattern + +The `SoftDeleteMixin` adds `is_deleted` and `deleted_at`. 
FastCRUD's `.delete()` flips the flag without removing the row: + ```python -# Soft delete (sets is_deleted=True) +# Soft delete (default for models with the mixin) await crud_users.delete(db=db, id=user_id) -# Hard delete (actually removes from database) +# Hard delete (actually DELETE FROM) await crud_users.db_delete(db=db, id=user_id) -# Get only non-deleted records -users = await crud_users.get_multi(db=db, is_deleted=False) +# Filter to exclude soft-deleted records +await crud_users.get_multi(db=db, is_deleted=False) ``` ## What's Next -Each guide builds on the previous one with practical examples: - -1. **[Models](models.md)** - Define your database structure -2. **[Schemas](schemas.md)** - Add validation and serialization -3. **[CRUD Operations](crud.md)** - Implement business logic -4. **[Migrations](migrations.md)** - Deploy changes safely - -The boilerplate provides a solid foundation - just follow these patterns to build your data layer! \ No newline at end of file +1. **[Models](models.md)** - Define your tables and relationships +2. **[Schemas](schemas.md)** - Add Pydantic validation and serialization +3. **[CRUD Operations](crud.md)** - Use FastCRUD to read and write data +4. **[Migrations](migrations.md)** - Track and deploy schema changes diff --git a/docs/user-guide/database/migrations.md b/docs/user-guide/database/migrations.md index ed66b352..c19e5896 100644 --- a/docs/user-guide/database/migrations.md +++ b/docs/user-guide/database/migrations.md @@ -1,470 +1,364 @@ # Database Migrations -This guide covers database migrations using Alembic, the migration tool for SQLAlchemy. Learn how to manage database schema changes safely and efficiently in development and production. +Schema changes are managed with [Alembic](https://alembic.sqlalchemy.org/). This guide covers the day-to-day workflow plus the production safety net the boilerplate adds on top. 
-## Overview +## Two Modes: Auto-Create vs Migrations -The FastAPI Boilerplate uses [Alembic](https://alembic.sqlalchemy.org/) for database migrations. Alembic provides: +The boilerplate supports both. They are **alternatives**, not complements: -- **Version-controlled schema changes** - Track every database modification -- **Automatic migration generation** - Generate migrations from model changes -- **Reversible migrations** - Upgrade and downgrade database versions -- **Environment-specific configurations** - Different settings for dev/staging/production -- **Safe schema evolution** - Apply changes incrementally +### `CREATE_TABLES_ON_STARTUP=true` — auto-create -## Simple Setup: Automatic Table Creation +The app calls `Base.metadata.create_all` on startup, creating any missing tables from the current models. -For simple projects or development, the boilerplate includes `create_tables_on_start` parameter that automatically creates all tables on application startup: +| Use when | Don't use when | +|----------|----------------| +| Local dev with a throwaway database | You need version-controlled schema changes | +| Tests with an ephemeral testcontainer | Multiple developers share a database | +| Quick prototyping | You're deploying to staging/production | -```python -# This is enabled by default in create_application() -app = create_application( - router=router, - settings=settings, - create_tables_on_start=True # Default: True -) -``` - -**When to use:** +Driven by the `CREATE_TABLES_ON_STARTUP` env var, defaulting to `true`. The factory honors it via `create_application(create_tables_on_startup=...)`. 
-- ✅ **Development** - Quick setup without migration management -- ✅ **Simple projects** - When you don't need migration history -- ✅ **Prototyping** - Fast iteration without migration complexity -- ✅ **Testing** - Clean database state for each test run +### Alembic migrations -**When NOT to use:** - -- ❌ **Production** - No migration history or rollback capability -- ❌ **Team development** - Can't track schema changes between developers -- ❌ **Data migrations** - Only handles schema, not data transformations -- ❌ **Complex deployments** - No control over when/how schema changes apply - -```python -# Disable for production environments -app = create_application( - router=router, - settings=settings, - create_tables_on_start=False # Use migrations instead -) -``` - -For production deployments and team development, use proper Alembic migrations as described below. +Tracked, reviewable, reversible schema changes — what you want for anything beyond a local sandbox. Set `CREATE_TABLES_ON_STARTUP=false` (or leave it true; `create_all` is a no-op on existing tables) and run migrations explicitly. 
## Configuration -### Alembic Setup - -Alembic is configured in `src/alembic.ini`: +### `backend/alembic.ini` -```ini -[alembic] -# Path to migration files -script_location = migrations +The shipped config sets: -# Database URL with environment variable substitution -sqlalchemy.url = postgresql://%(POSTGRES_USER)s:%(POSTGRES_PASSWORD)s@%(POSTGRES_SERVER)s:%(POSTGRES_PORT)s/%(POSTGRES_DB)s - -# Other configurations -file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s -timezone = UTC -``` +- `script_location = %(here)s/migrations` — migration files live in `backend/migrations/` +- `prepend_sys_path = .` — so `src.*` resolves when running from `backend/` +- `sqlalchemy.url = driver://user:pass@localhost/dbname` — a placeholder; the real URL is overridden in `env.py` -### Environment Configuration +### `backend/migrations/env.py` -Migration environment is configured in `src/migrations/env.py`: +The boilerplate's `env.py` does three things you'll want to know about: ```python -# src/migrations/env.py -from alembic import context -from sqlalchemy import engine_from_config, pool -from app.core.db.database import Base -from app.core.config import settings +from src.infrastructure.config.settings import settings +from src.infrastructure.database.session import Base -# Import all models to ensure they're registered -from app.models import * # This imports all models +# 1. The real DATABASE_URL is taken from app settings (which build it from POSTGRES_*) +config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) -config = context.config -# Override database URL from environment -config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) +# 2. 
Production safety check — refuses to run unless explicitly opted in
+def validate_production_migration():
+    if os.getenv("ENVIRONMENT") == "production":
+        if os.getenv("CONFIRM_PRODUCTION_MIGRATION") != "yes":
+            raise Exception(
+                "Production migration requires CONFIRM_PRODUCTION_MIGRATION=yes"
+            )
+
+# 3. Auto-import every module under src.modules so Alembic sees all models
+import_models("src.modules")
 
 target_metadata = Base.metadata
 ```
 
-## Migration Workflow
+The auto-import walks `src.modules` recursively — you don't need to maintain a list of model imports for `--autogenerate` to find new tables. Just create the model file under `modules/<feature>/models.py` and it's discovered.
 
-### 1. Creating Migrations
+## Workflow
 
-Generate migrations automatically when you change models:
+All commands run from `backend/`:
 
 ```bash
-# Navigate to src directory
-cd src
+cd backend
+```
+
+### 1. Generate a Migration
+
+After you change a model:
 
-# Generate migration from model changes
+```bash
 uv run alembic revision --autogenerate -m "Add user profile fields"
 ```
 
-**What happens:**
-- Alembic compares current models with database schema
-- Generates a new migration file in `src/migrations/versions/`
-- Migration includes upgrade and downgrade functions
+Alembic compares the current models with the database schema and writes a new file in `backend/migrations/versions/`.
 
-### 2. Review Generated Migration
+### 2. Review the Generated Migration
 
-Always review auto-generated migrations before applying:
+Always read it before applying. Autogenerate isn't perfect — it can miss enum changes, server-side defaults, computed columns, and complex constraint renames.
A typical generated file: ```python -# Example migration file: src/migrations/versions/20241215_1430_add_user_profile_fields.py """Add user profile fields Revision ID: abc123def456 -Revises: previous_revision_id -Create Date: 2024-12-15 14:30:00.000000 - +Revises: prev_revision_id """ + from alembic import op import sqlalchemy as sa -# revision identifiers -revision = 'abc123def456' -down_revision = 'previous_revision_id' +revision = "abc123def456" +down_revision = "prev_revision_id" branch_labels = None depends_on = None + def upgrade() -> None: - # Add new columns - op.add_column('user', sa.Column('bio', sa.String(500), nullable=True)) - op.add_column('user', sa.Column('website', sa.String(255), nullable=True)) - - # Create index - op.create_index('ix_user_website', 'user', ['website']) + op.add_column("user", sa.Column("bio", sa.String(500), nullable=True)) + op.add_column("user", sa.Column("website", sa.String(255), nullable=True)) + op.create_index("ix_user_website", "user", ["website"]) + def downgrade() -> None: - # Remove changes (reverse order) - op.drop_index('ix_user_website', 'user') - op.drop_column('user', 'website') - op.drop_column('user', 'bio') + op.drop_index("ix_user_website", "user") + op.drop_column("user", "website") + op.drop_column("user", "bio") ``` -### 3. Apply Migration - -Apply migrations to update database schema: +### 3. Apply ```bash -# Apply all pending migrations +# All pending migrations uv run alembic upgrade head -# Apply specific number of migrations +# Step forward N revisions uv run alembic upgrade +2 -# Apply to specific revision +# Up to a specific revision uv run alembic upgrade abc123def456 ``` -### 4. Verify Migration - -Check migration status and current version: +### 4. 
Inspect ```bash -# Show current database version -uv run alembic current +uv run alembic current # current revision +uv run alembic history # full history +uv run alembic heads # any branched heads +uv run alembic show # details about a specific revision +``` -# Show migration history -uv run alembic history +### 5. Roll Back -# Show pending migrations -uv run alembic show head +```bash +uv run alembic downgrade -1 # one step back +uv run alembic downgrade # to a specific point +uv run alembic downgrade base # all the way back ``` -## Common Migration Scenarios +Test your downgrade in dev — it's the cheapest way to spot a missing `op.drop_index` or similar. -### Adding New Model +## Common Scenarios -1. **Create the model** in `src/app/models/`: +### Adding a New Model -```python -# src/app/models/category.py -from sqlalchemy import String, DateTime -from sqlalchemy.orm import Mapped, mapped_column -from datetime import datetime -from app.core.db.database import Base - -class Category(Base): - __tablename__ = "category" - - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True, init=False) - name: Mapped[str] = mapped_column(String(50), unique=True, nullable=False) - slug: Mapped[str] = mapped_column(String(50), unique=True, nullable=False) - description: Mapped[str] = mapped_column(String(255), nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) -``` +1. **Create the module folder** with the model: -2. **Import in __init__.py**: + ```python + # backend/src/modules/widgets/models.py + from sqlalchemy import String + from sqlalchemy.orm import Mapped, mapped_column -```python -# src/app/models/__init__.py -from .user import User -from .post import Post -from .tier import Tier -from .rate_limit import RateLimit -from .category import Category # Add new import -``` + from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin + from ...infrastructure.database.session import Base -3. 
**Generate migration**: -```bash -uv run alembic revision --autogenerate -m "Add category model" -``` + class Widget(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "widgets" -### Adding Foreign Key + id: Mapped[int] = mapped_column( + "id", autoincrement=True, nullable=False, unique=True, + primary_key=True, init=False, + ) + name: Mapped[str] = mapped_column(String(100), unique=True, index=True) + ``` -1. **Update model with foreign key**: +2. **Register it in `modules/__init__.py`** so other code can import it: -```python -# Add to Post model -category_id: Mapped[Optional[int]] = mapped_column(ForeignKey("category.id"), nullable=True) -``` + ```python + from .widgets.models import Widget + ``` -2. **Generate migration**: + (The migrations env.py auto-imports `src.modules`, so the new model is picked up regardless — but adding it to `modules/__init__.py` keeps the public API explicit.) -```bash -uv run alembic revision --autogenerate -m "Add category_id to posts" -``` +3. **Generate and apply:** -3. **Review and apply**: + ```bash + uv run alembic revision --autogenerate -m "Add widgets table" + # review backend/migrations/versions/_add_widgets_table.py + uv run alembic upgrade head + ``` + +### Adding a Foreign Key ```python -# Generated migration will include: -def upgrade() -> None: - op.add_column('post', sa.Column('category_id', sa.Integer(), nullable=True)) - op.create_foreign_key('fk_post_category_id', 'post', 'category', ['category_id'], ['id']) - op.create_index('ix_post_category_id', 'post', ['category_id']) +# Add to the model +class Widget(Base, ...): + owner_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) ``` +```bash +uv run alembic revision --autogenerate -m "Add widget.owner_id" +``` + +The generated migration will include the column, the FK constraint, and the index. + ### Data Migrations -Sometimes you need to migrate data, not just schema: +Sometimes a schema change needs a data backfill. 
Edit the autogenerated upgrade to include it: ```python -# Example: Populate default category for existing posts def upgrade() -> None: - # Add the column - op.add_column('post', sa.Column('category_id', sa.Integer(), nullable=True)) - - # Data migration - connection = op.get_bind() - - # Create default category - connection.execute( - "INSERT INTO category (name, slug, description) VALUES ('General', 'general', 'Default category')" + # 1. Add nullable column + op.add_column("post", sa.Column("category_id", sa.Integer(), nullable=True)) + + # 2. Backfill + bind = op.get_bind() + bind.execute(sa.text( + "INSERT INTO category (name, slug) VALUES ('General', 'general')" + )) + default_id = bind.execute( + sa.text("SELECT id FROM category WHERE slug = 'general'") + ).scalar_one() + bind.execute(sa.text( + "UPDATE post SET category_id = :cid WHERE category_id IS NULL" + ), {"cid": default_id}) + + # 3. Tighten the constraint + op.alter_column("post", "category_id", nullable=False) + op.create_foreign_key( + "fk_post_category_id", "post", "category", ["category_id"], ["id"] ) - - # Get default category ID - result = connection.execute("SELECT id FROM category WHERE slug = 'general'") - default_category_id = result.fetchone()[0] - - # Update existing posts - connection.execute( - f"UPDATE post SET category_id = {default_category_id} WHERE category_id IS NULL" - ) - - # Make column non-nullable after data migration - op.alter_column('post', 'category_id', nullable=False) ``` -### Renaming Columns +### Renaming a Column ```python def upgrade() -> None: - # Rename column - op.alter_column('user', 'full_name', new_column_name='name') + op.alter_column("user", "full_name", new_column_name="name") + def downgrade() -> None: - # Reverse the rename - op.alter_column('user', 'name', new_column_name='full_name') + op.alter_column("user", "name", new_column_name="full_name") ``` -### Dropping Tables +For columns with foreign keys or indexes, autogenerate may produce 
drop-and-recreate instead of a rename. Edit it to use `alter_column` if you want to preserve data. + +### Dropping a Table ```python def upgrade() -> None: - # Drop table (be careful!) - op.drop_table('old_table') + op.drop_table("old_table") + def downgrade() -> None: - # Recreate table structure - op.create_table('old_table', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(50), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "old_table", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(50)), + sa.PrimaryKeyConstraint("id"), ) ``` ## Production Migration Strategy -### 1. Development Workflow - -```bash -# 1. Make model changes -# 2. Generate migration -uv run alembic revision --autogenerate -m "Descriptive message" - -# 3. Review migration file -# 4. Test migration -uv run alembic upgrade head - -# 5. Test downgrade (optional) -uv run alembic downgrade -1 -uv run alembic upgrade head -``` - -### 2. Staging Deployment +The boilerplate adds a hard guard against accidental production migrations: with `ENVIRONMENT=production`, Alembic refuses to run unless `CONFIRM_PRODUCTION_MIGRATION=yes` is set. ```bash -# 1. Deploy code with migrations -# 2. Backup database -pg_dump -h staging-db -U user dbname > backup_$(date +%Y%m%d_%H%M%S).sql - -# 3. Apply migrations -uv run alembic upgrade head - -# 4. Verify application works -# 5. Run tests +# Production migration — explicit confirmation required +ENVIRONMENT=production CONFIRM_PRODUCTION_MIGRATION=yes uv run alembic upgrade head ``` -### 3. Production Deployment - -```bash -# 1. Schedule maintenance window -# 2. Create database backup -pg_dump -h prod-db -U user dbname > prod_backup_$(date +%Y%m%d_%H%M%S).sql - -# 3. Apply migrations (with monitoring) -uv run alembic upgrade head - -# 4. Verify health checks pass -# 5. 
Monitor application metrics -``` - -## Docker Considerations - -### Development with Docker Compose +### Recommended Flow -For local development, migrations run automatically: +1. **Develop and test the migration locally** against a copy of production data +2. **Deploy code with the new migration files** to staging +3. **Back up the staging database**: + ```bash + pg_dump -h staging-db -U user dbname > staging_backup_$(date +%Y%m%d_%H%M%S).sql + ``` +4. **Apply against staging** and run a smoke test +5. **Schedule a maintenance window** if the migration is destructive +6. **Back up production**: + ```bash + pg_dump -h prod-db -U user dbname > prod_backup_$(date +%Y%m%d_%H%M%S).sql + ``` +7. **Apply against production** with the explicit confirmation env var +8. **Watch logs and metrics** post-migration -```yaml -# docker-compose.yml -services: - web: - # ... other config - depends_on: - - db - command: | - sh -c " - uv run alembic upgrade head && - uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - " -``` - -### Production Docker +### Docker Migration Job -In production, run migrations separately: - -```dockerfile -# Dockerfile migration stage -FROM python:3.11-slim as migration -COPY requirements.txt . -RUN pip install -r requirements.txt -COPY src/ /app/ -WORKDIR /app -CMD ["alembic", "upgrade", "head"] -``` +The `migrate` stage in `backend/Dockerfile` exists for this. It runs `alembic upgrade head` and exits: ```yaml -# docker-compose.prod.yml +# In your compose / orchestrator config services: migrate: build: - context: . - target: migration + context: ./backend + target: migrate env_file: - - .env + - ./backend/.env depends_on: - db - command: alembic upgrade head - - web: - # ... web service config - depends_on: - - migrate ``` -## Migration Best Practices +Run it as a one-shot job before starting the app: -### 1. 
Always Review Generated Migrations - -```python -# Check for issues like: -# - Missing imports -# - Incorrect nullable settings -# - Missing indexes -# - Data loss operations +```bash +docker compose run --rm migrate ``` -### 2. Use Descriptive Messages +## Best Practices + +### Always review autogenerated migrations +Autogenerate misses enum changes, server-side defaults, certain constraint renames, and computed columns. +### Use descriptive messages ```bash # Good -uv run alembic revision --autogenerate -m "Add user email verification fields" +uv run alembic revision --autogenerate -m "Add user.email_verified" -# Bad +# Less useful three months later uv run alembic revision --autogenerate -m "Update user model" ``` -### 3. Handle Nullable Columns Carefully +### Adding a non-nullable column to a populated table +Do it in three steps in the same migration: ```python -# When adding non-nullable columns to existing tables: def upgrade() -> None: - # 1. Add as nullable first - op.add_column('user', sa.Column('phone', sa.String(20), nullable=True)) - - # 2. Populate with default data - op.execute("UPDATE user SET phone = '' WHERE phone IS NULL") - - # 3. Make non-nullable - op.alter_column('user', 'phone', nullable=False) + op.add_column("user", sa.Column("phone", sa.String(20), nullable=True)) + op.execute("UPDATE \"user\" SET phone = ''") + op.alter_column("user", "phone", nullable=False) ``` -### 4. Test Rollbacks +### Test downgrades ```bash -# Test that your downgrade works uv run alembic downgrade -1 uv run alembic upgrade head ``` -### 5. Use Transactions for Complex Migrations +If the down step blows up, fix it before merging the migration. -```python -def upgrade() -> None: - # Complex migration with transaction - connection = op.get_bind() - trans = connection.begin() - try: - # Multiple operations - op.create_table(...) - op.add_column(...) 
-        connection.execute("UPDATE ...")
-        trans.commit()
-    except:
-        trans.rollback()
-        raise
+### Don't commit auto-generated `.pyc` files
+The `migrations/versions/` directory should only contain hand-written `.py` files. Alembic does not track `.pyc`.
+
+## Troubleshooting
+
+### "Target database is not up to date"
+You created a new revision but the database is one or more revisions behind. Run `uv run alembic upgrade head` first, then generate the new revision.
+
+### "Multiple heads detected"
+Two branches both added migrations from the same parent. Merge them:
+
+```bash
+uv run alembic merge heads -m "merge heads"
 ```
 
+### Autogenerate produces an empty migration
+You haven't actually changed the schema — or `target_metadata` doesn't see your model. Check that the new module is importable from `src.modules` (the auto-importer in `env.py` walks that package).
+
+### Migration applies in dev but fails in prod
+Common causes: data the dev DB doesn't have (e.g. an unindexed column with NULLs you tried to make NOT NULL), different Postgres versions, or extensions installed only in one environment. Always test against production-like data.
+
+## Next Steps
-- **[CRUD Operations](crud.md)** - Working with migrated database schema
-- **[API Development](../api/index.md)** - Building endpoints for your models
-- **[Testing](../testing.md)** - Testing database migrations
\ No newline at end of file
+- **[CRUD Operations](crud.md)** — Use the migrated schema
+- **[API Endpoints](../api/endpoints.md)** — Build endpoints on the new model
+- **[Production](../production.md)** — Production deployment guide
diff --git a/docs/user-guide/database/models.md b/docs/user-guide/database/models.md
index beea8b25..1f3c9e24 100644
--- a/docs/user-guide/database/models.md
+++ b/docs/user-guide/database/models.md
@@ -1,484 +1,360 @@
 # Database Models
 
-This section explains how SQLAlchemy models are implemented in the boilerplate, how to create new models, and the patterns used for relationships, validation, and data integrity.
+This page covers how SQLAlchemy 2.0 models are organized in the boilerplate, the patterns used for relationships and timestamps, and how to add a new model.
 
-## Model Structure
+## Where Models Live
 
-Models are defined in `src/app/models/` using SQLAlchemy 2.0's declarative syntax with `Mapped` type annotations.
+Models live in **`backend/src/modules/<feature>/models.py`** — colocated with that feature's schemas, CRUD, service, and routes:
 
-### Base Model
-
-All models inherit from `Base` defined in `src/app/core/db/database.py`:
-
-```python
-from sqlalchemy.orm import DeclarativeBase
-
-class Base(DeclarativeBase):
-    pass
+```text
+backend/src/modules/
+├── user/models.py          # User
+├── tier/models.py          # Tier
+├── rate_limit/models.py    # RateLimit
+└── api_keys/models.py      # APIKey, KeyUsage, KeyPermission
 ```
 
-**SQLAlchemy 2.0 Change**: Uses `DeclarativeBase` instead of the older `declarative_base()` function. This provides better type checking and IDE support.
- -### Model File Structure - -Each model is in its own file: +The shared base class and reusable mixins live in `backend/src/infrastructure/database/`: ```text -src/app/models/ -├── __init__.py # Imports all models for Alembic discovery -├── user.py # User authentication model -├── post.py # Example content model with relationships -├── tier.py # User subscription tiers -└── rate_limit.py # API rate limiting configuration +backend/src/infrastructure/database/ +├── session.py # Base, async_session, create_tables +└── models.py # TimestampMixin, SoftDeleteMixin, UUIDMixin ``` -**Import Requirement**: Models must be imported in `__init__.py` for Alembic to detect them during migration generation. +## The Base Class -## Design Decision: No SQLAlchemy Relationships +All models inherit from `Base` defined in `infrastructure/database/session.py`: -The boilerplate deliberately avoids using SQLAlchemy's `relationship()` feature. This is an intentional architectural choice with specific benefits. +```python +from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass -### Why No Relationships -**Performance Concerns**: +class Base(DeclarativeBase, MappedAsDataclass): + """SQLAlchemy 2.0 base — also a dataclass for ergonomic instantiation.""" + pass +``` -- **N+1 Query Problem**: Relationships can trigger multiple queries when accessing related data -- **Lazy Loading**: Unpredictable when queries execute, making performance optimization difficult -- **Memory Usage**: Loading large object graphs consumes significant memory +Combining `DeclarativeBase` with `MappedAsDataclass` means each model behaves like a Python dataclass: you instantiate it with `User(name=..., username=..., ...)` and only the columns you mark `init=False` are excluded from the constructor. 
-**Code Clarity**: +## Reusable Mixins -- **Explicit Data Fetching**: Developers see exactly what data is being loaded and when -- **Predictable Queries**: No "magic" queries triggered by attribute access -- **Easier Debugging**: SQL queries are explicit in the code, not hidden in relationship configuration +The boilerplate ships three mixins in `infrastructure/database/models.py`. Compose them onto your model: -**Flexibility**: +```python +from ...infrastructure.database.models import ( + TimestampMixin, # adds created_at, updated_at + SoftDeleteMixin, # adds is_deleted, deleted_at + UUIDMixin, # adds a uuid primary key (alternative to id) +) -- **Query Optimization**: Can optimize each query for its specific use case -- **Selective Loading**: Load only the fields needed for each operation -- **Join Control**: Use FastCRUD's join methods when needed, skip when not +class MyModel(Base, TimestampMixin, SoftDeleteMixin): + ... +``` -### What This Means in Practice +| Mixin | Adds | Notes | +|-------|------|-------| +| `TimestampMixin` | `created_at`, `updated_at` (timezone-aware UTC) | Both `init=False`; defaults via `datetime.now(UTC)` | +| `SoftDeleteMixin` | `is_deleted` (bool), `deleted_at` (datetime?) 
| FastCRUD's `.delete()` flips these instead of issuing `DELETE FROM` | +| `UUIDMixin` | `uuid` primary key with `gen_random_uuid()` server fallback | Use this when you need an external-facing identifier | -Instead of this (traditional SQLAlchemy): -```python -# Not used in the boilerplate -class User(Base): - posts: Mapped[List["Post"]] = relationship("Post", back_populates="created_by_user") +## Auto-Discovery for Alembic -class Post(Base): - created_by_user: Mapped["User"] = relationship("User", back_populates="posts") -``` +Each module's models are imported in `backend/src/modules/__init__.py`: -The boilerplate uses this approach: ```python -# DO - Explicit and controlled -class User(Base): - # Only foreign key, no relationship - tier_id: Mapped[int | None] = mapped_column(ForeignKey("tier.id"), index=True, default=None) - -class Post(Base): - # Only foreign key, no relationship - created_by_user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) - -# Explicit queries - you control exactly what's loaded -user = await crud_users.get(db=db, id=1) -posts = await crud_posts.get_multi(db=db, created_by_user_id=user.id) - -# Or use joins when needed -posts_with_users = await crud_posts.get_multi_joined( - db=db, - join_model=User, - schema_to_select=PostRead, - join_schema_to_select=UserRead -) +from .api_keys.models import APIKey, KeyPermission, KeyUsage +from .rate_limit.models import RateLimit +from .tier.models import Tier +from .user.models import User ``` -### Benefits of This Approach +When you add a new module, **add its models here** so Alembic's `--autogenerate` sees them. -**Predictable Performance**: +## Relationships -- Every database query is explicit in the code -- No surprise queries from accessing relationships -- Easier to identify and optimize slow operations +The boilerplate uses SQLAlchemy `relationship()` where it makes sense, with `lazy="selectin"` to avoid N+1 problems by fetching related rows in a single follow-up query. 
-**Better Caching**: +For example, `User.tier` and `Tier.users` are both wired up: -- Can cache individual models without worrying about related data -- Cache invalidation is simpler and more predictable - -**API Design**: +```python +# modules/user/models.py +class User(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "user" + ... + tier_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("tiers.id"), index=True, default=None, + ) + tier: Mapped["Tier | None"] = relationship( + "Tier", back_populates="users", lazy="selectin", init=False, + ) -- Forces thinking about what data clients actually need -- Prevents over-fetching in API responses -- Encourages lean, focused endpoints +# modules/tier/models.py +class Tier(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "tiers" + ... + users: Mapped[list["User"]] = relationship( + "User", back_populates="tier", lazy="selectin", + default_factory=list, init=False, + ) +``` -**Testing**: +### Avoiding Circular Imports -- Easier to mock database operations -- No complex relationship setup in test fixtures -- More predictable test data requirements +Both sides of a relationship import each other's model class. Use `TYPE_CHECKING` for the import and a string-literal class name in `relationship(...)`: -### When You Need Related Data +```python +from typing import TYPE_CHECKING -Use FastCRUD's join capabilities: +if TYPE_CHECKING: + from ..tier.models import Tier -```python -# Single record with related data -post_with_author = await crud_posts.get_joined( - db=db, - join_model=User, - schema_to_select=PostRead, - join_schema_to_select=UserRead, - id=post_id -) -# Multiple records with joins -posts_with_authors = await crud_posts.get_multi_joined( - db=db, - join_model=User, - offset=0, - limit=10 -) +class User(Base, ...): + tier: Mapped["Tier | None"] = relationship("Tier", back_populates="users", ...) 
``` -### Alternative Approaches +### When to Skip Relationships -If you need relationships in your project, you can add them: +If a foreign key only points "outward" (no need to traverse from the other side), just keep the FK column and skip the relationship: ```python -# Add relationships if needed for your use case -from sqlalchemy.orm import relationship - -class User(Base): - # ... existing fields ... - posts: Mapped[List["Post"]] = relationship("Post", back_populates="created_by_user") - -class Post(Base): - # ... existing fields ... - created_by_user: Mapped["User"] = relationship("User", back_populates="posts") +class APIKey(Base, ...): + user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) + # No User relationship — the API key knows its user via user_id; + # users don't need a list of all their keys at the ORM level. ``` -But consider the trade-offs and whether explicit queries might be better for your use case. +You can always join via FastCRUD when you need the related data. -## User Model Implementation +## The User Model -The User model (`src/app/models/user.py`) demonstrates authentication patterns: +`modules/user/models.py` is the most feature-rich example. 
Trimmed view: ```python -import uuid as uuid_pkg -from datetime import UTC, datetime -from sqlalchemy import DateTime, ForeignKey, String -from sqlalchemy.orm import Mapped, mapped_column -from ..core.db.database import Base +from datetime import datetime +from typing import TYPE_CHECKING +from sqlalchemy import DateTime, ForeignKey, Integer, String +from sqlalchemy.orm import Mapped, mapped_column, relationship -class User(Base): +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin +from ...infrastructure.database.session import Base + +if TYPE_CHECKING: + from ..tier.models import Tier + + +class User(Base, TimestampMixin, SoftDeleteMixin): __tablename__ = "user" - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - - # User data + id: Mapped[int] = mapped_column( + "id", autoincrement=True, nullable=False, unique=True, + primary_key=True, init=False, + ) + + # Profile name: Mapped[str] = mapped_column(String(30)) username: Mapped[str] = mapped_column(String(20), unique=True, index=True) email: Mapped[str] = mapped_column(String(50), unique=True, index=True) - hashed_password: Mapped[str] = mapped_column(String) - - # Profile - profile_image_url: Mapped[str] = mapped_column(String, default="https://profileimageurl.com") - - # UUID for external references - uuid: Mapped[uuid_pkg.UUID] = mapped_column(default_factory=uuid_pkg.uuid4, primary_key=True, unique=True) - - # Timestamps - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - - # Status flags - is_deleted: Mapped[bool] = mapped_column(default=False, index=True) - is_superuser: Mapped[bool] = mapped_column(default=False) - - # Foreign key to tier system (no relationship 
defined) - tier_id: Mapped[int | None] = mapped_column(ForeignKey("tier.id"), index=True, default=None, init=False) -``` + hashed_password: Mapped[str] = mapped_column(String(100)) + profile_image_url: Mapped[str] = mapped_column( + String, default="https://profileimageurl.com", + ) -### Key Implementation Details + # Tier (foreign key + relationship) + tier_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("tiers.id"), index=True, default=None, + ) + tier: Mapped["Tier | None"] = relationship( + "Tier", back_populates="users", lazy="selectin", init=False, + ) -**Type Annotations**: `Mapped[type]` provides type hints for SQLAlchemy 2.0. IDE and mypy can validate types. + is_superuser: Mapped[bool] = mapped_column(default=False) -**String Lengths**: Explicit lengths (`String(50)`) prevent database errors and define constraints clearly. + # OAuth fields (filled when user signs in via Google/GitHub) + google_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + github_id: Mapped[str | None] = mapped_column(String(50), unique=True, index=True, default=None) + oauth_provider: Mapped[str | None] = mapped_column(String(20), default=None) + email_verified: Mapped[bool] = mapped_column(default=False) +``` -**Nullable Fields**: Explicitly set `nullable=False` for required fields, `nullable=True` for optional ones. +Key points: -**Default Values**: Use `default=` for database-level defaults, Python functions for computed defaults. +- `init=False` excludes the field from the dataclass `__init__` (used for the primary key and timestamps you don't want callers to set). +- `index=True` adds a database index on lookup-heavy columns (`username`, `email`, `tier_id`, OAuth IDs). +- `unique=True` enforces uniqueness at the DB level. 
-## Post Model with Relationships +## The RateLimit Model -The Post model (`src/app/models/post.py`) shows relationships and soft deletion: +`modules/rate_limit/models.py` shows a no-relationship model with a foreign key: ```python -import uuid as uuid_pkg -from datetime import UTC, datetime -from sqlalchemy import DateTime, ForeignKey, String +from sqlalchemy import ForeignKey, Integer, String from sqlalchemy.orm import Mapped, mapped_column -from ..core.db.database import Base - -class Post(Base): - __tablename__ = "post" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - - # Content - title: Mapped[str] = mapped_column(String(30)) - text: Mapped[str] = mapped_column(String(63206)) # Large text field - media_url: Mapped[str | None] = mapped_column(String, default=None) - - # UUID for external references - uuid: Mapped[uuid_pkg.UUID] = mapped_column(default_factory=uuid_pkg.uuid4, primary_key=True, unique=True) - - # Foreign key (no relationship defined) - created_by_user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) - - # Timestamps (built-in soft delete pattern) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - is_deleted: Mapped[bool] = mapped_column(default=False, index=True) -``` -### Soft Deletion Pattern +from ...infrastructure.database import Base +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin -Soft deletion is built directly into models: -```python -# Built into each model that needs soft deletes -class Post(Base): - # ... other fields ... 
- - # Soft delete fields - is_deleted: Mapped[bool] = mapped_column(default=False, index=True) - deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) +class RateLimit(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "rate_limits" + + id: Mapped[int] = mapped_column( + "id", autoincrement=True, nullable=False, unique=True, + primary_key=True, init=False, + ) + tier_id: Mapped[int] = mapped_column(ForeignKey("tiers.id"), index=True) + name: Mapped[str] = mapped_column(String, nullable=False, unique=True) + path: Mapped[str] = mapped_column(String, nullable=False) + limit: Mapped[int] = mapped_column(Integer, nullable=False) + period: Mapped[int] = mapped_column(Integer, nullable=False) ``` -**Usage**: When `crud_posts.delete()` is called, it sets `is_deleted=True` and `deleted_at=datetime.now(UTC)` instead of removing the database row. +Each rate limit row says: "for tier X, requests to `path` are capped at `limit` per `period` seconds." -## Tier and Rate Limiting Models +## Soft Deletion -### Tier Model +The `SoftDeleteMixin` adds `is_deleted` and `deleted_at`. 
FastCRUD's `.delete()` flips them instead of removing the row: ```python -# src/app/models/tier.py -class Tier(Base): - __tablename__ = "tier" - - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True, init=False) - name: Mapped[str] = mapped_column(String(50), unique=True, nullable=False) - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) -``` +# Soft delete (default for models with the mixin) +await crud_users.delete(db=db, id=user_id) -### Rate Limit Model +# Actual DELETE FROM +await crud_users.db_delete(db=db, id=user_id) -```python -# src/app/models/rate_limit.py -class RateLimit(Base): - __tablename__ = "rate_limit" - - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True, init=False) - tier_id: Mapped[int] = mapped_column(ForeignKey("tier.id"), nullable=False) - path: Mapped[str] = mapped_column(String(255), nullable=False) - limit: Mapped[int] = mapped_column(nullable=False) # requests allowed - period: Mapped[int] = mapped_column(nullable=False) # time period in seconds - name: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) +# Filter out soft-deleted rows +await crud_users.get_multi(db=db, is_deleted=False) ``` -**Purpose**: Links API endpoints (`path`) to rate limits (`limit` requests per `period` seconds) for specific user tiers. +## Adding a New Model -## Creating New Models +### Step-by-step -### Step-by-Step Process +1. **Create the module folder** (if it doesn't exist): `mkdir -p backend/src/modules/widgets` +2. **Define the model** in `modules/widgets/models.py` +3. **Register it** in `modules/__init__.py` so Alembic sees it +4. **Generate a migration**: `cd backend && uv run alembic revision --autogenerate -m "add widgets"` +5. **Review the migration** in `migrations/versions/...` (autogenerate isn't always perfect) +6. **Apply**: `uv run alembic upgrade head` -1. **Create model file** in `src/app/models/your_model.py` -2. 
**Define model class** inheriting from `Base` -3. **Add to imports** in `src/app/models/__init__.py` -4. **Generate migration** with `alembic revision --autogenerate` -5. **Apply migration** with `alembic upgrade head` - -### Example: Creating a Category Model +### Example: a `Widget` model ```python -# src/app/models/category.py -from datetime import datetime -from typing import List -from sqlalchemy import String, DateTime -from sqlalchemy.orm import Mapped, mapped_column, relationship -from app.core.db.database import Base - -class Category(Base): - __tablename__ = "category" - - id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True, init=False) - name: Mapped[str] = mapped_column(String(50), unique=True, nullable=False) - description: Mapped[str] = mapped_column(String(255), nullable=True) - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) -``` +# backend/src/modules/widgets/models.py +from sqlalchemy import ForeignKey, String, Text +from sqlalchemy.orm import Mapped, mapped_column -If you want to relate Category to Post, just add the id reference in the model: +from ...infrastructure.database.models import SoftDeleteMixin, TimestampMixin +from ...infrastructure.database.session import Base -```python -class Post(Base): - __tablename__ = "post" - ... 
- - # Foreign key (no relationship defined) - category_id: Mapped[int] = mapped_column(ForeignKey("category.id"), index=True) + +class Widget(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "widgets" + + id: Mapped[int] = mapped_column( + "id", autoincrement=True, nullable=False, unique=True, + primary_key=True, init=False, + ) + name: Mapped[str] = mapped_column(String(100), unique=True, index=True) + description: Mapped[str | None] = mapped_column(Text, default=None) + owner_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) ``` -### Import in __init__.py +Then add to `backend/src/modules/__init__.py`: ```python -# src/app/models/__init__.py -from .user import User -from .post import Post -from .tier import Tier -from .rate_limit import RateLimit -from .category import Category # Add new model -``` +from .widgets.models import Widget -**Critical**: Without this import, Alembic won't detect the model for migrations. +__all__ = [ + # ...existing exports... + "Widget", +] +``` -## Model Validation and Constraints +## Common Patterns ### Database-Level Constraints ```python -from sqlalchemy import CheckConstraint, Index +from sqlalchemy import CheckConstraint, Index, UniqueConstraint -class Product(Base): - __tablename__ = "product" - - price: Mapped[float] = mapped_column(nullable=False) - quantity: Mapped[int] = mapped_column(nullable=False) - - # Table-level constraints - __table_args__ = ( - CheckConstraint('price > 0', name='positive_price'), - CheckConstraint('quantity >= 0', name='non_negative_quantity'), - Index('idx_product_price', 'price'), - ) -``` -### Unique Constraints +class Product(Base, TimestampMixin): + __tablename__ = "products" -```python -# Single column unique -email: Mapped[str] = mapped_column(String(100), unique=True) + id: Mapped[int] = mapped_column(primary_key=True, init=False) + price_cents: Mapped[int] = mapped_column(nullable=False) + quantity: Mapped[int] = mapped_column(nullable=False) + sku: Mapped[str] = 
mapped_column(String(50)) + org_id: Mapped[int] = mapped_column(ForeignKey("orgs.id")) -# Multi-column unique constraint -__table_args__ = ( - UniqueConstraint('user_id', 'category_id', name='unique_user_category'), -) + __table_args__ = ( + CheckConstraint("price_cents > 0", name="positive_price"), + CheckConstraint("quantity >= 0", name="non_negative_quantity"), + UniqueConstraint("org_id", "sku", name="uq_org_sku"), + Index("ix_product_price", "price_cents"), + ) ``` -## Common Model Patterns +### Enum Fields -### Timestamp Tracking +The boilerplate prefers `StrEnum` (used in `OAuthProvider`, `EnvironmentOption`, etc.): ```python -class TimestampedModel: - created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=False) - updated_at: Mapped[datetime] = mapped_column( - DateTime, - default=datetime.utcnow, - onupdate=datetime.utcnow, - nullable=False - ) +from enum import StrEnum +from sqlalchemy import String -# Use as mixin -class Post(Base, TimestampedModel, SoftDeleteMixin): - # Model automatically gets created_at, updated_at, is_deleted, deleted_at - __tablename__ = "post" - id: Mapped[int] = mapped_column(primary_key=True) -``` -### Enumeration Fields +class WidgetStatus(StrEnum): + ACTIVE = "active" + INACTIVE = "inactive" + ARCHIVED = "archived" -```python -from enum import Enum -from sqlalchemy import Enum as SQLEnum -class UserStatus(Enum): - ACTIVE = "active" - INACTIVE = "inactive" - SUSPENDED = "suspended" +class Widget(Base, TimestampMixin): + __tablename__ = "widgets" -class User(Base): - status: Mapped[UserStatus] = mapped_column(SQLEnum(UserStatus), default=UserStatus.ACTIVE) + id: Mapped[int] = mapped_column(primary_key=True, init=False) + status: Mapped[str] = mapped_column(String(20), default=WidgetStatus.ACTIVE.value) ``` +Storing the value as a `String` keeps migrations simple. 
If you prefer SQLAlchemy's `SQLEnum` with a real Postgres enum type, that's also fine — just be aware that adding values requires a migration. + ### JSON Fields ```python from sqlalchemy.dialects.postgresql import JSONB -class UserProfile(Base): - preferences: Mapped[dict] = mapped_column(JSONB, nullable=True) - metadata: Mapped[dict] = mapped_column(JSONB, default=lambda: {}) -``` - -**PostgreSQL-specific**: Uses JSONB for efficient JSON storage and querying. -## Model Testing +class UserProfile(Base, TimestampMixin): + __tablename__ = "user_profiles" -### Basic Model Tests - -```python -# tests/test_models.py -import pytest -from sqlalchemy.exc import IntegrityError -from app.models.user import User - -def test_user_creation(): - user = User( - username="testuser", - email="test@example.com", - hashed_password="hashed123" - ) - assert user.username == "testuser" - assert user.is_active is True # Default value - -def test_user_unique_constraint(): - # Test that duplicate emails raise IntegrityError - with pytest.raises(IntegrityError): - # Create users with same email - pass + id: Mapped[int] = mapped_column(primary_key=True, init=False) + user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), unique=True) + preferences: Mapped[dict] = mapped_column(JSONB, default_factory=dict, init=False) ``` ## Migration Considerations -### Backwards Compatible Changes - -Safe changes that don't break existing code: +### Backwards-compatible changes (safe) - Adding nullable columns - Adding new tables - Adding indexes -- Increasing column lengths - -### Breaking Changes +- Increasing string column lengths -Changes requiring careful migration: +### Breaking changes (need care) -- Making columns non-nullable -- Removing columns -- Changing column types +- Making columns non-nullable (need a default or backfill plan) +- Removing columns (drop after deploy is stable) +- Changing column types (often two-step: add new, migrate data, drop old) - Removing tables -## Next Steps +See 
[Migrations](migrations.md) for the full workflow. -Now that you understand model implementation: - -1. **[Schemas](schemas.md)** - Learn Pydantic validation and serialization -2. **[CRUD Operations](crud.md)** - Implement database operations with FastCRUD -3. **[Migrations](migrations.md)** - Manage schema changes with Alembic +## Next Steps -The next section covers how Pydantic schemas provide validation and API contracts separate from database models. \ No newline at end of file +1. **[Schemas](schemas.md)** - Pydantic request/response shapes +2. **[CRUD Operations](crud.md)** - FastCRUD usage patterns +3. **[Migrations](migrations.md)** - Alembic workflow diff --git a/docs/user-guide/database/schemas.md b/docs/user-guide/database/schemas.md index 058547eb..ee1b32cb 100644 --- a/docs/user-guide/database/schemas.md +++ b/docs/user-guide/database/schemas.md @@ -1,650 +1,400 @@ # Database Schemas -This section explains how Pydantic schemas handle data validation, serialization, and API contracts in the boilerplate. Schemas are separate from SQLAlchemy models and define what data enters and exits your API. +Pydantic schemas handle three things in this codebase: **input validation**, **output serialization**, and **API contracts** that frontend and backend can rely on. Schemas are separate from the SQLAlchemy models — keeping the two layers split lets you control exactly what each endpoint accepts and returns. -## Schema Purpose and Structure +## Where Schemas Live -Schemas serve three main purposes: +Each module owns its schemas, colocated with the model and CRUD: -1. **Input Validation** - Validate incoming API request data -2. **Output Serialization** - Format database data for API responses -3. **API Contracts** - Define clear interfaces between frontend and backend +```text +backend/src/modules/ +├── user/schemas.py # UserCreate, UserRead, UserUpdate, UserAnonymize, ... 
+├── tier/schemas.py # TierCreate, TierRead, TierUpdate +├── rate_limit/schemas.py # RateLimitCreate, RateLimitRead, RateLimitUpdate +└── api_keys/schemas.py # APIKeyCreate, APIKeyRead, APIKeyUpdate, KeyUsageRead +``` -### Schema File Organization +Cross-module shared schemas (timestamp/soft-delete mixins, common error shapes) live in `backend/src/modules/common/schemas.py`. -Schemas are organized in `src/app/schemas/` with one file per model: +## Common Mixin Schemas -```text -src/app/schemas/ -├── __init__.py # Imports for easy access -├── user.py # User-related schemas -├── post.py # Post-related schemas -├── tier.py # Tier schemas -├── rate_limit.py # Rate limit schemas -└── job.py # Background job schemas +`modules/common/schemas.py` provides two reusable Pydantic mixins matching the SQLAlchemy mixins: + +```python +# modules/common/schemas.py +class TimestampSchema(BaseModel): + created_at: datetime = Field(default_factory=lambda: datetime.now(UTC).replace(tzinfo=None)) + updated_at: datetime | None = Field(default=None) + # serializers cast both to ISO strings + + +class PersistentDeletion(BaseModel): + deleted_at: datetime | None = Field(default=None) + is_deleted: bool = False ``` -## User Schema Implementation +Compose them onto your full-record schema where applicable. -The User schemas (`src/app/schemas/user.py`) demonstrate common validation patterns: +## The User Schemas + +`modules/user/schemas.py` is the most extensive example. 
The pattern is **one schema per role** — each operation gets its own shape: ```python from datetime import datetime from typing import Annotated - from pydantic import BaseModel, ConfigDict, EmailStr, Field -from ..core.schemas import PersistentDeletion, TimestampSchema, UUIDSchema +from ..common.schemas import PersistentDeletion, TimestampSchema -# Base schema with common fields +# Common fields shared by create/update/full-record class UserBase(BaseModel): - name: Annotated[ - str, - Field( - min_length=2, - max_length=30, - examples=["User Userson"] - ) - ] + name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])] username: Annotated[ str, - Field( - min_length=2, - max_length=20, - pattern=r"^[a-z0-9]+$", - examples=["userson"] - ) + Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"]), ] email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])] -# Full User data -class User(TimestampSchema, UserBase, UUIDSchema, PersistentDeletion): - profile_image_url: Annotated[ - str, - Field(default="https://www.profileimageurl.com") - ] +# Full record (used internally — never returned to clients) +class User(TimestampSchema, UserBase, PersistentDeletion): hashed_password: str is_superuser: bool = False + profile_image_url: str = "https://www.profileimageurl.com" tier_id: int | None = None + # OAuth + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None + email_verified: bool = False + -# Schema for reading user data (API output) +# API response — explicitly excludes sensitive fields class UserRead(BaseModel): id: int - - name: Annotated[ - str, - Field( - min_length=2, - max_length=30, - examples=["User Userson"] - ) - ] - username: Annotated[ - str, - Field( - min_length=2, - max_length=20, - pattern=r"^[a-z0-9]+$", - examples=["userson"] - ) - ] - email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])] + name: Annotated[str, 
Field(min_length=2, max_length=30)] + username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$")] + email: EmailStr profile_image_url: str + is_deleted: bool = False tier_id: int | None + is_superuser: bool = False + email_verified: bool = False + oauth_provider: str | None = None -# Schema for creating new users (API input) -class UserCreate(UserBase): # Inherits from UserBase - model_config = ConfigDict(extra="forbid") +# API request body for POST /users/ +class UserCreate(UserBase): + model_config = ConfigDict(extra="forbid") # reject unknown fields password: Annotated[ str, Field( + min_length=8, + description=( + "Password must be at least 8 characters and include a number, " + "uppercase letter, lowercase letter, and special character" + ), + examples=["Str1ngst!"], pattern=r"^.{8,}|[0-9]+|[A-Z]+|[a-z]+|[^a-zA-Z0-9]+$", - examples=["Str1ngst!"] - ) + ), ] + # OAuth fields — populated when user signs up via Google/GitHub + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None -# Schema that FastCRUD will use to store just the hash +# What the service writes to the DB (raw password replaced with hash) class UserCreateInternal(UserBase): hashed_password: str + google_id: str | None = None + github_id: str | None = None + oauth_provider: str | None = None + email_verified: bool = False -# Schema for updating users +# Partial update — every field optional class UserUpdate(BaseModel): model_config = ConfigDict(extra="forbid") - name: Annotated[ - str | None, - Field( - min_length=2, - max_length=30, - examples=["User Userberg"], - default=None - ) - ] + name: Annotated[str | None, Field(min_length=2, max_length=30, default=None)] username: Annotated[ - str | None, - Field( - min_length=2, - max_length=20, - pattern=r"^[a-z0-9]+$", - examples=["userberg"], - default=None - ) - ] - email: Annotated[ - EmailStr | None, - Field( - examples=["user.userberg@example.com"], - default=None - ) + str | None, 
+ Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", default=None), ] + email: Annotated[EmailStr | None, Field(default=None)] profile_image_url: Annotated[ str | None, - Field( - pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", - examples=["https://www.profileimageurl.com"], - default=None - ), + Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", default=None), ] -# Internal update schema class UserUpdateInternal(UserUpdate): - updated_at: datetime + updated_at: datetime # service stamps this before persisting -# Schema to update tier id class UserTierUpdate(BaseModel): tier_id: int -# Schema for user deletion (soft delete timestamps) class UserDelete(BaseModel): model_config = ConfigDict(extra="forbid") - is_deleted: bool deleted_at: datetime -# User specific schema -class UserRestoreDeleted(BaseModel): - is_deleted: bool +# GDPR/LGPD anonymization payload +class UserAnonymize(BaseModel): + model_config = ConfigDict(extra="forbid") + name: str + username: str + hashed_password: str | None = None + # ...other PII-clearing fields... ``` -### Key Implementation Details +### Naming Conventions -**Field Validation**: Uses `Annotated[type, Field(...)]` for validation rules. `Field` parameters include: +The schemas follow a consistent vocabulary across modules: -- `min_length/max_length` - String length constraints -- `gt/ge/lt/le` - Numeric constraints -- `pattern` - Pattern matching (regex) -- `default` - Default values +| Suffix | Use | +|--------|-----| +| `Base` | Common fields shared across create/update/full schemas | +| *(none — class name = `User`)* | Full-record schema (every column, mostly internal) | +| `Read` | API response — drops sensitive/internal fields | +| `Create` | API request body for POST | +| `CreateInternal` | What the service stores (raw password → hashed_password) | +| `Update` | Partial update body for PATCH (all fields optional) | +| `UpdateInternal` | What the service stores on update (e.g. 
with stamped `updated_at`) | +| `TierUpdate`, `Anonymize`, `Delete`, … | Operation-specific narrow schemas | -**EmailStr**: Validates email format and normalizes the value. +### Why Internal vs External -**ConfigDict**: Replaces the old `Config` class. `from_attributes=True` allows creating schemas from SQLAlchemy model instances. +The split between `Create` and `CreateInternal` (and likewise for updates) keeps the API surface honest: -**Internal vs External**: Separate schemas for internal operations (like password hashing) vs API exposure. +- `UserCreate` accepts `password: str` from the client. +- The service hashes the password and constructs a `UserCreateInternal` with `hashed_password` instead. +- `crud_users.create(db=db, object=user_internal)` is what actually hits the database. -## Schema Patterns +The client can never set `hashed_password` directly, and the model never sees a plaintext password. -### Base Schema Pattern +## Field Validation -```python -# Common fields shared across operations -class PostBase(BaseModel): - title: Annotated[ - str, - Field( - min_length=1, - max_length=100 - ) - ] - content: Annotated[ - str, - Field( - min_length=1, - max_length=10000 - ) - ] +### `Annotated` + `Field` -# Specific operation schemas inherit from base -class PostCreate(PostBase): - pass # Only title and content needed for creation +The codebase uses `Annotated[T, Field(...)]` for validation rules: -class PostRead(PostBase): - model_config = ConfigDict(from_attributes=True) - - id: int - created_at: datetime - created_by_user_id: int - is_deleted: bool = False # From model's soft delete fields -``` +| `Field` parameter | Effect | +|-------------------|--------| +| `min_length` / `max_length` | String length bounds | +| `pattern` | Regex validation (e.g. 
`r"^[a-z0-9]+$"` for usernames) | +| `gt` / `ge` / `lt` / `le` | Numeric bounds | +| `default` | Default value | +| `examples` | OpenAPI example values shown in `/docs` | +| `description` | Doc string visible in OpenAPI | -**Purpose**: Reduces duplication and ensures consistency across related schemas. +### `EmailStr` -### Optional Fields in Updates +Pydantic's `EmailStr` validates the email format and normalizes the casing. -```python -class PostUpdate(BaseModel): - title: Annotated[ - str | None, - Field( - min_length=1, - max_length=100, - default=None - ) - ] - content: Annotated[ - str | None, - Field( - min_length=1, - max_length=10000, - default=None - ) - ] -``` +### `ConfigDict(extra="forbid")` -**Pattern**: All fields optional in update schemas. Only provided fields are updated in the database. +Set on `UserCreate`, `UserUpdate`, etc. — anything the client sends beyond the declared fields raises a 422. This matters most for create/update payloads where stray fields could otherwise sneak through. -### Nested Schemas +### `from_attributes` -```python -# Post schema with user information -class PostWithUser(PostRead): - created_by_user: UserRead # Nested user data +Use `ConfigDict(from_attributes=True)` when you need to build a Pydantic schema from a SQLAlchemy model instance directly. The boilerplate's services mostly work with dicts (FastCRUD's default return shape), so this is rarely needed — but it's the right setting if you do `UserRead.model_validate(orm_user)`. -# Alternative: Custom nested schema -class PostAuthor(BaseModel): - model_config = ConfigDict(from_attributes=True) - - id: int - username: str - # Only include fields needed for this context +## Schema Patterns -class PostRead(PostBase): - created_by_user: PostAuthor -``` +### Optional Fields in Updates -**Usage**: Include related model data in responses without exposing all fields. 
+The convention is **all fields optional** in `*Update` schemas: -## Validation Patterns +```python +class UserUpdate(BaseModel): + model_config = ConfigDict(extra="forbid") + + name: Annotated[str | None, Field(min_length=2, max_length=30, default=None)] + email: Annotated[EmailStr | None, Field(default=None)] + # ... +``` + +The service then writes only the fields the client actually provided. ### Custom Validators +For cross-field rules or transforms: + ```python from pydantic import field_validator, model_validator -class UserCreateWithConfirm(UserBase): - password: str - confirm_password: str - - @field_validator('username') + +class WidgetCreate(BaseModel): + name: str + color: str + quantity: int = 1 + + @field_validator("name") @classmethod - def validate_username(cls, v): - if v.lower() in ['admin', 'root', 'system']: - raise ValueError('Username not allowed') - return v.lower() # Normalize to lowercase - - @model_validator(mode='after') - def validate_passwords_match(self): - if self.password != self.confirm_password: - raise ValueError('Passwords do not match') + def normalize_name(cls, v: str) -> str: + if v.lower() in {"admin", "system"}: + raise ValueError("Reserved name") + return v.strip().lower() + + @model_validator(mode="after") + def check_quantity(self) -> "WidgetCreate": + if self.color == "rare" and self.quantity > 1: + raise ValueError("Rare widgets are limited to one per request") return self ``` -**field_validator**: Validates individual fields. Can transform values. - -**model_validator**: Validates across multiple fields. Access to full model data. +`field_validator` validates one field; `model_validator(mode="after")` runs after all fields are set and can validate combinations. 
### Computed Fields +For values derived at serialization time (not stored): + ```python from pydantic import computed_field -class UserReadWithComputed(UserRead): - created_at: datetime # Would need to be added to actual UserRead - + +class UserReadWithStats(UserRead): + created_at: datetime # add this if your read schema doesn't already have it + @computed_field @property - def age_days(self) -> int: - return (datetime.utcnow() - self.created_at).days - - @computed_field - @property def display_name(self) -> str: return f"@{self.username}" -``` - -**Purpose**: Add computed values to API responses without storing them in the database. - -### Conditional Validation -```python -class PostCreate(BaseModel): - title: str - content: str - category: Optional[str] = None - is_premium: bool = False - - @model_validator(mode='after') - def validate_premium_content(self): - if self.is_premium and not self.category: - raise ValueError('Premium posts must have a category') - return self + @computed_field + @property + def age_days(self) -> int: + return (datetime.now(UTC) - self.created_at).days ``` -## Schema Configuration +## Multi-Record Responses -### Model Config Options +The boilerplate uses **FastCRUD's `PaginatedListResponse`** for paginated list endpoints: ```python -class UserRead(BaseModel): - model_config = ConfigDict( - from_attributes=True, # Allow creation from SQLAlchemy models - extra="forbid", # Reject extra fields - str_strip_whitespace=True, # Strip whitespace from strings - validate_assignment=True, # Validate on field assignment - populate_by_name=True, # Allow field names and aliases +from fastcrud import PaginatedListResponse, compute_offset, paginated_response + + +@router.get("/", response_model=PaginatedListResponse[UserRead]) +async def get_users( + db: Annotated[AsyncSession, Depends(async_session)], + user_service: Annotated[UserService, Depends(get_user_service)], + page: int = 1, + items_per_page: int = 10, +) -> dict[str, Any]: + result = await 
user_service.get_paginated( + skip=compute_offset(page, items_per_page), + limit=items_per_page, + db=db, ) + return paginated_response(crud_data=result, page=page, items_per_page=items_per_page) ``` -### Field Aliases +The response shape: -```python -class UserResponse(BaseModel): - user_id: Annotated[ - int, - Field(alias="id") - ] - username: str - email_address: Annotated[ - str, - Field(alias="email") - ] - - model_config = ConfigDict(populate_by_name=True) -``` - -**Usage**: API can accept both `id` and `user_id`, `email` and `email_address`. - -## Response Schema Patterns - -### Multi-Record Responses - -[FastCRUD's](https://benavlabs.github.io/fastcrud/) `get_multi` method returns a `GetMultiResponse`: - -```python -# Using get_multi directly -users = await crud_users.get_multi( - db=db, - offset=0, - limit=10, - schema_to_select=UserRead, - return_as_model=True, - return_total_count=True -) -# Returns GetMultiResponse structure: -# { -# "data": [UserRead, ...], -# "total_count": 150 -# } +```json +{ + "data": [{ "id": 1, "name": "...", "username": "..." 
}], + "total_count": 150, + "has_more": true, + "page": 1, + "items_per_page": 10 +} ``` -### Paginated Responses - -For pagination with page numbers, use `PaginatedListResponse`: +For single-record endpoints, return the schema directly: ```python -from fastcrud import PaginatedListResponse - -# In API endpoint - ONLY for paginated list responses -@router.get("/users/", response_model=PaginatedListResponse[UserRead]) -async def get_users(page: int = 1, items_per_page: int = 10): - # Returns paginated structure with additional pagination fields: - # { - # "data": [UserRead, ...], - # "total_count": 150, - # "has_more": true, - # "page": 1, - # "items_per_page": 10 - # } - -# Single user endpoints return UserRead directly -@router.get("/users/{user_id}", response_model=UserRead) -async def get_user(user_id: int): - # Returns single UserRead object: - # { - # "id": 1, - # "name": "User Userson", - # "username": "userson", - # "email": "user.userson@example.com", - # "profile_image_url": "https://...", - # "tier_id": null - # } +@router.get("/me", response_model=UserRead) +async def me(current_user: Annotated[dict[str, Any], Depends(get_current_user)]): + return current_user ``` -### Error Response Schemas +## Adding Schemas for a New Module -```python -class ErrorResponse(BaseModel): - detail: str - error_code: Optional[str] = None - -class ValidationErrorResponse(BaseModel): - detail: str - errors: list[dict] # Pydantic validation errors -``` - -### Success Response Wrapper +1. **Create the schema file**: `backend/src/modules/widgets/schemas.py` +2. **Define a `WidgetBase`** with the fields shared by create/update/read +3. **Add `WidgetCreate`, `WidgetRead`, `WidgetUpdate`** (and any internal variants you need) +4. 
**Wire them up** in the module's `routes.py` and `service.py` ```python -from typing import Generic, TypeVar - -T = TypeVar('T') - -class SuccessResponse(BaseModel, Generic[T]): - success: bool = True - data: T - message: Optional[str] = None - -# Usage in endpoint -@router.post("/users/", response_model=SuccessResponse[UserRead]) -async def create_user(user_data: UserCreate): - user = await crud_users.create(db=db, object=user_data) - return SuccessResponse(data=user, message="User created successfully") -``` +# backend/src/modules/widgets/schemas.py +from datetime import datetime +from typing import Annotated +from pydantic import BaseModel, ConfigDict, Field -## Creating New Schemas +from ..common.schemas import TimestampSchema -### Step-by-Step Process -1. **Create schema file** in `src/app/schemas/your_model.py` -2. **Define base schema** with common fields -3. **Create operation-specific schemas** (Create, Read, Update, Delete) -4. **Add validation rules** as needed -5. **Import in __init__.py** for easy access +class WidgetBase(BaseModel): + name: Annotated[str, Field(min_length=1, max_length=50)] + description: Annotated[str | None, Field(max_length=255, default=None)] -### Example: Category Schemas -```python -# src/app/schemas/category.py -from datetime import datetime -from typing import Annotated -from pydantic import BaseModel, Field, ConfigDict - -class CategoryBase(BaseModel): - name: Annotated[ - str, - Field( - min_length=1, - max_length=50 - ) - ] - description: Annotated[ - str | None, - Field( - max_length=255, - default=None - ) - ] +class WidgetCreate(WidgetBase): + model_config = ConfigDict(extra="forbid") -class CategoryCreate(CategoryBase): - pass -class CategoryRead(CategoryBase): +class WidgetRead(WidgetBase): model_config = ConfigDict(from_attributes=True) - id: int + owner_id: int created_at: datetime -class CategoryUpdate(BaseModel): - name: Annotated[ - str | None, - Field( - min_length=1, - max_length=50, - default=None - ) - ] - 
description: Annotated[ - str | None, - Field( - max_length=255, - default=None - ) - ] - -class CategoryWithPosts(CategoryRead): - posts: list[PostRead] = [] # Include related posts -``` - -### Import in __init__.py - -```python -# src/app/schemas/__init__.py -from .user import UserCreate, UserRead, UserUpdate -from .post import PostCreate, PostRead, PostUpdate -from .category import CategoryCreate, CategoryRead, CategoryUpdate -``` - -## Schema Testing - -### Validation Testing - -```python -# tests/test_schemas.py -import pytest -from pydantic import ValidationError -from app.schemas.user import UserCreate - -def test_user_create_valid(): - user_data = { - "name": "Test User", - "username": "testuser", - "email": "test@example.com", - "password": "Str1ngst!" - } - user = UserCreate(**user_data) - assert user.username == "testuser" - assert user.name == "Test User" - -def test_user_create_invalid_email(): - with pytest.raises(ValidationError) as exc_info: - UserCreate( - name="Test User", - username="test", - email="invalid-email", - password="Str1ngst!" 
- ) - - errors = exc_info.value.errors() - assert any(error['type'] == 'value_error' for error in errors) - -def test_password_validation(): - with pytest.raises(ValidationError) as exc_info: - UserCreate( - name="Test User", - username="test", - email="test@example.com", - password="123" # Doesn't match pattern - ) -``` - -### Serialization Testing -```python -from app.models.user import User -from app.schemas.user import UserRead - -def test_user_read_from_model(): - # Create model instance - user_model = User( - id=1, - name="Test User", - username="testuser", - email="test@example.com", - profile_image_url="https://example.com/image.jpg", - hashed_password="hashed123", - is_superuser=False, - tier_id=None, - created_at=datetime.utcnow() - ) - - # Convert to schema - user_schema = UserRead.model_validate(user_model) - assert user_schema.username == "testuser" - assert user_schema.id == 1 - assert user_schema.name == "Test User" - # hashed_password not included in UserRead +class WidgetUpdate(BaseModel): + model_config = ConfigDict(extra="forbid") + name: Annotated[str | None, Field(min_length=1, max_length=50, default=None)] + description: Annotated[str | None, Field(max_length=255, default=None)] ``` ## Common Pitfalls -### Model vs Schema Field Names +### Don't expose sensitive fields ```python -# DON'T - Exposing sensitive fields +# BAD — leaks the password hash class UserRead(BaseModel): - hashed_password: str # Never expose password hashes + hashed_password: str -# DO - Only expose safe fields +# GOOD — read-only public shape class UserRead(BaseModel): id: int name: str username: str - email: str + email: EmailStr profile_image_url: str - tier_id: int | None ``` -### Validation Performance +### Don't query the database in validators ```python -# DON'T - Complex validation in every request -@field_validator('email') -@classmethod -def validate_email_unique(cls, v): - # Database query in validator - slow! 
- if crud_users.exists(email=v): - raise ValueError('Email already exists') - -# DO - Handle uniqueness in business logic -# Let database unique constraints handle this +# BAD — every request hits the DB twice +@field_validator("email") +@classmethod +def email_must_be_unique(cls, v): + if crud_users.exists(email=v): # I/O in a validator + raise ValueError("Email already exists") + +# GOOD — let the DB unique constraint and service-layer logic handle it ``` -## Next Steps +The boilerplate's `UserService.create` already checks for duplicates before insert. The DB unique constraint is the final guardrail. -Now that you understand schema implementation: +### Don't reuse the same schema for create and update -1. **[CRUD Operations](crud.md)** - Learn how schemas integrate with database operations -2. **[Migrations](migrations.md)** - Manage database schema changes -3. **[API Endpoints](../api/endpoints.md)** - Use schemas in FastAPI endpoints +A `Create` schema requires fields that an `Update` schema should be able to omit. Splitting them avoids accidental "this field defaulted because the client forgot it" bugs. + +## Next Steps -The next section covers CRUD operations and how they use these schemas for data validation and transformation. \ No newline at end of file +1. **[CRUD Operations](crud.md)** - How schemas plug into FastCRUD +2. **[Migrations](migrations.md)** - Manage the underlying database changes +3. **[API Endpoints](../api/endpoints.md)** - Use schemas in route handlers diff --git a/docs/user-guide/development.md b/docs/user-guide/development.md index 84fb1879..7fe460b6 100644 --- a/docs/user-guide/development.md +++ b/docs/user-guide/development.md @@ -1,729 +1,372 @@ # Development Guide -This guide covers everything you need to know about extending, customizing, and developing with the FastAPI boilerplate. 
+This page covers the day-to-day development loop: running the app, the tools that ship with it, how to add a new module, and what to know about debugging the boilerplate's moving parts. -## Extending the Boilerplate +For end-to-end "how do I add an entity," see: -### Adding New Models +- **[Database → Models](database/models.md)** — defining `Base`-derived dataclass models +- **[Database → Schemas](database/schemas.md)** — request/response Pydantic models +- **[API → Endpoints](api/endpoints.md)** — wiring routes to services +- **[Admin Panel → Adding Models](admin-panel/adding-models.md)** — surfacing the model in the admin UI -Follow this step-by-step process to add new entities to your application: - -#### 1. Create SQLAlchemy Model - -Create a new file in `src/app/models/` (e.g., `category.py`): - -```python -from sqlalchemy import String, ForeignKey -from sqlalchemy.orm import Mapped, mapped_column, relationship - -from ..core.db.database import Base +This page is the meta-guide that ties them together. +## Running the App +```bash +cd backend +uv run uvicorn src.interfaces.main:app --reload --host 0.0.0.0 --port 8000 ``` -class Category(Base): - __tablename__ = "category" - - id: Mapped[int] = mapped_column( - "id", - autoincrement=True, - nullable=False, - unique=True, - primary_key=True, - init=False, - ) - - name: Mapped[str] = mapped_column(String(50)) - description: Mapped[str | None] = mapped_column(String(255), default=None) - -class Post(Base): - __tablename__ = "post" +`--reload` watches the filesystem and restarts on Python file changes. Use it for development; **never** in production. 
- id: Mapped[int] = mapped_column(primary_key=True) - title: Mapped[str] = mapped_column(String(100)) +If you're using Docker: - category_id: Mapped[int | None] = mapped_column( - ForeignKey("category.id"), - index=True, - default=None - ) +```bash +docker compose up -d # API + Postgres + Redis +docker compose logs -f api # tail the api logs +docker compose exec api bash # shell into the api container ``` -#### 2. Create Pydantic Schemas - -Create `src/app/schemas/category.py`: - -```python -from datetime import datetime -from typing import Annotated -from pydantic import BaseModel, Field, ConfigDict - +The service names depend on your `docker-compose.yml`; the `api` name is conventional. -class CategoryBase(BaseModel): - name: Annotated[str, Field(min_length=1, max_length=50)] - description: Annotated[str | None, Field(max_length=255, default=None)] +## The Background Worker +If your app uses Taskiq tasks, run a worker alongside the API in a second terminal: -class CategoryCreate(CategoryBase): - model_config = ConfigDict(extra="forbid") - +```bash +cd backend +uv run taskiq worker infrastructure.taskiq.worker:default_broker --reload +``` -class CategoryRead(CategoryBase): - model_config = ConfigDict(from_attributes=True) - - id: int - created_at: datetime +`--reload` is dev-only; drop it in production. See [Background Tasks](background-tasks/index.md) for details. +## The Dev Toolchain -class CategoryUpdate(BaseModel): - model_config = ConfigDict(extra="forbid") - - name: Annotated[str | None, Field(min_length=1, max_length=50, default=None)] - description: Annotated[str | None, Field(max_length=255, default=None)] +The project ships configured `ruff`, `mypy`, and `pytest` via `backend/pyproject.toml`: +```bash +cd backend -class CategoryUpdateInternal(CategoryUpdate): - updated_at: datetime +# Lint + format (ruff handles both) +uv run ruff check . +uv run ruff format . +uv run ruff check --fix . 
# auto-fix what ruff can +# Type check +uv run mypy src -class CategoryDelete(BaseModel): - model_config = ConfigDict(extra="forbid") - - is_deleted: bool - deleted_at: datetime +# Tests +uv run pytest +uv run pytest -k "test_user" # run tests matching a name +uv run pytest -x # stop on first failure +uv run pytest -n auto # parallel via pytest-xdist ``` -#### 3. Create CRUD Operations +Ruff is configured (`pyproject.toml:[tool.ruff]`) with: -Create `src/app/crud/crud_categories.py`: +- `line-length = 128` +- Selected rule sets: `E`, `F`, `I`, `UP` (pyflakes, pycodestyle, isort, pyupgrade) +- `known-first-party = ["src"]` so `src.*` imports are grouped correctly -```python -from fastcrud import FastCRUD +Mypy is intentionally relaxed (`disallow_untyped_defs = false`) — adopt strictness gradually as you add types to new modules. -from ..models.category import Category -from ..schemas.category import CategoryCreate, CategoryUpdate, CategoryUpdateInternal, CategoryDelete +## Pre-Commit -CRUDCategory = FastCRUD[Category, CategoryCreate, CategoryUpdate, CategoryUpdateInternal, CategoryDelete] -crud_categories = CRUDCategory(Category) -``` +The repo's `.pre-commit-config.yaml` wires up ruff, pyupgrade, docformatter, mdformat, and a few standard hygiene hooks (trailing whitespace, large files, private keys). Install once: -#### 4. Update Model Imports - -Add your new model to `src/app/models/__init__.py`: - -```python -from .category import Category -from .user import User -from .post import Post -# ... other imports +```bash +pip install pre-commit +pre-commit install ``` -#### 5. Create Database Migration - -Generate and apply the migration: +After that, `git commit` runs the hooks automatically. To run them ad hoc: ```bash -# From the src/ directory -uv run alembic revision --autogenerate -m "Add category model" -uv run alembic upgrade head +pre-commit run --all-files ``` -#### 6. 
Create API Endpoints - -Create `src/app/api/v1/categories.py`: - -```python -from typing import Annotated - -from fastapi import APIRouter, Depends, HTTPException, Request -from fastcrud import PaginatedListResponse, compute_offset -from sqlalchemy.ext.asyncio import AsyncSession - -from ...api.dependencies import get_current_superuser, get_current_user -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import DuplicateValueException, NotFoundException -from ...crud.crud_categories import crud_categories -from ...schemas.category import CategoryCreate, CategoryRead, CategoryUpdate - -router = APIRouter(tags=["categories"]) +## Adding a New Module +The boilerplate organizes domain code under `backend/src/modules//` with a vertical-slice layout. To add a `widgets` module: -@router.post("/category", response_model=CategoryRead, status_code=201) -async def write_category( - request: Request, - category: CategoryCreate, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -): - category_row = await crud_categories.exists(db=db, name=category.name) - if category_row: - raise DuplicateValueException("Category name already exists") - - return await crud_categories.create(db=db, object=category) - - -@router.get("/categories", response_model=PaginatedListResponse[CategoryRead]) -async def read_categories( - request: Request, - db: Annotated[AsyncSession, Depends(async_get_db)], - page: int = 1, - items_per_page: int = 10, -): - categories_data = await crud_categories.get_multi( - db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - schema_to_select=CategoryRead, - is_deleted=False, - ) - - return categories_data - - -@router.get("/category/{category_id}", response_model=CategoryRead) -async def read_category( - request: Request, - category_id: int, - db: Annotated[AsyncSession, Depends(async_get_db)], -): - db_category = await crud_categories.get( - 
db=db, - schema_to_select=CategoryRead, - id=category_id, - is_deleted=False - ) - if not db_category: - raise NotFoundException("Category not found") - - return db_category - - -@router.patch("/category/{category_id}", response_model=CategoryRead) -async def patch_category( - request: Request, - category_id: int, - values: CategoryUpdate, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -): - db_category = await crud_categories.get(db=db, id=category_id, is_deleted=False) - if not db_category: - raise NotFoundException("Category not found") - - if values.name: - category_row = await crud_categories.exists(db=db, name=values.name) - if category_row and category_row["id"] != category_id: - raise DuplicateValueException("Category name already exists") - - return await crud_categories.update(db=db, object=values, id=category_id) - - -@router.delete("/category/{category_id}") -async def erase_category( - request: Request, - category_id: int, - current_user: Annotated[dict, Depends(get_current_superuser)], - db: Annotated[AsyncSession, Depends(async_get_db)], -): - db_category = await crud_categories.get(db=db, id=category_id, is_deleted=False) - if not db_category: - raise NotFoundException("Category not found") - - await crud_categories.delete(db=db, db_row=db_category, garbage_collection=False) - return {"message": "Category deleted"} +```text +backend/src/modules/widgets/ +├── __init__.py +├── models.py # SQLAlchemy model (Base + dataclass) +├── schemas.py # Pydantic request/response models +├── crud.py # FastCRUD instance +├── service.py # Business logic — calls CRUD, raises domain errors +└── routes.py # FastAPI router — wraps the service, handles HTTP ``` -#### 7. Register Router - -Add your router to `src/app/api/v1/__init__.py`: +The full pattern (with concrete code) is in [Database → Models](database/models.md) and [API → Endpoints](api/endpoints.md). The short version: + +1. 
**Write the model** in `models.py`. Inherit from `Base`, use mixins (`TimestampMixin`, `SoftDeleteMixin`, `UUIDMixin`) where they apply. +2. **Write the schemas** in `schemas.py`. Standard set: `WidgetBase`, `WidgetCreate`, `WidgetRead`, `WidgetUpdate`, plus `WidgetSelect` for FastCRUD's `schema_to_select`. +3. **Wire FastCRUD** in `crud.py`: + ```python + from fastcrud import FastCRUD + from .models import Widget + crud_widgets = FastCRUD(Widget) + ``` +4. **Implement the service** in `service.py` with class methods that call `crud_widgets`, raise `DomainError` subclasses on bad state. +5. **Define routes** in `routes.py`. Wrap the service, catch domain exceptions via `handle_exception`, return dicts (FastAPI serializes through `response_model=WidgetRead`). +6. **Register the router** in `interfaces/main.py` (or wherever your top-level routers are aggregated): + ```python + from src.modules.widgets.routes import router as widgets_router + api_v1.include_router(widgets_router, prefix="/widgets") + ``` +7. **Generate a migration**: + ```bash + cd backend + uv run alembic revision --autogenerate -m "Add widget model" + uv run alembic upgrade head + ``` + Note: `validate_production_migration` runs at the start of `env.py` and refuses to apply migrations in production unless `CONFIRM_PRODUCTION_MIGRATION=yes` is set. Local development is unaffected. +8. **(Optional)** Add a `WidgetAdmin` view — see [Admin Panel → Adding Models](admin-panel/adding-models.md). + +The Alembic env (`backend/migrations/env.py`) auto-discovers models via `import_models("src.modules")`, so new modules are picked up by `--autogenerate` without any manual import wiring — provided your model is in `modules//models.py`. + +## Adding Custom Middleware + +Middleware lives at `backend/src/infrastructure/middleware.py` (or a peer file you create). The pattern: ```python -from fastapi import APIRouter -from .categories import router as categories_router -# ... 
other imports - -router = APIRouter() -router.include_router(categories_router, prefix="/categories") -# ... other router includes -``` +import time -### Creating Custom Middleware - -Create middleware in `src/app/middleware/`: - -```python from fastapi import Request, Response -from starlette.middleware.base import BaseHTTPMiddleware +from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint +from starlette.types import ASGIApp -class CustomHeaderMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request: Request, call_next): - # Pre-processing - start_time = time.time() - - # Process request +class TimingMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: + start = time.perf_counter() response = await call_next(request) - - # Post-processing - process_time = time.time() - start_time - response.headers["X-Process-Time"] = str(process_time) - + elapsed_ms = (time.perf_counter() - start) * 1000 + response.headers["X-Process-Time-Ms"] = f"{elapsed_ms:.1f}" return response ``` -Register in `src/app/main.py`: +Register in `infrastructure/app_factory.py` (or your overridden `create_application`): ```python -from .middleware.custom_header_middleware import CustomHeaderMiddleware - -app.add_middleware(CustomHeaderMiddleware) +application.add_middleware(TimingMiddleware) ``` -## Testing - -### Test Configuration +Order matters — middleware added later runs **earlier** in the request path. The boilerplate's own middlewares (`SecurityHeadersMiddleware`, `ClientCacheMiddleware`, `RateLimiterMiddleware`, `SessionMiddleware`, etc.) are added in a deliberate order; see `app_factory.py:create_application`. -The boilerplate uses pytest for testing. Test configuration is in `pytest.ini` and test dependencies in `pyproject.toml`. 
+## Adding a Custom Dependency -### Database Testing Setup - -Create test database fixtures in `tests/conftest.py`: +Dependencies belong with the feature they serve. For session-aware dependencies, look at `infrastructure/auth/session/dependencies.py:get_current_user` for a template. ```python -import asyncio -import pytest -import pytest_asyncio -from httpx import AsyncClient -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker - -from src.app.core.config import settings -from src.app.core.db.database import Base, async_get_db -from src.app.main import app - -# Test database URL -TEST_DATABASE_URL = "postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db" - -# Create test engine -test_engine = create_async_engine(TEST_DATABASE_URL, echo=True) -TestSessionLocal = sessionmaker( - test_engine, class_=AsyncSession, expire_on_commit=False -) +from typing import Annotated +from fastapi import Depends, Request -@pytest_asyncio.fixture -async def async_session(): - async with test_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async with TestSessionLocal() as session: - yield session - - async with test_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) +from src.infrastructure.auth.session.dependencies import get_current_user -@pytest_asyncio.fixture -async def async_client(async_session): - def get_test_db(): - return async_session - - app.dependency_overrides[async_get_db] = get_test_db - - async with AsyncClient(app=app, base_url="http://test") as client: - yield client - - app.dependency_overrides.clear() +def get_workspace( + request: Request, + current_user: Annotated[dict, Depends(get_current_user)], +) -> str: + workspace = request.headers.get("X-Workspace") + if not workspace: + raise PermissionDeniedError("Missing workspace header") + # validate workspace membership against current_user... 
+ return workspace ``` -### Writing Tests +Use it as `Depends(get_workspace)` on a route, or in another dependency for chaining. -#### Model Tests +## Debugging Tips -```python -# tests/test_models.py -import pytest -from src.app.models.user import User +### See every SQL query +Set `DATABASE_ECHO=true` in your `.env`. Every statement (and parameter binding) is logged. Useful when investigating why a FastCRUD call returns the wrong shape, or when chasing N+1 issues. -@pytest_asyncio.fixture -async def test_user(async_session): - user = User( - name="Test User", - username="testuser", - email="test@example.com", - hashed_password="hashed_password" - ) - async_session.add(user) - await async_session.commit() - await async_session.refresh(user) - return user - - -async def test_user_creation(test_user): - assert test_user.name == "Test User" - assert test_user.username == "testuser" - assert test_user.email == "test@example.com" +### Inspect rate-limit and cache state + +```bash +docker compose exec redis redis-cli +> SELECT 0 # cache DB +> KEYS '*' +> SELECT 1 # rate-limiter DB +> KEYS 'ratelimit:*' +> SELECT 3 # taskiq queue DB +> LRANGE default 0 -1 # pending tasks ``` -#### API Endpoint Tests +Each subsystem uses a different Redis DB number; see `.env.example` for the conventions (`CACHE_REDIS_DB=0`, `SESSION_REDIS_DB=1`, `RATE_LIMITER_REDIS_DB=1` (yes, the rate limiter shares with sessions in defaults — change one if you want isolation), `TASKIQ_REDIS_DB=3`). -```python -# tests/test_api.py -import pytest -from httpx import AsyncClient - - -async def test_create_user(async_client: AsyncClient): - user_data = { - "name": "New User", - "username": "newuser", - "email": "new@example.com", - "password": "SecurePass123!" 
- } - - response = await async_client.post("/api/v1/users", json=user_data) - assert response.status_code == 201 - - data = response.json() - assert data["name"] == "New User" - assert data["username"] == "newuser" - assert "hashed_password" not in data # Ensure password not exposed - - -async def test_read_users(async_client: AsyncClient): - response = await async_client.get("/api/v1/users") - assert response.status_code == 200 - - data = response.json() - assert "data" in data - assert "total_count" in data -``` +### Watch sessions live -#### CRUD Tests +If a user reports being logged out unexpectedly, check the session backend directly: ```python -# tests/test_crud.py -import pytest -from src.app.crud.crud_users import crud_users -from src.app.schemas.user import UserCreate - - -async def test_crud_create_user(async_session): - user_data = UserCreate( - name="CRUD User", - username="cruduser", - email="crud@example.com", - password="password123" - ) - - user = await crud_users.create(db=async_session, object=user_data) - assert user["name"] == "CRUD User" - assert user["username"] == "cruduser" - - -async def test_crud_get_user(async_session, test_user): - retrieved_user = await crud_users.get( - db=async_session, - id=test_user.id - ) - assert retrieved_user["name"] == test_user.name +from src.infrastructure.auth.session import SessionManager +manager = SessionManager() +sessions = await manager.get_user_sessions(user_id=42) ``` -### Running Tests +See [Authentication → Sessions](authentication/sessions.md) for full details. -```bash -# Run all tests -uv run pytest +### Use the interactive docs -# Run with coverage -uv run pytest --cov=src +`http://localhost:8000/docs` (Swagger UI) and `http://localhost:8000/redoc` are auto-generated from your routes. Send requests directly from the UI; every endpoint that takes a Pydantic body has a "Try it out" form. Only present in non-production by default — gated by `OPENAPI_URL`. 
-# Run specific test file -uv run pytest tests/test_api.py +### Production validators -# Run with verbose output -uv run pytest -v +When `ENVIRONMENT=production`, `infrastructure/security/` runs validators at startup that fail loudly on: -# Run tests matching pattern -uv run pytest -k "test_user" -``` +- Placeholder `SECRET_KEY` +- `DEBUG=true` +- Unset `CORS_ORIGINS` or `CORS_ORIGINS=*` -## Customization +If your prod boot is failing with one of those, that's your hint — don't bypass the validator. -### Environment-Specific Configuration +## Testing -Create environment-specific settings: +The repo is **set up** for `pytest` but doesn't ship example tests yet — `backend/pyproject.toml` configures pytest with: -```python -# src/app/core/config.py -class LocalSettings(Settings): - ENVIRONMENT: str = "local" - DEBUG: bool = True - -class ProductionSettings(Settings): - ENVIRONMENT: str = "production" - DEBUG: bool = False - # Production-specific settings - -def get_settings(): - env = os.getenv("ENVIRONMENT", "local") - if env == "production": - return ProductionSettings() - return LocalSettings() - -settings = get_settings() +```toml +[tool.pytest.ini_options] +pythonpath = ["src"] +testpaths = ["tests"] +env = ["ENVIRONMENT=pytest", "PYTEST_CURRENT_TEST=true"] ``` -### Custom Logging +Tests run with `ENVIRONMENT=pytest`, which the production validator treats as "not production" — your test suite won't be blocked by missing prod-only env vars. 
-Configure logging in `src/app/core/config.py`: +A sane starting `tests/conftest.py`: ```python -import logging -from pythonjsonlogger import jsonlogger - -def setup_logging(): - # JSON logging for production - if settings.ENVIRONMENT == "production": - logHandler = logging.StreamHandler() - formatter = jsonlogger.JsonFormatter() - logHandler.setFormatter(formatter) - logger = logging.getLogger() - logger.addHandler(logHandler) - logger.setLevel(logging.INFO) - else: - # Simple logging for development - logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) -``` - -## Opting Out of Services +# tests/conftest.py +import pytest +import pytest_asyncio +from httpx import ASGITransport, AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -### Disabling Redis Caching +from src.infrastructure.database.models import Base +from src.infrastructure.database.session import async_session +from src.interfaces.main import app -1. Remove cache decorators from endpoints -2. Update dependencies in `src/app/core/config.py`: +TEST_DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/test_db" -```python -class Settings(BaseSettings): - # Comment out or remove Redis cache settings - # REDIS_CACHE_HOST: str = "localhost" - # REDIS_CACHE_PORT: int = 6379 - pass -``` -3. Remove Redis cache imports and usage - -### Disabling Background Tasks (ARQ) +@pytest_asyncio.fixture +async def db_engine(): + engine = create_async_engine(TEST_DATABASE_URL) + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield engine + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() -1. Remove ARQ from `pyproject.toml` dependencies -2. Remove worker configuration from `docker-compose.yml` -3. Delete `src/app/core/worker/` directory -4. 
Remove task-related endpoints -### Disabling Rate Limiting +@pytest_asyncio.fixture +async def db_session(db_engine) -> AsyncSession: + factory = async_sessionmaker(db_engine, expire_on_commit=False) + async with factory() as session: + yield session -1. Remove rate limiting dependencies from endpoints: -```python -# Remove this dependency -dependencies=[Depends(rate_limiter_dependency)] +@pytest_asyncio.fixture +async def client(db_session): + async def override_db(): + yield db_session + + app.dependency_overrides[async_session] = override_db + transport = ASGITransport(app=app) + async with AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac + app.dependency_overrides.clear() ``` -2. Remove rate limiting models and schemas -3. Update database migrations to remove rate limit tables - -### Disabling Authentication - -1. Remove JWT dependencies from protected endpoints -2. Remove user-related models and endpoints -3. Update database to remove user tables -4. Remove authentication middleware - -### Minimal FastAPI Setup - -For a minimal setup with just basic FastAPI: +Then a smoke test: ```python -# src/app/main.py (minimal version) -from fastapi import FastAPI - -app = FastAPI( - title="Minimal API", - description="Basic FastAPI application", - version="1.0.0" -) - -@app.get("/") -async def root(): - return {"message": "Hello World"} - -@app.get("/health") -async def health_check(): - return {"status": "healthy"} +# tests/test_smoke.py +async def test_health(client): + response = await client.get("/api/v1/health") + assert response.status_code == 200 ``` -## Best Practices - -### Code Organization - -- Keep models, schemas, and CRUD operations in separate files -- Use consistent naming conventions across the application -- Group related functionality in modules -- Follow FastAPI and Pydantic best practices - -### Database Operations +For tests that genuinely need Postgres semantics (FK constraints, ARRAY types, JSONB), 
`testcontainers-postgres` is already a dev dependency — spin up a real Postgres in a fixture instead of mocking the database. -- Always use transactions for multi-step operations -- Implement soft deletes for important data -- Use database constraints for data integrity -- Index frequently queried columns +For unit tests on services, mock at the **CRUD layer**, not at the database. The service contract is "I call `crud_widgets.get` and get back a dict-or-None"; that's the seam to mock. -### API Design +## Customizing the Settings -- Use consistent response formats -- Implement proper error handling -- Version your APIs from the start -- Document all endpoints with proper schemas +Settings live in `backend/src/infrastructure/config/settings.py`. To add a new env-driven value: -### Security +1. Add the field to the relevant settings class (or create a new one): + ```python + class WidgetSettings(BaseSettings): + WIDGET_BATCH_SIZE: int = config("WIDGET_BATCH_SIZE", default=100, cast=int) + ``` +2. Add it to the composed `Settings` mixin list at the bottom of `settings.py`. +3. Document the env var in `backend/.env.example`. -- Never expose sensitive data in API responses -- Use proper authentication and authorization -- Validate all input data -- Implement rate limiting for public endpoints -- Use HTTPS in production +Then read it via `get_settings().WIDGET_BATCH_SIZE`. -### Performance +See [Configuration → Settings Classes](configuration/settings-classes.md) for the full pattern. 
-- Use async/await consistently -- Implement caching for expensive operations -- Use database connection pooling -- Monitor and optimize slow queries -- Use pagination for large datasets +## Disabling Subsystems -## Troubleshooting +Most major subsystems toggle via env vars rather than code changes: -### Common Issues +| Subsystem | Toggle | Effect | +|------------------|---------------------------------------|---------------------------------------------| +| Cache | `CACHE_ENABLED=false` | `@cache` becomes a no-op | +| Client cache | `CLIENT_CACHE_ENABLED=false` | Middleware doesn't mount | +| Rate limiter | `RATE_LIMITER_ENABLED=false` | `check_rate_limit` returns immediately | +| Background tasks | Don't run the worker | The broker is created but no consumer | +| Admin panel | `ADMIN_ENABLED=false` | `/admin` is unmounted | +| Documentation | `OPENAPI_URL=` | Disables `/docs` and `/redoc` | -**Import Errors**: Ensure all new models are imported in `__init__.py` files +Removing a subsystem entirely (deleting the code) is rare and usually wrong — leaving it disabled costs nothing. -**Migration Failures**: Check model definitions and relationships before generating migrations +## Common Mistakes -**Test Failures**: Verify test database configuration and isolation +### "Auto-import" gotchas -**Performance Issues**: Check for N+1 queries and missing database indexes +The boilerplate uses `import_models("src.modules")` in Alembic to discover models. **The discovery walks `modules/<module>/models.py` only.** If you put models in `modules/<module>/sub/inner.py`, autogenerate won't find them. Either keep models in `models.py` or hand-import the file. -**Authentication Problems**: Verify JWT configuration and token expiration settings +### Forgetting `lazy="selectin"` on a relationship -### Debugging Tips +SQLAdmin runs in async context. A relationship without `lazy="selectin"` raises `MissingGreenlet` when the admin tries to render it. 
Both `User.tier` and other relationships in the boilerplate already use this pattern — copy from those. -- Use FastAPI's automatic interactive docs at `/docs` -- Enable SQL query logging in development -- Use proper logging throughout the application -- Test endpoints with realistic data volumes -- Monitor database performance with query analysis +### Dataclass models without `init=False` on relationships -## Database Migrations +`Base = DeclarativeBase + MappedAsDataclass`. Relationship fields must use `init=False` or they end up in the dataclass `__init__` and crash on insert. See `modules/user/models.py:User.tier` for the pattern. -!!! warning "Important Setup for Docker Users" - If you're using the database in Docker, you need to expose the port to run migrations. Change this in `docker-compose.yml`: - - ```yaml - db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - # -------- replace with comment to run migrations with docker -------- - ports: - - 5432:5432 - # expose: - # - "5432" - ``` - -### Creating Migrations - -!!! warning "Model Import Requirement" - To create tables if you haven't created endpoints yet, ensure you import the models in `src/app/models/__init__.py`. This step is crucial for Alembic to detect new tables. - -While in the `src` folder, run Alembic migrations: - -```bash -# Generate migration file -uv run alembic revision --autogenerate -m "Description of changes" - -# Apply migrations -uv run alembic upgrade head -``` +### Catching exceptions too broadly in routes -!!! note "Without uv" - If you don't have uv, run `pip install alembic` first, then use `alembic` commands directly. +The route layer catches domain errors (`ResourceNotFoundError`, `PermissionDeniedError`, etc.) and re-raises specific HTTP exceptions. Don't catch them inside the service — services raise; routes translate. 
The `handle_exception` helper in `modules/common/utils/error_handler.py` does the translation; routes call it as a fallback for unexpected errors. -### Migration Workflow +### Cache decorators without `request: Request` -1. **Make Model Changes** - Modify your SQLAlchemy models -2. **Import Models** - Ensure models are imported in `src/app/models/__init__.py` -3. **Generate Migration** - Run `alembic revision --autogenerate` -4. **Review Migration** - Check the generated migration file in `src/migrations/versions/` -5. **Apply Migration** - Run `alembic upgrade head` -6. **Test Changes** - Verify your changes work as expected +The `@cache` decorator inspects `request.method` to decide read vs invalidate. The first parameter of every decorated route must be `request: Request`. See [Caching → Server-Side Cache](caching/redis-cache.md) for the rest of the contract. -### Common Migration Tasks +## Key Files -#### Adding a New Model +| Component | Location | +|------------------------------|-------------------------------------------------------------| +| App factory / middleware order | `backend/src/infrastructure/app_factory.py` | +| Settings | `backend/src/infrastructure/config/settings.py` | +| Lifespan / startup | `backend/src/infrastructure/app_factory.py:lifespan_factory`| +| Database session | `backend/src/infrastructure/database/session.py` | +| Module template (reference) | `backend/src/modules/user/` | +| Pre-commit | `.pre-commit-config.yaml` | +| pyproject (lint / type / test) | `backend/pyproject.toml` | -```python -# 1. Create the model file (e.g., src/app/models/category.py) -from sqlalchemy import String -from sqlalchemy.orm import Mapped, mapped_column - -from app.core.db.database import Base - -class Category(Base): - __tablename__ = "categories" - - id: Mapped[int] = mapped_column(primary_key=True) - name: Mapped[str] = mapped_column(String(50)) - description: Mapped[str] = mapped_column(String(255), nullable=True) -``` - -```python -# 2. 
Import in src/app/models/__init__.py -from .user import User -from .post import Post -from .tier import Tier -from .rate_limit import RateLimit -from .category import Category # Add this line -``` - -```bash -# 3. Generate and apply migration -cd src -uv run alembic revision --autogenerate -m "Add categories table" -uv run alembic upgrade head -``` - -#### Modifying Existing Models - -```python -# 1. Modify your model -class User(Base): - # ... existing fields ... - bio: Mapped[str] = mapped_column(String(500), nullable=True) # New field -``` - -```bash -# 2. Generate migration -uv run alembic revision --autogenerate -m "Add bio field to users" - -# 3. Review the generated migration file -# 4. Apply migration -uv run alembic upgrade head -``` +## Next Steps -This guide provides the foundation for extending and customizing the FastAPI boilerplate. For specific implementation details, refer to the existing code examples throughout the boilerplate. \ No newline at end of file +- **[Project Structure](project-structure.md)** — full layout walkthrough +- **[Testing](testing.md)** — test patterns and infrastructure +- **[Production](production.md)** — deployment and hardening checklist diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md index 81a754d7..9d85e096 100644 --- a/docs/user-guide/index.md +++ b/docs/user-guide/index.md @@ -27,13 +27,13 @@ This guide covers all aspects of working with the FastAPI Boilerplate: - **[API Versioning](api/versioning.md)** - Manage API versions and backward compatibility ### Security & Authentication -- **[Authentication Overview](authentication/index.md)** - Secure your API with JWT authentication -- **[JWT Tokens](authentication/jwt-tokens.md)** - Understand access and refresh token management +- **[Authentication Overview](authentication/index.md)** - Secure your API with session-based auth, OAuth, and API keys +- **[Sessions](authentication/sessions.md)** - Server-side sessions with HTTP-only cookies and CSRF 
protection - **[User Management](authentication/user-management.md)** - Handle user registration, login, and profiles - **[Permissions](authentication/permissions.md)** - Implement role-based access control ### Admin Panel -Powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin) - a modern admin interface generator for FastAPI. +Powered by [SQLAdmin](https://aminalaee.dev/sqladmin/) - a flexible admin interface generated from your SQLAlchemy models. - **[Admin Panel Overview](admin-panel/index.md)** - Web-based database management interface - **[Configuration](admin-panel/configuration.md)** - Setup, session backends, and environment variables @@ -47,7 +47,7 @@ Powered by [CRUDAdmin](https://github.com/benavlabs/crudadmin) - a modern admin - **[Cache Strategies](caching/cache-strategies.md)** - Advanced caching patterns and invalidation ### Background Processing -- **[Background Tasks](background-tasks/index.md)** - Handle long-running operations with ARQ +- **[Background Tasks](background-tasks/index.md)** - Handle long-running operations with Taskiq (Redis or RabbitMQ broker) ### Rate Limiting - **[Rate Limiting](rate-limiting/index.md)** - Protect your API from abuse with Redis-based rate limiting diff --git a/docs/user-guide/production.md b/docs/user-guide/production.md index 3dfdd8be..61f76fed 100644 --- a/docs/user-guide/production.md +++ b/docs/user-guide/production.md @@ -1,673 +1,363 @@ # Production Deployment -This guide covers deploying the FastAPI boilerplate to production with proper performance, security, and reliability configurations. +This page is the production hardening checklist for a FastAPI-boilerplate deployment. It covers the boilerplate's built-in production validators, env-var hygiene, the multi-stage Dockerfile that ships, and the operational decisions you'll make once. 
-## Production Architecture +## The Production Validator -The recommended production setup uses: +When `ENVIRONMENT=production`, `infrastructure/security/production_validator.py` runs at startup and refuses to boot the app on critical issues. Two tiers: -- **Gunicorn** - WSGI server managing Uvicorn workers -- **Uvicorn Workers** - ASGI server handling FastAPI requests -- **NGINX** - Reverse proxy and load balancer -- **PostgreSQL** - Production database -- **Redis** - Caching and background tasks -- **Docker** - Containerization +### Critical (raises `ProductionSecurityError`, app exits) -## Environment Configuration +The app **will not start** if any of these is true: -### Production Environment Variables +- **`SECRET_KEY` is insecure.** Default placeholder, < 32 chars, contains an obvious string ("password", "secret", "test", "dev", "default", etc.), or has a predictable pattern (repetition, all-same-char). +- **`POSTGRES_PASSWORD=postgres`** (the well-known default). Attackers try this first. +- **`POSTGRES_PASSWORD` is empty.** Database is unprotected. -Update your `.env` file for production: +### Warnings (logged, app starts) -```bash -# ------------- environment ------------- -ENVIRONMENT="production" +These don't block startup but you should fix them before the app sees real traffic: -# ------------- app settings ------------- -APP_NAME="Your Production App" -DEBUG=false +- **Redis without a password** (`CACHE_REDIS_PASSWORD`, `SESSION_REDIS_PASSWORD`, `RATE_LIMITER_REDIS_PASSWORD`, `TASKIQ_REDIS_PASSWORD` all unset) +- **`CORS_ORIGINS=*`** — allows any origin to send credentialed requests +- **`DEBUG=true`** — exposes stack traces in error responses +- **API docs (`/docs`, `/redoc`) reachable** — see "Documentation" below +- **Session config too loose** (cookies not marked `Secure`, very long max-age, etc.) 
+- **Weak admin credentials** (default username/password patterns) -# ------------- database ------------- -POSTGRES_USER="prod_user" -POSTGRES_PASSWORD="secure_production_password" -POSTGRES_SERVER="db" # or your database host -POSTGRES_PORT=5432 -POSTGRES_DB="prod_database" - -# ------------- redis ------------- -REDIS_CACHE_HOST="redis" -REDIS_CACHE_PORT=6379 -REDIS_QUEUE_HOST="redis" -REDIS_QUEUE_PORT=6379 -REDIS_RATE_LIMIT_HOST="redis" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- security ------------- -SECRET_KEY="your-super-secure-secret-key-generate-with-openssl" -ALGORITHM="HS256" -ACCESS_TOKEN_EXPIRE_MINUTES=30 -REFRESH_TOKEN_EXPIRE_DAYS=7 - -# ------------- logging ------------- -LOG_LEVEL="INFO" -``` - -### Docker Configuration +The validator is **not** a substitute for a thorough threat model — it catches the most common deployment mistakes, not all of them. Treat it as a smoke test. -#### Production Dockerfile +## Production `.env` Checklist -```dockerfile -FROM python:3.11-slim +Generate a `.env` for production from `backend/.env.example`. 
The bare-minimum changes: -WORKDIR /code +```env +# Environment +ENVIRONMENT=production +DEBUG=false -# Install system dependencies -RUN apt-get update && apt-get install -y \ - gcc \ - && rm -rf /var/lib/apt/lists/* +# App +APP_NAME="Your Production App" +VERSION=1.0.0 -# Install UV -RUN pip install uv +# Secrets — generate a fresh, unique value +SECRET_KEY= -# Copy dependency files -COPY pyproject.toml uv.lock ./ +# Database — never use defaults +POSTGRES_USER=app_prod +POSTGRES_PASSWORD= +POSTGRES_SERVER= +POSTGRES_PORT=5432 +POSTGRES_DB=app_prod + +# Auto-creating tables in prod is dangerous; use Alembic instead +CREATE_TABLES_ON_STARTUP=false + +# Migrations: must be opted into, even with the right env +CONFIRM_PRODUCTION_MIGRATION=yes # only when actively running migrations + +# CORS — list the exact origins that can call your API +CORS_ORIGINS=["https://app.example.com","https://admin.example.com"] + +# Cache (Redis or Memcached) +CACHE_ENABLED=true +CACHE_BACKEND=redis +CACHE_REDIS_HOST= +CACHE_REDIS_PASSWORD= + +# Sessions +SESSION_BACKEND=redis +SESSION_REDIS_HOST= +SESSION_REDIS_PASSWORD= +SESSION_SECURE_COOKIES=true # required when serving over HTTPS +CSRF_ENABLED=true + +# Rate limiting +RATE_LIMITER_ENABLED=true +RATE_LIMITER_BACKEND=redis +RATE_LIMITER_REDIS_HOST= +RATE_LIMITER_REDIS_PASSWORD= +RATE_LIMITER_FAIL_OPEN=true # let traffic through when Redis errors + +# Taskiq +TASKIQ_ENABLED=true +TASKIQ_BROKER_TYPE=redis +TASKIQ_REDIS_HOST= +TASKIQ_REDIS_PASSWORD= + +# Admin panel +ADMIN_ENABLED=false # safest default in prod +ADMIN_USERNAME= +ADMIN_PASSWORD= + +# Documentation +OPENAPI_URL= # disable /docs and /redoc -# Install dependencies -RUN uv sync --frozen --no-dev +# Logging +LOG_LEVEL=INFO +LOG_FORMAT=json +``` -# Copy application code -COPY src/ ./src/ +Notes worth calling out: -# Create non-root user -RUN useradd --create-home --shell /bin/bash app \ - && chown -R app:app /code -USER app +- **`CREATE_TABLES_ON_STARTUP=false`** — production 
should run schema changes via Alembic, not by `Base.metadata.create_all` on every boot. +- **`CONFIRM_PRODUCTION_MIGRATION=yes`** — `migrations/env.py` calls `validate_production_migration` which **refuses** to run migrations against production unless this is explicitly set. Ship deployment commands with it; never set it in long-lived env files. +- **`SESSION_SECURE_COOKIES=true`** — cookies are sent only over HTTPS. Required if you're terminating TLS at a proxy. +- **`OPENAPI_URL=`** (empty) disables the Swagger UI and OpenAPI spec entirely. The validator warns when this is exposed in production. -# Production command with Gunicorn -CMD ["uv", "run", "gunicorn", "src.app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "--bind", "0.0.0.0:8000"] -``` +See [Configuration → Environment-Specific](configuration/environment-specific.md) for the full per-environment matrix. -#### Production Docker Compose +## Generating a Strong `SECRET_KEY` -```yaml -version: '3.8' - -services: - web: - build: . - ports: - - "8000:8000" - env_file: - - ./src/.env - depends_on: - - db - - redis - restart: unless-stopped - deploy: - resources: - limits: - memory: 1G - reservations: - memory: 512M - - worker: - build: . 
- command: uv run arq src.app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env - depends_on: - - db - - redis - restart: unless-stopped - deploy: - replicas: 2 - - db: - image: postgres:15 - volumes: - - postgres_data:/var/lib/postgresql/data/ - environment: - - POSTGRES_USER=${POSTGRES_USER} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - - POSTGRES_DB=${POSTGRES_DB} - restart: unless-stopped - deploy: - resources: - limits: - memory: 2G - reservations: - memory: 1G - - redis: - image: redis:7-alpine - restart: unless-stopped - volumes: - - redis_data:/data - deploy: - resources: - limits: - memory: 512M - reservations: - memory: 256M - - nginx: - image: nginx:alpine - ports: - - "80:80" - - "443:443" - volumes: - - ./nginx/nginx.conf:/etc/nginx/nginx.conf - - ./nginx/ssl:/etc/nginx/ssl - depends_on: - - web - restart: unless-stopped - -volumes: - postgres_data: - redis_data: +```bash +openssl rand -hex 32 ``` -## Gunicorn Configuration +Or: -### Basic Gunicorn Setup +```bash +python -c "import secrets; print(secrets.token_hex(32))" +``` -Create `gunicorn.conf.py`: +Never reuse the dev key. Never commit prod keys. Pull from a secrets manager (AWS Secrets Manager, HashiCorp Vault, Doppler, etc.) at deploy time — `.env` files on disk are an audit-trail problem. 
-```python -import multiprocessing +## The Production Dockerfile -# Server socket -bind = "0.0.0.0:8000" -backlog = 2048 +The boilerplate ships a multi-stage `backend/Dockerfile`: -# Worker processes -workers = multiprocessing.cpu_count() * 2 + 1 -worker_class = "uvicorn.workers.UvicornWorker" -worker_connections = 1000 -max_requests = 1000 -max_requests_jitter = 50 +| Stage | Purpose | +|--------------------|--------------------------------------------------| +| `requirements-stage` | Exports pinned requirements from `uv.lock` | +| `base` | Production base — copies source, installs deps | +| `dev` | Adds dev deps, mounts tests, runs `fastapi dev` | +| `migrate` | Runs `alembic upgrade head` and exits | +| `prod` | Runs `fastapi run` with configurable workers | -# Restart workers after this many requests, with up to 50 jitter -preload_app = True +To build the production image: -# Logging -accesslog = "-" -errorlog = "-" -loglevel = "info" -access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(D)s' - -# Process naming -proc_name = "fastapi-boilerplate" - -# Server mechanics -daemon = False -pidfile = "/tmp/gunicorn.pid" -user = None -group = None -tmp_upload_dir = None - -# SSL (if terminating SSL at application level) -# keyfile = "/path/to/keyfile" -# certfile = "/path/to/certfile" - -# Worker timeout -timeout = 30 -keepalive = 2 - -# Memory management -max_requests = 1000 -max_requests_jitter = 50 -preload_app = True +```bash +docker build --target prod -t myapp-api:1.0.0 -f backend/Dockerfile backend/ +``` -### Running with Gunicorn +To run a one-off migration, build the `migrate` stage as its own image and run that (`--target` is a `docker build` flag — `docker run` has no such option): ```bash -# Basic command -uv run gunicorn src.app.main:app -w 4 -k uvicorn.workers.UvicornWorker +docker build --target migrate -t myapp-migrate:1.0.0 -f backend/Dockerfile backend/ +docker run --rm \ + --env-file backend/.env.production \ + -e CONFIRM_PRODUCTION_MIGRATION=yes \ + myapp-migrate:1.0.0 +``` -# With configuration file -uv run gunicorn src.app.main:app -c gunicorn.conf.py +The `prod` stage's `CMD` is: -# With specific bind address 
-uv run gunicorn src.app.main:app -w 4 -k uvicorn.workers.UvicornWorker --bind 0.0.0.0:8000 +```dockerfile +CMD ["sh", "-c", "fastapi run interfaces/main.py --host 0.0.0.0 --port 8000 --workers $WORKERS"] ``` -## NGINX Configuration - -### Single Server Setup - -Create `nginx/nginx.conf`: - -```nginx -events { - worker_connections 1024; -} - -http { - upstream fastapi_backend { - server web:8000; - } - - server { - listen 80; - server_name your-domain.com; - - # Redirect HTTP to HTTPS - return 301 https://$server_name$request_uri; - } - - server { - listen 443 ssl http2; - server_name your-domain.com; - - # SSL Configuration - ssl_certificate /etc/nginx/ssl/cert.pem; - ssl_certificate_key /etc/nginx/ssl/key.pem; - ssl_protocols TLSv1.2 TLSv1.3; - ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384; - ssl_prefer_server_ciphers off; - - # Security headers - add_header X-Frame-Options DENY; - add_header X-Content-Type-Options nosniff; - add_header X-XSS-Protection "1; mode=block"; - add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always; - - # Gzip compression - gzip on; - gzip_vary on; - gzip_min_length 10240; - gzip_proxied expired no-cache no-store private must-revalidate auth; - gzip_types - text/plain - text/css - text/xml - text/javascript - application/javascript - application/xml+rss - application/json; - - # Rate limiting - limit_req_zone $binary_remote_addr zone=api:10m rate=10r/s; - - location / { - limit_req zone=api burst=20 nodelay; - - proxy_pass http://fastapi_backend; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - - # Timeouts - proxy_connect_timeout 60s; - proxy_send_timeout 60s; - proxy_read_timeout 60s; - - # Buffer settings - proxy_buffering on; - proxy_buffer_size 8k; - proxy_buffers 8 8k; - } - - # Health check endpoint (no rate limiting) - location /health { - 
proxy_pass http://fastapi_backend; - proxy_set_header Host $host; - access_log off; - } - - # Ready check endpoint (no rate limiting) - location /ready { - proxy_pass http://fastapi_backend; - proxy_set_header Host $host; - access_log off; - } - - # Static files (if any) - location /static/ { - alias /code/static/; - expires 1y; - add_header Cache-Control "public, immutable"; - } - } -} +`fastapi run` is FastAPI's production-friendly equivalent to `uvicorn` — it sets sane defaults (no `--reload`, properly configured logging, etc.) and is what the framework itself recommends. Override the worker count with `WORKERS` (defaults to 1): + +```bash +docker run -d \ + --env-file .env.production \ + -e WORKERS=4 \ + -p 8000:8000 \ + myapp-api:1.0.0 ``` -### Simple Single Server (default.conf) +### Picking a Worker Count -For basic production setup, create `default.conf`: +Rough rule: `2 × CPU cores + 1` for I/O-bound workloads, fewer for CPU-bound. Each worker is a separate process; they don't share memory. Caches and DB pools are per-worker — bring `DATABASE_POOL_SIZE` down if you're scaling workers up. -```nginx -# ---------------- Running With One Server ---------------- -server { - listen 80; +For most APIs, **don't reach for gunicorn**. `fastapi run` (which wraps uvicorn) handles process management fine. Add a process supervisor (Kubernetes, ECS, systemd, supervisord) at the orchestration layer. 
- location / { - proxy_pass http://web:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} -``` +## Running the Background Worker -### Load Balancing Multiple Servers - -For horizontal scaling with multiple FastAPI instances: - -```nginx -# ---------------- To Run with Multiple Servers ---------------- -upstream fastapi_app { - server fastapi1:8000; # Replace with actual server names - server fastapi2:8000; - # Add more servers as needed -} - -server { - listen 80; - - location / { - proxy_pass http://fastapi_app; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} -``` +In production, run a separate worker container/service: -### Advanced Load Balancing - -For production with advanced features: - -```nginx -upstream fastapi_backend { - least_conn; - server web1:8000 weight=3; - server web2:8000 weight=2; - server web3:8000 weight=1; - - # Health checks - keepalive 32; -} - -server { - listen 443 ssl http2; - server_name your-domain.com; - - location / { - proxy_pass http://fastapi_backend; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - - # Connection settings for load balancing - proxy_http_version 1.1; - proxy_set_header Connection ""; - } -} +```bash +docker run -d \ + --env-file .env.production \ + --target base \ + myapp-api:1.0.0 \ + sh -c "taskiq worker infrastructure.taskiq.worker:default_broker --workers 4" ``` -### SSL Certificate Setup +In Kubernetes / ECS, that's a separate `Deployment` / `Service` with its own scaling. The worker doesn't accept HTTP traffic — it only consumes from the broker. 
-#### Using Let's Encrypt (Certbot)
+Tune via:
 
-```bash
-# Install certbot
-sudo apt-get update
-sudo apt-get install certbot python3-certbot-nginx
+- `--workers <N>` — process count
+- `TASKIQ_WORKER_CONCURRENCY` — async tasks per process
+- `TASKIQ_MAX_TASKS_PER_WORKER` — recycle a worker after N tasks (defaults to 1000) to bound memory leaks
 
-# Obtain certificate
-sudo certbot --nginx -d your-domain.com
+See [Background Tasks](background-tasks/index.md) for the full Taskiq setup.
 
-# Auto-renewal (add to crontab)
-0 2 * * 1 /usr/bin/certbot renew --quiet
-```
+## Database Migrations in Production
 
-#### Manual SSL Setup
+The migration env (`backend/migrations/env.py`) calls `validate_production_migration` at the start of every Alembic run. In production:
 
 ```bash
-# Generate self-signed certificate (development only)
-mkdir -p nginx/ssl
-openssl req -x509 -nodes -days 365 -newkey rsa:2048 \
-  -keyout nginx/ssl/key.pem \
-  -out nginx/ssl/cert.pem
+# Will FAIL — refuses to run without confirmation
+CONFIRM_PRODUCTION_MIGRATION= alembic upgrade head
+
+# OK — explicitly confirmed
+CONFIRM_PRODUCTION_MIGRATION=yes alembic upgrade head
 ```
 
-## Production Best Practices
+This is intentional: `alembic upgrade head` should not be a routine boot-time command. Run migrations as a deliberate step in your deployment pipeline:
 
-### Database Optimization
+1. Build the new image
+2. Run the migration container (`--target migrate` with `CONFIRM_PRODUCTION_MIGRATION=yes`)
+3. **Then** roll out the API container
 
-#### PostgreSQL Configuration
+If your pipeline runs migrations after rollout, you can briefly serve a new code version against an old schema. Don't do that.
-```sql
--- Optimize PostgreSQL for production
-ALTER SYSTEM SET shared_buffers = '256MB';
-ALTER SYSTEM SET effective_cache_size = '1GB';
-ALTER SYSTEM SET random_page_cost = 1.1;
-ALTER SYSTEM SET effective_io_concurrency = 200;
-SELECT pg_reload_conf();
-```
+For zero-downtime deploys, do schema changes in two phases — see [Database → Migrations](database/migrations.md) for the expand/contract pattern.
 
-#### Connection Pooling
+## TLS, Reverse Proxy, and CORS
 
-```python
-# src/app/core/db/database.py
-from sqlalchemy.ext.asyncio import create_async_engine
-
-# Production database settings
-engine = create_async_engine(
-    DATABASE_URL,
-    echo=False,  # Disable in production
-    pool_size=20,
-    max_overflow=0,
-    pool_pre_ping=True,
-    pool_recycle=3600,
-)
+The boilerplate doesn't terminate TLS — that's your reverse proxy's job (Nginx, Caddy, ALB, Cloud Run's built-in TLS, etc.). Common deployment shapes:
+
+```text
+[Client] → HTTPS → [Reverse Proxy] → HTTP → [API container]
+                                   → HTTP → [API container]
+                                   → HTTP → [API container]
 ```
 
-### Redis Configuration
+The proxy must:
 
-#### Redis Production Settings
+- Forward `X-Forwarded-Proto: https` and `X-Forwarded-For: <client-ip>` (honored via uvicorn's `--proxy-headers`; make sure the proxy's IP is trusted through `--forwarded-allow-ips` — the default trusts only localhost)
+- Pass through cookies (`Set-Cookie`) untouched
+- Set `Host` correctly so the API's URL building works
 
-```bash
-# redis.conf adjustments
-maxmemory 512mb
-maxmemory-policy allkeys-lru
-save 900 1
-save 300 10
-save 60 10000
-```
+`CORS_ORIGINS` should list your **frontend** origins, not the API origin. Wildcard (`*`) is incompatible with credentialed requests anyway — the validator warns on it for a reason.
-### Application Optimization +## Logging in Production -#### Logging Configuration +Use JSON log output for ingestion into your log aggregator: -```python -# src/app/core/config.py -import logging -from pythonjsonlogger import jsonlogger - -def setup_production_logging(): - logHandler = logging.StreamHandler() - formatter = jsonlogger.JsonFormatter( - "%(asctime)s %(name)s %(levelname)s %(message)s" - ) - logHandler.setFormatter(formatter) - - logger = logging.getLogger() - logger.addHandler(logHandler) - logger.setLevel(logging.INFO) - - # Reduce noise from third-party libraries - logging.getLogger("uvicorn.access").setLevel(logging.WARNING) - logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) +```env +LOG_LEVEL=INFO +LOG_FORMAT=json ``` -#### Performance Monitoring +The boilerplate's logger (`infrastructure/logging/`) attaches a correlation ID per request — it appears in every log line for that request, including downstream Taskiq tasks if you propagate it. Useful for tying together "user X reported error Y" with the actual server-side trace. -```python -# src/app/middleware/monitoring.py -import time -from fastapi import Request -from starlette.middleware.base import BaseHTTPMiddleware - -class MonitoringMiddleware(BaseHTTPMiddleware): - async def dispatch(self, request: Request, call_next): - start_time = time.time() - - response = await call_next(request) - - process_time = time.time() - start_time - response.headers["X-Process-Time"] = str(process_time) - - # Log slow requests - if process_time > 1.0: - logger.warning(f"Slow request: {request.method} {request.url} - {process_time:.2f}s") - - return response -``` +For lower-noise production logs: -### Security Configuration +- `LOG_LEVEL=INFO` is the right default. `WARNING` skips request logs, which makes incident debugging harder. +- Sample low-information lines (health-check polls, etc.) at the proxy or aggregator, not in the app. 
-#### Environment Security +For OpenTelemetry / APM integration, hook into the FastAPI app at startup — there's no built-in hook in the boilerplate. -```python -# src/app/core/config.py -class ProductionSettings(Settings): - # Hide docs in production - ENVIRONMENT: str = "production" - - # Security settings - SECRET_KEY: str = Field(..., min_length=32) - ALLOWED_HOSTS: list[str] = ["your-domain.com", "api.your-domain.com"] - - # Database security - POSTGRES_PASSWORD: str = Field(..., min_length=16) - - class Config: - case_sensitive = True +## Health and Readiness + +The boilerplate ships a `GET /api/v1/health` endpoint. Use it as your liveness probe: + +```yaml +# Kubernetes / Docker probe +livenessProbe: + httpGet: + path: /api/v1/health + port: 8000 + initialDelaySeconds: 10 + periodSeconds: 10 ``` -#### Rate Limiting +For a **readiness** probe (does the app actually have working DB / Redis connections?), the built-in health check is too thin — it returns 200 immediately. If you want strict readiness, add a richer endpoint that probes the database and cache: ```python -# Adjust rate limits for production -DEFAULT_RATE_LIMIT_LIMIT = 100 # requests per period -DEFAULT_RATE_LIMIT_PERIOD = 3600 # 1 hour +@router.get("/ready") +async def ready(db: Annotated[AsyncSession, Depends(async_session)]) -> dict[str, str]: + await db.execute(text("SELECT 1")) + await cache_get(key="readiness_probe") # short-circuit; we don't care about value + return {"status": "ready"} ``` -### Deployment Process +Drop it into a private health-only router that's not gated by the rate limiter. -#### CI/CD Pipeline (GitHub Actions) +## Hardening Checklist -```yaml -# .github/workflows/deploy.yml -name: Deploy to Production - -on: - push: - branches: [main] - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Build and push Docker image - env: - DOCKER_REGISTRY: your-registry.com - run: | - docker build -t $DOCKER_REGISTRY/fastapi-app:latest . 
- docker push $DOCKER_REGISTRY/fastapi-app:latest - - - name: Deploy to production - run: | - # Your deployment commands - ssh production-server "docker compose pull && docker compose up -d" -``` +Before shipping: -#### Zero-Downtime Deployment +- [ ] `ENVIRONMENT=production` in the runtime +- [ ] `SECRET_KEY` is fresh, > 32 chars, never seen anywhere else +- [ ] `POSTGRES_PASSWORD` is unique and pulled from a secrets manager +- [ ] `DEBUG=false` (validator warns otherwise) +- [ ] `CORS_ORIGINS` lists only your frontend origins; no `*` +- [ ] `OPENAPI_URL=` (empty) — `/docs` and `/redoc` are not exposed +- [ ] `SESSION_SECURE_COOKIES=true` and you're terminating TLS at a proxy +- [ ] `CSRF_ENABLED=true` +- [ ] All Redis instances have `*_REDIS_PASSWORD` set +- [ ] `ADMIN_ENABLED=false` (or restricted at the network layer) +- [ ] Database migrations run via the `migrate` Dockerfile stage with `CONFIRM_PRODUCTION_MIGRATION=yes` +- [ ] `CREATE_TABLES_ON_STARTUP=false` +- [ ] Pre-commit and CI are running on every PR (lint, mypy, tests) +- [ ] Backups configured for the production database and Redis (if you're using Redis for sessions / state you can't lose) +- [ ] Monitoring set up: error rates, latency p95/p99, DB connection saturation, queue depth, Redis memory -```bash -#!/bin/bash -# deploy.sh - Zero-downtime deployment script +## Scaling Considerations -# Pull new images -docker compose pull +### API instances -# Start new containers -docker compose up -d --no-deps --scale web=2 web +Horizontal scaling is straightforward — add more `prod` containers behind your load balancer. Sessions are stored in Redis (when `SESSION_BACKEND=redis`), so any instance can serve any user. -# Wait for health check -sleep 30 +If you're stuck on `SESSION_BACKEND=memory`, you can't horizontally scale safely: each instance has its own session table. Switch backends before scaling. 
-# Stop old containers
-docker compose up -d --no-deps --scale web=1 web
+### Database
 
-# Clean up
-docker system prune -f
-```
+Watch `database_pool_size × api_workers + worker_concurrency × taskiq_workers` against your Postgres `max_connections`. Common pitfall: 4 API workers × 10 pool size = 40 connections per API replica, easy to blow past 100 connection cap with two replicas + Taskiq.
 
-### Monitoring and Alerting
+Use a connection pooler (PgBouncer, RDS Proxy) at scale. The boilerplate's `DATABASE_URL` accepts a pooler endpoint identically.
 
-#### Basic Monitoring Setup
+### Redis
 
-```python
-# Basic metrics collection
-import psutil
-from fastapi import APIRouter
-
-router = APIRouter()
-
-@router.get("/metrics")
-async def get_metrics():
-    return {
-        "cpu_percent": psutil.cpu_percent(),
-        "memory_percent": psutil.virtual_memory().percent,
-        "disk_usage": psutil.disk_usage('/').percent
-    }
-```
+The defaults use separate DB numbers per concern (`CACHE_REDIS_DB=0`, `SESSION_REDIS_DB=1`, `RATE_LIMITER_REDIS_DB=1`, `TASKIQ_REDIS_DB=3` — note sessions and the rate limiter share DB 1 by default) on the **same** Redis instance. Fine for small deployments. At scale, split sessions and the cache onto different Redis clusters — sessions are small and durability-sensitive; the cache is large, eviction-tolerant, and high-traffic. Mixing them puts your sessions at risk during cache memory pressure.
 
-### Backup Strategy
+### Taskiq workers
 
-#### Database Backup
+Worker scaling is independent of API scaling. If your tasks become a bottleneck, scale the worker `Deployment` without touching the API.
 
-```bash
-#!/bin/bash
-# backup-db.sh
-BACKUP_DIR="/backups"
-DATE=$(date +%Y%m%d_%H%M%S)
+## Common Production Issues
 
-pg_dump -h localhost -U $POSTGRES_USER $POSTGRES_DB | gzip > $BACKUP_DIR/backup_$DATE.sql.gz
+### "App fails to boot with `ProductionSecurityError`"
 
-# Keep only last 7 days of backups
-find $BACKUP_DIR -name "backup_*.sql.gz" -mtime +7 -delete
-```
+Read the message — it tells you which check failed. Don't bypass it; fix the underlying config.
+
+### "Sessions invalidate after every deploy"
+
+You're on `SESSION_BACKEND=memory`. Switch to `redis` (or `memcached`) and add the relevant `*_REDIS_*` env vars.
+
+### "Sudden burst of 429s after a config change"
+
+Check that your rate-limit rule rows still match the routes. After path renames or sanitization rule changes, the lookup may miss and apply the (often tighter) `DEFAULT_RATE_LIMIT_LIMIT` instead.
+
+### "Cache backend not available" warnings under load
 
-## Troubleshooting
+Pool exhaustion. Bump `CACHE_REDIS_POOL_SIZE` (default 10), check Redis memory pressure, look for connection leaks in your application code.
 
-### Common Production Issues
+### "Tasks queue but no worker picks them up"
 
-**High Memory Usage**: Check for memory leaks, optimize database queries, adjust worker counts
+The worker process isn't running, isn't pointed at the same Redis, or hasn't imported the task module. See [Background Tasks → Troubleshooting](background-tasks/index.md#troubleshooting).
 
-**Slow Response Times**: Enable query logging, check database indexes, optimize N+1 queries
+### "404 on `/admin` after deploy"
 
-**Connection Timeouts**: Adjust proxy timeouts, check database connection pool settings
+`ADMIN_ENABLED=false`. Either enable it (and lock it down at the network layer) or run admin tasks through scripts.
-**SSL Certificate Issues**: Verify certificate paths, check renewal process +## Key Files -### Performance Tuning +| Component | Location | +|-----------------------------------|-------------------------------------------------------------------| +| Production validator | `backend/src/infrastructure/security/production_validator.py` | +| Migration validator | `backend/migrations/env.py:validate_production_migration` | +| Multi-stage Dockerfile | `backend/Dockerfile` | +| Settings | `backend/src/infrastructure/config/settings.py` | +| App factory / lifespan | `backend/src/infrastructure/app_factory.py` | -- Monitor database query performance -- Implement proper caching strategies -- Use connection pooling -- Optimize Docker image layers -- Configure proper resource limits +## Next Steps -This production guide provides a solid foundation for deploying the FastAPI boilerplate to production environments with proper performance, security, and reliability configurations. \ No newline at end of file +- **[Configuration → Environment-Specific](configuration/environment-specific.md)** — per-environment env-var matrix +- **[Database → Migrations](database/migrations.md)** — zero-downtime schema-change patterns +- **[Authentication → Sessions](authentication/sessions.md)** — production session configuration +- **[Testing](testing.md)** — the test setup that ships with the boilerplate diff --git a/docs/user-guide/project-structure.md b/docs/user-guide/project-structure.md index be47b728..49e5ec58 100644 --- a/docs/user-guide/project-structure.md +++ b/docs/user-guide/project-structure.md @@ -1,296 +1,264 @@ # Project Structure -Understanding the project structure is essential for navigating the FastAPI Boilerplate effectively. This guide explains the organization of the codebase, the purpose of each directory, and how components interact with each other. 
+The codebase follows a three-layer architecture (**interfaces / infrastructure / modules**) with **vertical-slice modules** — each feature owns its models, schemas, CRUD, service, and routes in one folder. This guide explains how everything is organized and where to put new code. -## Overview +## Repository Root -The FastAPI Boilerplate follows a clean, modular architecture that separates concerns and promotes maintainability. The structure is designed to scale from simple APIs to complex applications while maintaining code organization and clarity. +```text +fastapi-boilerplate/ +├── backend/ # Python project root (see below) +├── docs/ # zensical documentation +├── .github/ # CI workflows +├── README.md +└── LICENSE.md +``` + +The Python project lives entirely under `backend/`. If you ever add a frontend, it would sit alongside as `frontend/`. -## Root Directory Structure +## Backend Layout ```text -FastAPI-boilerplate/ -├── Dockerfile # Container configuration -├── docker-compose.yml # Multi-service orchestration -├── pyproject.toml # Project configuration and dependencies -├── uv.lock # Dependency lock file -├── README.md # Project documentation -├── LICENSE.md # License information -├── tests/ # Test suite -├── docs/ # Documentation -└── src/ # Source code +backend/ +├── pyproject.toml # Dependencies and tooling config +├── uv.lock # Locked dependency versions +├── Dockerfile # Container image for the app +├── alembic.ini # Alembic migration config +├── .env.example # Reference for environment variables +├── migrations/ # Alembic migrations +│ ├── env.py +│ ├── script.py.mako +│ └── versions/ +├── scripts/ # One-off setup scripts +│ ├── create_first_superuser.py +│ ├── create_first_tier.py +│ ├── create_tables.py +│ └── setup_initial_data.py +├── src/ # Application source (the three layers below) +└── tests/ # Test suite (unit + integration) ``` ### Configuration Files | File | Purpose | |------|---------| -| `Dockerfile` | Defines the container image for the 
application | -| `docker-compose.yml` | Orchestrates multiple services (API, database, Redis, worker) | -| `pyproject.toml` | Modern Python project configuration with dependencies and metadata | -| `uv.lock` | Locks exact dependency versions for reproducible builds | +| `pyproject.toml` | Project metadata, dependencies (`[project]`), tooling config (ruff, mypy, pytest) | +| `uv.lock` | Locks exact dependency versions for reproducible installs | +| `Dockerfile` | Multi-stage build: requirements export → base → dev/prod/migrate stages | +| `alembic.ini` | Alembic settings (script location, logging) | +| `.env.example` | Documented reference of every environment variable | -## Source Code Structure - -The `src/` directory contains all application code: +## The Three Layers (`src/`) ```text src/ -├── app/ # Main application package -│ ├── main.py # Application entry point -│ ├── api/ # API layer -│ ├── core/ # Core utilities and configurations -│ ├── crud/ # Database operations -│ ├── models/ # SQLAlchemy models -│ ├── schemas/ # Pydantic schemas -│ ├── middleware/ # Custom middleware -│ └── logs/ # Application logs -├── migrations/ # Database migrations -└── scripts/ # Utility scripts +├── interfaces/ # HOW the world talks to the app (HTTP, admin UI) +├── infrastructure/ # WHAT the app uses (DB, cache, auth, taskiq, config) +└── modules/ # WHAT the app IS (vertical-slice feature modules) ``` -## Core Application (`src/app/`) +The flow is **interfaces → modules → infrastructure**: -### Entry Point -- **`main.py`** - FastAPI application instance and configuration +- `interfaces` mounts routers, middleware, and the admin UI. +- `modules` express domain features. Each one is self-contained. +- `infrastructure` provides the cross-cutting plumbing every layer above can reach for. 
-### API Layer (`api/`) -```text -api/ -├── dependencies.py # Shared dependencies -└── v1/ # API version 1 - ├── login.py # Authentication endpoints - ├── logout.py # Logout functionality - ├── users.py # User management - ├── posts.py # Post operations - ├── tasks.py # Background task endpoints - ├── tiers.py # User tier management - └── rate_limits.py # Rate limiting endpoints -``` +Modules don't import each other directly except for the shared `common` module. Interfaces don't contain business logic. Infrastructure doesn't know about specific features. -**Purpose**: Contains all API endpoints organized by functionality and version. +### `src/interfaces/` -### Core System (`core/`) ```text -core/ -├── config.py # Application settings -├── logger.py # Logging configuration -├── schemas.py # Core Pydantic schemas -├── security.py # Security utilities -├── setup.py # Application factory -├── db/ # Database core -├── exceptions/ # Custom exceptions -├── utils/ # Utility functions -└── worker/ # Background worker +interfaces/ +├── main.py # FastAPI app instance + lifespan + middleware setup +├── api/ +│ ├── __init__.py # Mounts /api router +│ └── v1/ +│ └── __init__.py # Mounts /v1 + each module's router +└── admin/ + ├── initialize.py # SQLAdmin setup (mounted at /admin) + ├── auth.py # Admin auth backend + ├── mixins.py + └── views/ # SQLAdmin model views (Tier, User, etc.) ``` -**Purpose**: Houses core functionality, configuration, and shared utilities. +`main.py` is the entry point — `uv run fastapi dev src/interfaces/main.py` starts here. The `v1/__init__.py` aggregator imports each module's `routes` and includes them under the right prefix. 
-#### Database Core (`core/db/`) -```text -db/ -├── database.py # Database connection and session management -├── models.py # Base models and mixins -├── crud_token_blacklist.py # Token blacklist operations -└── token_blacklist.py # Token blacklist model -``` +### `src/infrastructure/` -#### Exceptions (`core/exceptions/`) ```text -exceptions/ -├── cache_exceptions.py # Cache-related exceptions -└── http_exceptions.py # HTTP exceptions +infrastructure/ +├── app_factory.py # Builds the FastAPI app (CORS, GZip, middleware, lifespan) +├── middleware.py # ClientCache, SecurityHeaders, etc. +├── config/ # Settings + Pydantic-driven env loading +│ ├── settings.py +│ └── enums.py +├── database/ # SQLAlchemy engine, session, base model +├── auth/ # Session auth, OAuth, HTTP exceptions, route handlers +│ ├── session/ # Server-side sessions (memory/redis/memcached backends) +│ ├── oauth/ # OAuth provider abstractions (Google, GitHub stub) +│ ├── routes.py # /auth/login, /logout, /oauth/google, /check-auth +│ ├── http_exceptions.py +│ └── utils.py +├── cache/ # Redis/Memcached cache + decorator +│ └── backends/ +├── rate_limit/ # Rate limiter middleware + Redis/Memcached backends +│ └── backends/ +├── taskiq/ # Async task queue (broker, worker entry point, registry) +├── security/ # Production security validator +└── logging/ # Centralized logging configuration ``` -#### Utilities (`core/utils/`) -```text -utils/ -├── cache.py # Caching utilities -├── queue.py # Task queue management -└── rate_limit.py # Rate limiting utilities -``` +`infrastructure/auth/routes.py` is intentionally placed here (instead of in a `modules/auth/` folder) because authentication is structural — every feature relies on it. 
-#### Worker (`core/worker/`) -```text -worker/ -├── settings.py # Worker configuration -└── functions.py # Background task definitions -``` +### `src/modules/` — Vertical-Slice Features -### Data Layer - -#### Models (`models/`) ```text -models/ -├── user.py # User model -├── post.py # Post model -├── tier.py # User tier model -└── rate_limit.py # Rate limit model +modules/ +├── common/ # Cross-module shared schemas, exceptions, utils +│ ├── constants.py +│ ├── exceptions.py +│ ├── schemas.py +│ └── utils/ +├── user/ +│ ├── models.py # SQLAlchemy User model +│ ├── schemas.py # Pydantic UserCreate, UserRead, UserUpdate, etc. +│ ├── crud.py # FastCRUD wrapper (crud_users) +│ ├── service.py # Business logic (UserService) +│ ├── routes.py # APIRouter with /users endpoints +│ └── enums.py # OAuthProvider, etc. +├── tier/ # Subscription tiers (model + simple CRUD) +├── rate_limit/ # Per-tier rate limit definitions +└── api_keys/ # API keys, key usage, key permissions ``` -**Purpose**: SQLAlchemy ORM models defining database schema. - -#### Schemas (`schemas/`) -```text -schemas/ -├── user.py # User validation schemas -├── post.py # Post validation schemas -├── tier.py # Tier validation schemas -├── rate_limit.py # Rate limit schemas -└── job.py # Background job schemas -``` +Each module is **self-contained**: drop it in, drop it out, with minimal blast radius. The aggregator at `interfaces/api/v1/__init__.py` is the only place that knows about every module's router. -**Purpose**: Pydantic schemas for request/response validation and serialization. +### Common Module Files -#### CRUD Operations (`crud/`) -```text -crud/ -├── crud_base.py # Base CRUD class -├── crud_users.py # User operations -├── crud_posts.py # Post operations -├── crud_tier.py # Tier operations -├── crud_rate_limit.py # Rate limit operations -└── helper.py # CRUD helper functions -``` - -**Purpose**: Database operations using FastCRUD for consistent data access patterns. 
+| File | Purpose | +|------|---------| +| `models.py` | SQLAlchemy ORM models (table schema) | +| `schemas.py` | Pydantic request/response models | +| `crud.py` | FastCRUD instances for the model | +| `service.py` | Business logic — orchestrates CRUD calls, applies rules | +| `routes.py` | `APIRouter` with the module's endpoints | +| `enums.py` | StrEnum types if the module needs them (optional) | -### Additional Components +## Migrations (`backend/migrations/`) -#### Middleware (`middleware/`) ```text -middleware/ -└── client_cache_middleware.py # Client-side caching middleware -``` - -#### Logs (`logs/`) -```text -logs/ -└── app.log # Application log file +migrations/ +├── env.py # Alembic environment (loads all models) +├── script.py.mako # Template for new migrations +└── versions/ # One file per migration revision ``` -## Database Migrations (`src/migrations/`) +Run from `backend/`: -```text -migrations/ -├── README # Migration instructions -├── env.py # Alembic environment configuration -├── script.py.mako # Migration template -└── versions/ # Individual migration files +```bash +uv run alembic revision --autogenerate -m "add foo" +uv run alembic upgrade head ``` -**Purpose**: Alembic database migrations for schema version control. - -## Utility Scripts (`src/scripts/`) +## Scripts (`backend/scripts/`) ```text scripts/ -├── create_first_superuser.py # Create initial admin user -└── create_first_tier.py # Create initial user tier +├── setup_initial_data.py # All-in-one: tables + tier + admin +├── create_first_superuser.py # Just the admin user +├── create_first_tier.py # Just the default tier +└── create_tables.py # Just the database tables ``` -**Purpose**: Initialization and maintenance scripts. +The most common entry point is `setup_initial_data` which calls all three. 
-## Testing Structure (`tests/`) +```bash +uv run python -m scripts.setup_initial_data +``` + +## Tests (`backend/tests/`) ```text tests/ -├── conftest.py # Pytest configuration and fixtures -├── test_user_unit.py # User-related unit tests -└── helpers/ # Test utilities - ├── generators.py # Test data generators - └── mocks.py # Mock objects and functions +├── conftest.py # Pytest fixtures (Postgres testcontainer, db session, client, mocks) +├── unit/ # Unit tests (no external deps) +│ ├── infrastructure/ +│ └── modules/ +└── integration/ # Integration tests (real Postgres via testcontainers) ``` -## Architectural Patterns - -### Layered Architecture - -The boilerplate implements a clean layered architecture: - -1. **API Layer** (`api/`) - Handles HTTP requests and responses -2. **Business Logic** (`crud/`) - Implements business rules and data operations -3. **Data Access** (`models/`) - Defines data structure and database interaction -4. **Core Services** (`core/`) - Provides shared functionality and configuration +Run from `backend/`: -### Dependency Injection - -FastAPI's dependency injection system is used throughout: - -- **Database Sessions** - Injected into endpoints via `async_get_db` -- **Authentication** - User context provided by `get_current_user` -- **Rate Limiting** - Applied via `rate_limiter_dependency` -- **Caching** - Managed through decorators and middleware - -### Configuration Management - -All configuration is centralized in `core/config.py`: - -- **Environment Variables** - Loaded from `.env` file -- **Settings Classes** - Organized by functionality (database, security, etc.) 
-- **Type Safety** - Using Pydantic for validation +```bash +uv run pytest tests/unit # fast, no Docker +uv run pytest tests/integration # spins up Postgres in Docker via testcontainers +uv run pytest # everything +``` -### Error Handling +## Architectural Patterns -Centralized exception handling: +### Three-Layer Architecture -- **Custom Exceptions** - Defined in `core/exceptions/` -- **HTTP Status Codes** - Consistent error responses -- **Logging** - Automatic error logging and tracking +1. **Interfaces** (`interfaces/`) - HTTP routes, admin UI, the FastAPI app instance +2. **Modules** (`modules/`) - Domain features as vertical slices +3. **Infrastructure** (`infrastructure/`) - Cross-cutting plumbing (DB, cache, auth, queue, config, logging) -## Design Principles +Dependencies flow downward: interfaces depend on modules and infrastructure; modules depend on infrastructure (and `modules/common`). Infrastructure has no upward dependencies. -### Single Responsibility +### Vertical Slices -Each module has a clear, single purpose: +Each `modules//` folder owns the entire stack for that feature. Adding a new feature means adding **one** new folder, not editing five separate top-level directories. 
-- Models define data structure
-- Schemas handle validation
-- CRUD manages data operations
-- API endpoints handle requests
+### Dependency Injection
 
-### Separation of Concerns
+FastAPI's `Depends` is used throughout:
 
-- Business logic separated from presentation
-- Database operations isolated from API logic
-- Configuration centralized and environment-aware
+- **Database session** — `Depends(async_session)` from `infrastructure.database.session`
+- **Current user** — `Depends(get_current_user)` from `infrastructure.auth.session.dependencies`
+- **Superuser only** — `Depends(get_current_superuser)`
+- **Service instances** — Each module's `routes.py` defines its own `get_<module>_service()` factory
 
-### Modularity
+### Configuration
 
-- Features can be added/removed independently
-- Services can be disabled via configuration
-- Clear interfaces between components
+All configuration lives in `infrastructure/config/settings.py`, loaded from `.env`:
 
-### Scalability
+- Settings classes grouped by concern (`DatabaseSettings`, `CacheSettings`, `AuthSettings`, etc.)
+- A single `Settings` class composes them
+- `get_settings()` returns a cached singleton
 
-- Async/await throughout the application
-- Connection pooling for database access
-- Caching and background task support
-- Horizontal scaling ready
+### Error Handling
 
-## Navigation Tips
+- Domain exceptions in `modules/common/exceptions.py` (e.g. `ResourceNotFoundError`, `PermissionDeniedError`)
+- HTTP-shaped exceptions in `infrastructure/auth/http_exceptions.py`
+- Routes catch domain exceptions and translate them via `modules/common/utils/error_handler.handle_exception`
 
-### Finding Code
+## Adding a New Feature
 
-- **Models** → `src/app/models/`
-- **API Endpoints** → `src/app/api/v1/`
-- **Database Operations** → `src/app/crud/`
-- **Configuration** → `src/app/core/config.py`
-- **Business Logic** → Distributed across CRUD and API layers
+The recommended flow:
 
-### Adding New Features
+1. **Create the module folder**: `mkdir backend/src/modules/widgets`
+2. **Define the model**: `backend/src/modules/widgets/models.py`
+3. **Add schemas**: `backend/src/modules/widgets/schemas.py`
+4. **Wrap with FastCRUD**: `backend/src/modules/widgets/crud.py`
+5. **Write the service**: `backend/src/modules/widgets/service.py`
+6. **Expose routes**: `backend/src/modules/widgets/routes.py`
+7. **Register the router** in `backend/src/interfaces/api/v1/__init__.py`
+8. **Generate a migration**: `uv run alembic revision --autogenerate -m "add widgets"`
+9. **Apply**: `uv run alembic upgrade head`
 
-1. **Model** → Define in `models/`
-2. **Schema** → Create in `schemas/`
-3. **CRUD** → Implement in `crud/`
-4. **API** → Add endpoints in `api/v1/`
-5. **Migration** → Generate with Alembic
+See [Development Guide](development.md) for a full walkthrough.
 
-### Understanding Data Flow
+## Data Flow
 
 ```text
-Request → API Endpoint → Dependencies → CRUD → Model → Database
-Response ← API Response ← Schema ← CRUD ← Query Result ← Database
+HTTP Request
+  → interfaces/api/v1/__init__.py
+  → modules/<module>/routes.py
+  → modules/<module>/service.py
+  → modules/<module>/crud.py (FastCRUD)
+  → infrastructure/database/session.py
+  → PostgreSQL
+
+HTTP Response ← Pydantic schema ← service ← CRUD result ← DB query
 ```
 
-This structure provides a solid foundation for building scalable, maintainable APIs while keeping the codebase organized and easy to navigate.
\ No newline at end of file
+This layering keeps HTTP concerns out of business logic, and business logic out of data access — making the codebase straightforward to navigate, test, and extend.
diff --git a/docs/user-guide/rate-limiting/index.md b/docs/user-guide/rate-limiting/index.md index 8cba87a0..8e57b1b4 100644 --- a/docs/user-guide/rate-limiting/index.md +++ b/docs/user-guide/rate-limiting/index.md @@ -1,481 +1,338 @@ # Rate Limiting -The boilerplate includes a sophisticated rate limiting system built on Redis that protects your API from abuse while supporting user tiers with different access levels. This system provides flexible, scalable rate limiting for production applications. +The boilerplate ships a flexible rate limiter that supports per-tier, per-path limits with Redis or Memcached backends. This page covers how the pieces fit together, how to enable enforcement on your routes, and the gotchas to know upfront. + +## What's Built In + +```text +backend/src/infrastructure/rate_limit/ +├── base.py RateLimiterBackend abstract base +├── backends/ Redis and Memcached implementations +├── exceptions.py RateLimitException, RateLimiterBackendException +├── initialize.py initialize_rate_limiter() / close_rate_limiter() +├── middleware.py RateLimiterMiddleware + check_rate_limit dependency +├── provider.py increment_and_check, get_count, reset +└── utils.py sanitize_path + +backend/src/modules/rate_limit/ +├── models.py RateLimit (tier_id, path, limit, period) +├── routes.py GET / GET-by-name / PATCH / DELETE on /api/v1/rate-limits/ +├── crud.py / service.py +└── schemas.py +``` -## Overview +The middleware and provider are wired up; the backend is initialized in the app's lifespan. **Enforcement is opt-in per route** — see below. -Rate limiting controls how many requests users can make within a specific time period. The boilerplate implements: +## How a Request Flows Through It -- **Redis-Based Storage**: Fast, distributed rate limiting using Redis -- **User Tier System**: Different limits for different user types -- **Path-Specific Limits**: Granular control per API endpoint -- **Fallback Protection**: Default limits for unauthenticated users +1. 
**Request arrives**, `RateLimiterMiddleware` is on the stack but **does not enforce limits** — it only attaches `X-RateLimit-*` headers to the response after the handler runs. +2. **The route's `Depends(check_rate_limit)` runs.** This is the actual enforcement point. Without this dependency on a route, no limit is checked. +3. **`check_rate_limit` extracts the user** from `request.state.user` (or falls back to client IP for anonymous requests), looks up the user's tier and the matching rate-limit row from the database, and computes `(limit, period)`. +4. **`increment_and_check`** atomically increments the counter at `ratelimit:{user_or_ip}:{sanitized_path}` and returns `(count, is_limited)`. The TTL on the key is set on first increment to `period` seconds. +5. **If `is_limited`**, raises `RateLimitException` (HTTP 429). Otherwise, sets `request.state.rate_limit_headers` so the middleware can attach them to the response. -## Quick Example +The key shape (no window suffix — the TTL handles the window): -```python -from fastapi import Depends -from app.api.dependencies import rate_limiter_dependency - -@router.post("/api/v1/posts", dependencies=[Depends(rate_limiter_dependency)]) -async def create_post(post_data: PostCreate): - # This endpoint is automatically rate limited based on: - # - User's tier (basic, premium, enterprise) - # - Specific limits for the /posts endpoint - # - Default limits for unauthenticated users - return await crud_posts.create(db=db, object=post_data) +```text +ratelimit:{user_id_or_ip}:{sanitized_path} ``` -## Architecture - -### Rate Limiting Components - -**Rate Limiter Class**: Singleton Redis client for checking limits
-**User Tiers**: Database-stored user subscription levels
-**Rate Limit Rules**: Path-specific limits per tier
-**Dependency Injection**: Automatic enforcement via FastAPI dependencies
+## Enabling Enforcement on a Route -### How It Works +Add the dependency: -1. **Request Arrives**: User makes API request to protected endpoint -2. **User Identification**: System identifies user and their tier -3. **Limit Lookup**: Finds applicable rate limit for user tier + endpoint -4. **Redis Check**: Increments counter in Redis sliding window -5. **Allow/Deny**: Request proceeds or returns 429 Too Many Requests - -## User Tier System +```python +from fastapi import APIRouter, Depends +from src.infrastructure.rate_limit import check_rate_limit -### Default Tiers +router = APIRouter() -The system supports flexible user tiers with different access levels: -```python -# Example tier configuration -tiers = { - "free": { - "requests_per_minute": 10, - "requests_per_hour": 100, - "special_endpoints": { - "/api/v1/ai/generate": {"limit": 2, "period": 3600}, # 2 per hour - "/api/v1/exports": {"limit": 1, "period": 86400}, # 1 per day - } - }, - "premium": { - "requests_per_minute": 60, - "requests_per_hour": 1000, - "special_endpoints": { - "/api/v1/ai/generate": {"limit": 50, "period": 3600}, - "/api/v1/exports": {"limit": 10, "period": 86400}, - } - }, - "enterprise": { - "requests_per_minute": 300, - "requests_per_hour": 10000, - "special_endpoints": { - "/api/v1/ai/generate": {"limit": 500, "period": 3600}, - "/api/v1/exports": {"limit": 100, "period": 86400}, - } - } -} +@router.post("/widgets", dependencies=[Depends(check_rate_limit)]) +async def create_widget(...): ... 
``` -### Rate Limit Database Structure +Or apply it to every route in a router: ```python -# Rate limits are stored per tier and path -class RateLimit: - id: int - tier_id: int # Links to user tier - name: str # Descriptive name - path: str # API path (sanitized) - limit: int # Number of requests allowed - period: int # Time period in seconds +router = APIRouter(dependencies=[Depends(check_rate_limit)]) ``` -## Implementation Details +That's all that's required — provided the rate limiter is enabled (`RATE_LIMITER_ENABLED=true`), every request to that route is checked. -### Automatic Rate Limiting +!!! warning "Currently no built-in route uses `check_rate_limit`" + The boilerplate's shipped routes (`/api/v1/users`, `/api/v1/auth`, `/api/v1/tiers`, `/api/v1/rate-limits`, `/api/v1/api-keys`) do **not** apply `check_rate_limit` by default. You add the dependency where you want enforcement. The middleware will still attach `X-RateLimit-*` headers, but only when something has populated `request.state.rate_limit_headers` — which only happens after `check_rate_limit` has run. -The system automatically applies rate limiting through dependency injection: +## Configuration -```python -@router.post("/protected-endpoint", dependencies=[Depends(rate_limiter_dependency)]) -async def protected_endpoint(): - """This endpoint is automatically rate limited.""" - pass - -# The dependency: -# 1. Identifies the user and their tier -# 2. Looks up rate limits for this path -# 3. Checks Redis counter -# 4. 
Allows or blocks the request +```env +# Master toggle +RATE_LIMITER_ENABLED=true + +# Backend selection (mirrors the cache backend selector) +RATE_LIMITER_BACKEND=redis # or "memcached" + +# Behavior on backend errors: +# true → log and let the request through (recommended) +# false → raise RateLimitException ("Access denied as a precaution") +RATE_LIMITER_FAIL_OPEN=true + +# Defaults applied when the user has no tier or no matching rate-limit row +DEFAULT_RATE_LIMIT_LIMIT=100 +DEFAULT_RATE_LIMIT_PERIOD=60 # seconds — 100/60s by default + +# Redis backend (when RATE_LIMITER_BACKEND=redis) +RATE_LIMITER_REDIS_HOST=redis # use "localhost" without Docker +RATE_LIMITER_REDIS_PORT=6379 +RATE_LIMITER_REDIS_DB=1 # separate from CACHE / SESSION / TASKIQ +RATE_LIMITER_REDIS_PASSWORD= +RATE_LIMITER_REDIS_CONNECT_TIMEOUT=5 +RATE_LIMITER_REDIS_POOL_SIZE=10 + +# Memcached backend (when RATE_LIMITER_BACKEND=memcached) +RATE_LIMITER_MEMCACHED_HOST=localhost +RATE_LIMITER_MEMCACHED_PORT=11211 +RATE_LIMITER_MEMCACHED_POOL_SIZE=10 ``` -#### Example Dependency Implementation -To make the rate limiting dependency functional, you must implement how user tiers and paths resolve to actual rate limits. -Below is a complete example using Redis and the database to determine per-tier and per-path restrictions. +When `RATE_LIMITER_ENABLED=false`, `check_rate_limit` returns immediately — the dependency is a no-op. Useful in tests and for isolating performance issues. + +## User-Tier vs IP-Based Limits + +The rate limiter has two paths depending on whether `request.state.user` is set: ```python -async def rate_limiter_dependency( - request: Request, - db: AsyncSession = Depends(async_get_db), - user=Depends(get_current_user_optional), -): - """ - Enforces rate limits per user tier and API path. 
- - - Identifies user (or defaults to IP-based anonymous rate limit) - - Finds tier-specific limit for the request path - - Checks Redis counter to determine if request should be allowed - """ - path = sanitize_path(request.url.path) - user_id = getattr(user, "id", None) or request.client.host or "anonymous" - - # Determine user tier (default to "free" or anonymous) - if user and getattr(user, "tier_id", None): - tier = await crud_tiers.get(db=db, id=user.tier_id) +# Inside _check_rate_limit +if user: + user_id = user["id"] + tier = await crud_tiers.get(db=db, id=user["tier_id"], ...) + if tier: + rate_limit = await crud_rate_limits.get(db=db, tier_id=tier["id"], path=sanitized_path, ...) + if rate_limit: + limit, period = rate_limit["limit"], rate_limit["period"] + else: + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD else: - tier = await crud_tiers.get(db=db, name="free") - - if not tier: - raise RateLimitException("Tier configuration not found") - - # Find specific rate limit rule for this path + tier - rate_limit_rule = await crud_rate_limits.get_by_path_and_tier( - db=db, path=path, tier_id=tier.id - ) - - # Use default limits if no specific rule is found - limit = getattr(rate_limit_rule, "limit", 100) - period = getattr(rate_limit_rule, "period", 3600) - - # Check rate limit in Redis - is_limited = await rate_limiter.is_rate_limited( - db=db, - user_id=user_id, - path=path, - limit=limit, - period=period, - ) - - if is_limited: - raise RateLimitException( - f"Rate limit exceeded for path '{path}'. Try again later." - ) + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD +else: + # Anonymous — key by client IP + user_id = request.client.host + limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD ``` -### Redis-Based Counting +!!! warning "`request.state.user` is not populated automatically" + The default session auth dependency (`get_current_user`) does not write the user back to `request.state.user`. 
Until you add a small helper that does, **every request looks anonymous to the rate limiter**, and tier-specific limits won't apply. -The rate limiter uses Redis for distributed, high-performance counting: +A minimal middleware to bridge the two: ```python -# Sliding window implementation -async def is_rate_limited(self, user_id: int, path: str, limit: int, period: int) -> bool: - current_timestamp = int(datetime.now(UTC).timestamp()) - window_start = current_timestamp - (current_timestamp % period) - - # Create unique key for this user/path/window - key = f"ratelimit:{user_id}:{sanitized_path}:{window_start}" - - # Increment counter - current_count = await redis_client.incr(key) - - # Set expiration on first increment - if current_count == 1: - await redis_client.expire(key, period) - - # Check if limit exceeded - return current_count > limit +# infrastructure/auth/rate_limit_user_middleware.py +from starlette.middleware.base import BaseHTTPMiddleware +from src.infrastructure.auth.session.dependencies import _resolve_session_user # pseudo + + +class AttachUserToRateLimitMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request, call_next): + try: + user = await _resolve_session_user(request) + if user: + request.state.user = user + except Exception: + pass + return await call_next(request) ``` -### Path Sanitization +Mount this **before** `RateLimiterMiddleware`. The exact resolution code depends on how you reuse your session backend — if this is a setup you need, copy the validation logic from `infrastructure/auth/session/dependencies.py:get_current_user` into the middleware. + +For most teams, IP-based default limits (`100 req/60s`) are enough until you have an actual product reason to bring tiers into the rate-limit story. 
-API paths are sanitized for consistent Redis key generation: +## Path Sanitization + +Paths are normalized for consistent keys: ```python def sanitize_path(path: str) -> str: return path.strip("/").replace("/", "_") -# Examples: -# "/api/v1/users" → "api_v1_users" -# "/posts/{id}" → "posts_{id}" +# /api/v1/users → "api_v1_users" +# /api/v1/users/42 → "api_v1_users_42" +# /api/v1/users/{id} → "api_v1_users_{id}" ``` -## Configuration +The middleware first looks up the `RateLimit` row by sanitized path. If nothing matches, it falls back to looking up the original path. **In practice you should store the sanitized form in the database** — that's what the lookup primarily uses, and it's what the cache key format mirrors. -### Environment Variables +Note: paths with path parameters (`/users/42`) sanitize to `api_v1_users_42`, which means **each individual resource ID gets its own counter**. That's almost always what you want (otherwise a single hot resource could rate-limit unrelated reads), but if you specifically want a single counter for a parameterized route, store the rule under the literal pattern `api_v1_users_{id}` and write a small middleware that matches the route against the path template before sanitizing. 
-```bash -# Rate Limiting Settings -DEFAULT_RATE_LIMIT_LIMIT=100 # Default requests per period -DEFAULT_RATE_LIMIT_PERIOD=3600 # Default period (1 hour) +## Managing Rate-Limit Rules -# Redis Rate Limiter Settings -REDIS_RATE_LIMITER_HOST=localhost -REDIS_RATE_LIMITER_PORT=6379 -REDIS_RATE_LIMITER_DB=2 # Separate from cache/queue -``` - -### Creating User Tiers +The `RateLimit` model: ```python -# Create tiers via API (superuser only) -POST /api/v1/tiers -{ - "name": "premium", - "description": "Premium subscription with higher limits" -} - -# Assign tier to user -PUT /api/v1/users/{user_id}/tier -{ - "tier_id": 2 -} +class RateLimit(Base, TimestampMixin, SoftDeleteMixin): + __tablename__ = "rate_limits" + + id: int + tier_id: int # FK to tiers.id + name: str # unique — used as the URL path on /rate-limits/{name} + path: str # sanitized path the rule applies to + limit: int # max requests per period + period: int # seconds ``` -### Setting Rate Limits +### What the API exposes -```python -# Create rate limits per tier and endpoint -POST /api/v1/tier/premium/rate_limit -{ - "name": "premium_posts_limit", - "path": "/api/v1/posts", - "limit": 100, # 100 requests - "period": 3600 # per hour -} - -# Different limits for different endpoints -POST /api/v1/tier/free/rate_limit -{ - "name": "free_ai_limit", - "path": "/api/v1/ai/generate", - "limit": 5, # 5 requests - "period": 86400 # per day -} -``` +| Method | Path | Auth | Notes | +|--------|------------------------------|-------------|------------------------------------------| +| GET | `/api/v1/rate-limits/` | Public | Paginated list of all rate-limit rules | +| GET | `/api/v1/rate-limits/{name}` | Public | Get a rule by name | +| PATCH | `/api/v1/rate-limits/{name}` | Superuser | Update an existing rule | +| DELETE | `/api/v1/rate-limits/{name}` | Superuser | Delete a rule | -## Usage Patterns +There's **no POST endpoint** for creating rate-limit rules. 
To seed initial rules, you have three options: -### Basic Protection +### Option 1: SQL / Migration + +Add an Alembic migration that inserts the rows: ```python -# Protect all endpoints in a router -router = APIRouter(dependencies=[Depends(rate_limiter_dependency)]) - -@router.get("/users") -async def get_users(): - """Rate limited based on user tier.""" - pass - -@router.post("/posts") -async def create_post(): - """Rate limited based on user tier.""" - pass +# alembic/versions/xxxx_seed_rate_limits.py +def upgrade(): + op.execute(""" + INSERT INTO rate_limits (tier_id, name, path, "limit", period, created_at) + VALUES + (1, 'free_widgets_create', 'api_v1_widgets', 10, 60, NOW()), + (2, 'pro_widgets_create', 'api_v1_widgets', 100, 60, NOW()) + """) ``` -### Selective Protection +### Option 2: Custom Seed Script + +Add a one-off in `backend/scripts/`: ```python -# Protect only specific endpoints -@router.get("/public-data") -async def get_public_data(): - """No rate limiting - public endpoint.""" - pass - -@router.post("/premium-feature", dependencies=[Depends(rate_limiter_dependency)]) -async def premium_feature(): - """Rate limited - premium feature.""" - pass -``` +# backend/scripts/setup_rate_limits.py +import asyncio -### Custom Error Handling +from src.infrastructure.database.session import local_session +from src.modules.rate_limit.crud import crud_rate_limits -```python -from app.core.exceptions.http_exceptions import RateLimitException - -@app.exception_handler(RateLimitException) -async def rate_limit_handler(request: Request, exc: RateLimitException): - """Custom rate limit error response.""" - return JSONResponse( - status_code=429, - content={ - "error": "Rate limit exceeded", - "message": "Too many requests. 
Please try again later.", - "retry_after": 60 # Suggest retry time - }, - headers={"Retry-After": "60"} - ) -``` -## Monitoring and Analytics +async def main(): + async with local_session() as db: + await crud_rate_limits.create(db=db, object={ + "tier_id": 1, "name": "free_widgets_create", + "path": "api_v1_widgets", "limit": 10, "period": 60, + }) + await db.commit() -### Rate Limit Metrics -```python -@router.get("/admin/rate-limit-stats") -async def get_rate_limit_stats(): - """Monitor rate limiting effectiveness.""" - - # Get Redis statistics - redis_info = await rate_limiter.client.info() - - # Count current rate limit keys - pattern = "ratelimit:*" - keys = await rate_limiter.client.keys(pattern) - - # Analyze by endpoint - endpoint_stats = {} - for key in keys: - parts = key.split(":") - if len(parts) >= 3: - endpoint = parts[2] - endpoint_stats[endpoint] = endpoint_stats.get(endpoint, 0) + 1 - - return { - "total_active_limits": len(keys), - "redis_memory_usage": redis_info.get("used_memory_human"), - "endpoint_stats": endpoint_stats - } +if __name__ == "__main__": + asyncio.run(main()) ``` -### User Analytics +Run with `uv run python -m scripts.setup_rate_limits` (from `backend/`). 
-```python -async def analyze_user_usage(user_id: int, days: int = 7): - """Analyze user's API usage patterns.""" - - # This would require additional logging/analytics - # implementation to track request patterns - - return { - "user_id": user_id, - "tier": "premium", - "requests_last_7_days": 2540, - "average_requests_per_day": 363, - "top_endpoints": [ - {"path": "/api/v1/posts", "count": 1200}, - {"path": "/api/v1/users", "count": 800}, - {"path": "/api/v1/ai/generate", "count": 540} - ], - "rate_limit_hits": 12, # Times user hit rate limits - "suggested_tier": "enterprise" # Based on usage patterns - } -``` +### Option 3: Add a SQLAdmin View -## Best Practices +Mirror `UserAdmin` and `TierAdmin` to add a `RateLimitAdmin` view — see [Admin Panel → Adding Models](../admin-panel/adding-models.md). This gives you a UI for creating, editing, and deleting rules. -### Rate Limit Design +## Response Headers -```python -# Design limits based on resource cost -expensive_endpoints = { - "/api/v1/ai/generate": {"limit": 10, "period": 3600}, # AI is expensive - "/api/v1/reports/export": {"limit": 3, "period": 86400}, # Export is heavy - "/api/v1/bulk/import": {"limit": 1, "period": 3600}, # Import is intensive -} - -# More generous limits for lightweight endpoints -lightweight_endpoints = { - "/api/v1/users/me": {"limit": 1000, "period": 3600}, # Profile access - "/api/v1/posts": {"limit": 300, "period": 3600}, # Content browsing - "/api/v1/search": {"limit": 500, "period": 3600}, # Search queries -} -``` +When `check_rate_limit` runs successfully, the middleware attaches: -### Production Considerations +| Header | Meaning | +|-----------------------|--------------------------------------------------| +| `X-RateLimit-Limit` | The configured limit for this user × path | +| `X-RateLimit-Remaining` | How many requests are left in the current window | +| `X-RateLimit-Reset` | Period (seconds) for the window | -```python -# Use separate Redis database for rate limiting 
-REDIS_RATE_LIMITER_DB=2 # Isolate from cache and queues - -# Set appropriate Redis memory policies -# maxmemory-policy volatile-lru # Remove expired rate limit keys first - -# Monitor Redis memory usage -# Rate limit keys can accumulate quickly under high load - -# Consider rate limit key cleanup -async def cleanup_expired_rate_limits(): - """Clean up expired rate limit keys.""" - pattern = "ratelimit:*" - keys = await redis_client.keys(pattern) - - for key in keys: - ttl = await redis_client.ttl(key) - if ttl == -2: # Key expired but not cleaned up - await redis_client.delete(key) -``` +These are standard-ish (formatted like the GitHub / Stripe convention, not RFC 6585). Frontends can read them to surface graceful "you're approaching your limit" UI. + +## Programmatic Cache-like Operations -### Security Considerations +The provider exposes the same primitives the middleware uses, in case you need to apply rate limits outside the HTTP request path (background jobs that throttle calls to a third party, for example): ```python -# Rate limit by IP for unauthenticated users -if not user: - user_id = request.client.host if request.client else "unknown" - limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD +from src.infrastructure.rate_limit import increment_and_check, get_count, reset + +count, is_limited = await increment_and_check( + key="external_api_calls:user_42", + limit=100, + period=3600, + fail_open=True, +) -# Prevent rate limit enumeration attacks -# Don't expose exact remaining requests in error messages - -# Use progressive delays for repeated violations -# Consider temporary bans for severe abuse - -# Log rate limit violations for security monitoring -if is_limited: - logger.warning( - f"Rate limit exceeded", - extra={ - "user_id": user_id, - "path": path, - "ip": request.client.host if request.client else "unknown", - "user_agent": request.headers.get("user-agent") - } - ) +current = await get_count("external_api_calls:user_42") +await 
reset("external_api_calls:user_42") ``` -## Common Use Cases +Use a key prefix that doesn't collide with the HTTP rate limiter's `ratelimit:` namespace. -### API Monetization +## Backend Differences -```python -# Different tiers for different pricing levels -tiers = { - "free": {"daily_requests": 1000, "cost": 0}, - "starter": {"daily_requests": 10000, "cost": 29}, - "professional": {"daily_requests": 100000, "cost": 99}, - "enterprise": {"daily_requests": 1000000, "cost": 499} -} -``` +| Feature | Redis | Memcached | +|-----------------------------|-------|-----------| +| Atomic increment + TTL set | Yes | Yes | +| `get_count` / `reset` | Yes | Yes | +| Pattern-based reset | Yes | No | +| Connection pooling | Yes | Yes | -### Resource Protection +Both backends do everything the middleware needs. Pick Redis if you're already running it for cache or Taskiq. -```python -# Protect expensive operations -@router.post("/ai/generate-image", dependencies=[Depends(rate_limiter_dependency)]) -async def generate_image(): - """Expensive AI operation - heavily rate limited.""" - pass - -@router.get("/data/export", dependencies=[Depends(rate_limiter_dependency)]) -async def export_data(): - """Database-intensive operation - rate limited.""" - pass -``` +## Production Considerations -### Abuse Prevention +### Pool sizing -```python -# Strict limits on user-generated content -@router.post("/posts", dependencies=[Depends(rate_limiter_dependency)]) -async def create_post(): - """Prevent spam posting.""" - pass - -@router.post("/comments", dependencies=[Depends(rate_limiter_dependency)]) -async def create_comment(): - """Prevent comment spam.""" - pass -``` +`RATE_LIMITER_REDIS_POOL_SIZE=10` is enough for typical workloads. If you're seeing `redis.exceptions.ConnectionError` under load, it usually means pool exhaustion — raise the pool size or check upstream connection-leak issues first. 
+ +### Fail-open vs fail-closed + +The default `RATE_LIMITER_FAIL_OPEN=true` means a Redis outage doesn't take your API down — requests pass through unrate-limited. This is the right call for most public APIs. + +If you specifically need rate limits enforced even during cache outages (e.g. you're protecting an expensive AI inference endpoint that you don't want hammered), set `RATE_LIMITER_FAIL_OPEN=false`. Be aware: a flaky Redis connection now translates directly into 429s for users. + +### Window behavior + +The implementation uses a fixed-window counter (TTL on first increment). At the boundary between windows, a user can technically make `2 × limit` requests in a short span. For most use cases this is fine; if you need stricter sliding-window semantics, build that on top of the provider yourself. + +### Anonymous-user limits + +IP-based rate limits are easy to bypass with NAT / proxies / IPv6 rotation. They're a speed bump, not security. If you're trying to prevent abuse rather than control fair use, you need authentication, captchas, or upstream firewall rules — not just rate limits. + +## Troubleshooting + +### "I added `Depends(check_rate_limit)` but no headers appear" + +- Confirm `RATE_LIMITER_ENABLED=true` +- Confirm the rate limiter initialized cleanly at startup (look for `Cache backend not available` or similar) +- Confirm the dependency runs **before** the response is built (it does, by virtue of being a dependency — but if you're seeing an empty body the route may have errored earlier) + +### "All requests look anonymous even though users are logged in" + +`request.state.user` isn't being populated. Either implement the bridge middleware shown above, or accept that the rate limiter operates on IP only. + +### "Path lookups never find the rate-limit row" + +Verify the `path` column in `rate_limits` matches the **sanitized** form (slashes replaced with underscores). 
The lookup tries sanitized first, then falls back to the original path — but keys in Redis always use the sanitized version, so configs should match. + +### "The rate-limiter dependency raises `RateLimiterBackendException`" + +The Redis connection failed and `RATE_LIMITER_FAIL_OPEN=false`. Either fix Redis, switch to fail-open, or temporarily disable the limiter (`RATE_LIMITER_ENABLED=false`). + +## Key Files + +| Component | Location | +|-----------------------|-----------------------------------------------------------| +| Middleware + dependency | `backend/src/infrastructure/rate_limit/middleware.py` | +| Provider API | `backend/src/infrastructure/rate_limit/provider.py` | +| Backend implementations | `backend/src/infrastructure/rate_limit/backends/` | +| Path sanitization | `backend/src/infrastructure/rate_limit/utils.py` | +| RateLimit model | `backend/src/modules/rate_limit/models.py` | +| Rate-limit routes | `backend/src/modules/rate_limit/routes.py` | +| Settings | `backend/src/infrastructure/config/settings.py` (`RateLimiterSettings`) | + +## Next Steps -This comprehensive rate limiting system provides robust protection against API abuse while supporting flexible business models through user tiers and granular endpoint controls. \ No newline at end of file +- **[Tiers](../authentication/permissions.md#tier-based-authorization)** — Setting up user tiers +- **[Admin Panel → Adding Models](../admin-panel/adding-models.md)** — Adding a `RateLimitAdmin` view +- **[Caching → Cache Strategies](../caching/cache-strategies.md)** — Patterns that share the same Redis-as-state mindset diff --git a/docs/user-guide/testing.md b/docs/user-guide/testing.md index 8c164809..9e85ef64 100644 --- a/docs/user-guide/testing.md +++ b/docs/user-guide/testing.md @@ -1,810 +1,399 @@ -# Testing Guide - -This guide covers comprehensive testing strategies for the FastAPI boilerplate, including unit tests, integration tests, and API testing. 
- -## Test Setup - -### Testing Dependencies - -The boilerplate uses these testing libraries: - -- **pytest** - Testing framework -- **pytest-asyncio** - Async test support -- **httpx** - Async HTTP client for API tests -- **pytest-cov** - Coverage reporting -- **faker** - Test data generation - -### Test Configuration - -#### pytest.ini - -```ini -[tool:pytest] -testpaths = tests -python_files = test_*.py -python_classes = Test* -python_functions = test_* -addopts = - -v - --strict-markers - --strict-config - --cov=src - --cov-report=term-missing - --cov-report=html - --cov-report=xml - --cov-fail-under=80 -markers = - unit: Unit tests - integration: Integration tests - api: API tests - slow: Slow tests -asyncio_mode = auto +# Testing + +The boilerplate ships pytest configured against `tests/` at the repo root, with `testcontainers-postgres` available for real-database tests and `httpx` for HTTP-level tests against the FastAPI app. **No example tests ship yet** — this page covers the patterns you'll use when you add them. 
+ +## What's Configured + +`backend/pyproject.toml`: + +```toml +[tool.pytest.ini_options] +pythonpath = ["src"] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_functions = ["test_*"] +python_classes = ["Test*"] +asyncio_mode = "auto" +env = ["ENVIRONMENT=pytest", "PYTEST_CURRENT_TEST=true"] +markers = [ + "unit: Unit tests that don't require external dependencies", + "integration: Integration tests that may require external services", + "asyncio: Tests that use asyncio", + "slow: marks tests as slow running", +] ``` -#### Test Database Setup +What this gets you: + +- **`pythonpath = ["src"]`** — `from src.modules.user.service import UserService` works without manual sys.path hacks +- **`asyncio_mode = "auto"`** — every `async def test_*` runs under pytest-asyncio; no decorator needed +- **`ENVIRONMENT=pytest`** — the production validator skips its checks (sees a non-`production` env), so you don't need a real `SECRET_KEY` to boot the test app +- **Markers** for `unit` / `integration` / `slow` — use them to split your suite + +Available test dependencies (from `[dependency-groups].dev`): + +- `pytest`, `pytest-asyncio`, `pytest-mock` +- `httpx` — for in-process HTTP testing +- `faker` — for realistic fixture data +- `testcontainers` + `testcontainers-postgres` — for real-Postgres integration tests +- `pytest-xdist[psutil]` — for parallel test execution + +The repo doesn't currently bundle `pytest-cov`. Add it (`uv add --dev pytest-cov`) when you start tracking coverage. + +## Test Layout + +Use `tests/` at the repository root. 
A standard layout: + +```text +tests/ +├── conftest.py # global fixtures (app, db, client) +├── helpers/ +│ ├── __init__.py +│ └── factories.py # data-creation helpers (faker-based) +├── unit/ +│ ├── modules/ +│ │ ├── user/ +│ │ │ ├── test_service.py +│ │ │ └── test_schemas.py +│ │ └── tier/ +│ │ └── test_service.py +│ └── infrastructure/ +│ └── test_session_manager.py +└── integration/ + ├── api/ + │ ├── test_auth.py + │ ├── test_users.py + │ └── test_tiers.py + └── db/ + └── test_migrations.py +``` -Create `tests/conftest.py`: +The split is a guideline, not a rule: -```python -import asyncio -import pytest -import pytest_asyncio -from typing import AsyncGenerator -from httpx import AsyncClient -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker -from faker import Faker +- **Unit tests** mock the database (often by mocking the FastCRUD layer) and run fast +- **Integration tests** use a real Postgres (via testcontainers or a local DB) and exercise the HTTP layer -from src.app.core.config import settings -from src.app.core.db.database import Base, async_get_db -from src.app.main import app -from src.app.models.user import User -from src.app.models.post import Post -from src.app.core.security import get_password_hash +## A Working `conftest.py` -# Test database configuration -TEST_DATABASE_URL = "postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db" +This is the conftest you'd start from. It provides three layers of fixtures: the FastAPI `app`, an async `db_session`, and an `httpx.AsyncClient` whose database dependency is overridden to use the test session. 
-# Create test engine and session -test_engine = create_async_engine(TEST_DATABASE_URL, echo=False) -TestSessionLocal = sessionmaker( - test_engine, class_=AsyncSession, expire_on_commit=False -) +```python +# tests/conftest.py +from collections.abc import AsyncGenerator -fake = Faker() +import pytest_asyncio +from httpx import ASGITransport, AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from testcontainers.postgres import PostgresContainer + +from src.infrastructure.database.models import Base +from src.infrastructure.database.session import async_session +from src.interfaces.main import app + + +@pytest_asyncio.fixture(scope="session") +async def postgres_container() -> AsyncGenerator[PostgresContainer, None]: + container = PostgresContainer("postgres:16-alpine", driver="asyncpg") + container.start() + try: + yield container + finally: + container.stop() + + +@pytest_asyncio.fixture(scope="session") +async def db_engine(postgres_container): + url = postgres_container.get_connection_url() + engine = create_async_engine(url, echo=False, future=True) + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield engine + await engine.dispose() @pytest_asyncio.fixture -async def async_session() -> AsyncGenerator[AsyncSession, None]: - """Create a fresh database session for each test.""" - async with test_engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - async with TestSessionLocal() as session: - yield session - - async with test_engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) +async def db_session(db_engine) -> AsyncGenerator[AsyncSession, None]: + factory = async_sessionmaker(db_engine, expire_on_commit=False) + async with factory() as session: + try: + yield session + finally: + await session.rollback() @pytest_asyncio.fixture -async def async_client(async_session: AsyncSession) -> AsyncGenerator[AsyncClient, None]: - """Create an 
async HTTP client for testing.""" - def get_test_db(): - return async_session - - app.dependency_overrides[async_get_db] = get_test_db - - async with AsyncClient(app=app, base_url="http://test") as client: - yield client - +async def client(db_session) -> AsyncGenerator[AsyncClient, None]: + async def override_db(): + yield db_session + + app.dependency_overrides[async_session] = override_db + transport = ASGITransport(app=app) + async with AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac app.dependency_overrides.clear() +``` +Key things to notice: -@pytest_asyncio.fixture -async def test_user(async_session: AsyncSession) -> User: - """Create a test user.""" - user = User( - name=fake.name(), - username=fake.user_name(), - email=fake.email(), - hashed_password=get_password_hash("testpassword123"), - is_superuser=False - ) - async_session.add(user) - await async_session.commit() - await async_session.refresh(user) - return user +- **`testcontainers.PostgresContainer`** spins up a real Postgres, scoped to the test session. First test pays a few seconds of startup; later tests are fast. +- **`db_session`** rolls back at the end of every test, so tests don't bleed into each other. The container is reused; the data is not. +- **`app.dependency_overrides[async_session]`** swaps the production dependency for one that yields the test session — every route ends up reading/writing through your test transaction. +- **`ASGITransport`** runs the FastAPI app in-process — no real HTTP server is started. +If you don't want to depend on Docker for tests, swap `PostgresContainer` for a connection to a local Postgres (e.g. one already running for development). Use a separate database name (`test_db`, dropped at the end of the session). 
-@pytest_asyncio.fixture -async def test_superuser(async_session: AsyncSession) -> User: - """Create a test superuser.""" - user = User( - name="Super Admin", - username="superadmin", - email="admin@test.com", - hashed_password=get_password_hash("superpassword123"), - is_superuser=True - ) - async_session.add(user) - await async_session.commit() - await async_session.refresh(user) - return user +## Writing Unit Tests +Unit tests should not touch the database. Mock at the **CRUD layer** — your service contract is "I call `crud_widgets.get` and get back a dict-or-None", and that's the seam. -@pytest_asyncio.fixture -async def test_post(async_session: AsyncSession, test_user: User) -> Post: - """Create a test post.""" - post = Post( - title=fake.sentence(), - content=fake.text(), - created_by_user_id=test_user.id - ) - async_session.add(post) - await async_session.commit() - await async_session.refresh(post) - return post +```python +# tests/unit/modules/user/test_service.py +from unittest.mock import AsyncMock +import pytest -@pytest_asyncio.fixture -async def auth_headers(async_client: AsyncClient, test_user: User) -> dict: - """Get authentication headers for a test user.""" - login_data = { - "username": test_user.username, - "password": "testpassword123" - } - - response = await async_client.post("/api/v1/auth/login", data=login_data) - token = response.json()["access_token"] - - return {"Authorization": f"Bearer {token}"} +from src.modules.common.exceptions import ResourceNotFoundError +from src.modules.user.service import UserService -@pytest_asyncio.fixture -async def superuser_headers(async_client: AsyncClient, test_superuser: User) -> dict: - """Get authentication headers for a test superuser.""" - login_data = { - "username": test_superuser.username, - "password": "superpassword123" - } - - response = await async_client.post("/api/v1/auth/login", data=login_data) - token = response.json()["access_token"] - - return {"Authorization": f"Bearer {token}"} 
+@pytest.mark.unit +async def test_get_by_id_returns_user(mocker): + mock_crud = mocker.patch("src.modules.user.service.crud_users") + mock_crud.get = AsyncMock(return_value={"id": 1, "username": "alice"}) + + user = await UserService().get_by_id(user_id=1, db=AsyncMock()) + + assert user["username"] == "alice" + mock_crud.get.assert_awaited_once() + + +@pytest.mark.unit +async def test_get_by_id_raises_when_missing(mocker): + mock_crud = mocker.patch("src.modules.user.service.crud_users") + mock_crud.get = AsyncMock(return_value=None) + + with pytest.raises(ResourceNotFoundError): + await UserService().get_by_id(user_id=999, db=AsyncMock()) ``` -## Unit Tests +`pytest-mock`'s `mocker` fixture handles cleanup automatically. `AsyncMock` matches the async CRUD interface. -### Model Tests +## Writing Integration Tests + +Integration tests use the real database via `client` and `db_session`. The session-based auth flow needs to be honored — `httpx.AsyncClient` keeps cookies between calls, so log in once and reuse the client. 
```python -# tests/test_models.py +# tests/integration/api/test_users.py import pytest -from datetime import datetime -from src.app.models.user import User -from src.app.models.post import Post +from src.modules.user.service import UserService +from tests.helpers.factories import build_user_create_payload -@pytest.mark.unit -class TestUserModel: - """Test User model functionality.""" - - async def test_user_creation(self, async_session): - """Test creating a user.""" - user = User( - name="Test User", - username="testuser", - email="test@example.com", - hashed_password="hashed_password" - ) - - async_session.add(user) - await async_session.commit() - await async_session.refresh(user) - - assert user.id is not None - assert user.name == "Test User" - assert user.username == "testuser" - assert user.email == "test@example.com" - assert user.created_at is not None - assert user.is_superuser is False - assert user.is_deleted is False - - async def test_user_relationships(self, async_session, test_user): - """Test user relationships.""" - post = Post( - title="Test Post", - content="Test content", - created_by_user_id=test_user.id - ) - - async_session.add(post) - await async_session.commit() - - # Test relationship - await async_session.refresh(test_user) - assert len(test_user.posts) == 1 - assert test_user.posts[0].title == "Test Post" +@pytest.mark.integration +async def test_register_login_and_fetch_me(client, db_session): + # Register + payload = build_user_create_payload(email="alice@example.com", username="alice") + register_response = await client.post("/api/v1/users/", json=payload) + assert register_response.status_code == 201 + user_data = register_response.json() + assert user_data["username"] == "alice" + + # Log in — sets session cookie on the client + login_response = await client.post( + "/api/v1/auth/login", + json={"username": "alice", "password": payload["password"]}, + ) + assert login_response.status_code == 200 -@pytest.mark.unit -class 
TestPostModel: - """Test Post model functionality.""" - - async def test_post_creation(self, async_session, test_user): - """Test creating a post.""" - post = Post( - title="Test Post", - content="This is test content", - created_by_user_id=test_user.id - ) - - async_session.add(post) - await async_session.commit() - await async_session.refresh(post) - - assert post.id is not None - assert post.title == "Test Post" - assert post.content == "This is test content" - assert post.created_by_user_id == test_user.id - assert post.created_at is not None - assert post.is_deleted is False + # Authenticated request reuses the cookie automatically + me_response = await client.get("/api/v1/users/me/") + assert me_response.status_code == 200 + assert me_response.json()["username"] == "alice" ``` -### Schema Tests +A small factory to keep payloads readable: ```python -# tests/test_schemas.py -import pytest -from pydantic import ValidationError -from src.app.schemas.user import UserCreate, UserRead, UserUpdate -from src.app.schemas.post import PostCreate, PostRead, PostUpdate - +# tests/helpers/factories.py +from faker import Faker -@pytest.mark.unit -class TestUserSchemas: - """Test User schema validation.""" - - def test_user_create_valid(self): - """Test valid user creation schema.""" - user_data = { - "name": "John Doe", - "username": "johndoe", - "email": "john@example.com", - "password": "SecurePass123!" - } - - user = UserCreate(**user_data) - assert user.name == "John Doe" - assert user.username == "johndoe" - assert user.email == "john@example.com" - assert user.password == "SecurePass123!" - - def test_user_create_invalid_email(self): - """Test invalid email validation.""" - with pytest.raises(ValidationError) as exc_info: - UserCreate( - name="John Doe", - username="johndoe", - email="invalid-email", - password="SecurePass123!" 
- ) - - errors = exc_info.value.errors() - assert any(error['type'] == 'value_error' for error in errors) - - def test_user_create_short_password(self): - """Test password length validation.""" - with pytest.raises(ValidationError) as exc_info: - UserCreate( - name="John Doe", - username="johndoe", - email="john@example.com", - password="123" - ) - - errors = exc_info.value.errors() - assert any(error['type'] == 'value_error' for error in errors) - - def test_user_update_partial(self): - """Test partial user update.""" - update_data = {"name": "Jane Doe"} - user_update = UserUpdate(**update_data) - - assert user_update.name == "Jane Doe" - assert user_update.username is None - assert user_update.email is None +fake = Faker() -@pytest.mark.unit -class TestPostSchemas: - """Test Post schema validation.""" - - def test_post_create_valid(self): - """Test valid post creation.""" - post_data = { - "title": "Test Post", - "content": "This is a test post content" - } - - post = PostCreate(**post_data) - assert post.title == "Test Post" - assert post.content == "This is a test post content" - - def test_post_create_empty_title(self): - """Test empty title validation.""" - with pytest.raises(ValidationError): - PostCreate( - title="", - content="This is a test post content" - ) - - def test_post_create_long_title(self): - """Test title length validation.""" - with pytest.raises(ValidationError): - PostCreate( - title="x" * 101, # Exceeds max length - content="This is a test post content" - ) +def build_user_create_payload(**overrides) -> dict: + return { + "name": fake.name(), + "username": fake.user_name(), + "email": fake.email(), + "password": "TestPass123!", + **overrides, + } ``` -### CRUD Tests +### Authenticating as a Specific User + +Because session auth is cookie-based, the client retains the session for subsequent requests. 
For tests that need a logged-in superuser without going through the registration flow, seed a superuser directly via the service: ```python -# tests/test_crud.py -import pytest -from src.app.crud.crud_users import crud_users -from src.app.crud.crud_posts import crud_posts -from src.app.schemas.user import UserCreate, UserUpdate -from src.app.schemas.post import PostCreate, PostUpdate +# tests/conftest.py (additional fixture) +@pytest_asyncio.fixture +async def superuser_client(client, db_session): + service = UserService() + await service.create( + payload={ + "name": "Super", + "username": "super", + "email": "super@test.com", + "password": "SuperPass123!", + }, + db=db_session, + ) + # Manually flip is_superuser via crud_users.update if your service doesn't expose it + await db_session.commit() + await client.post("/api/v1/auth/login", + json={"username": "super", "password": "SuperPass123!"}) + yield client +``` -@pytest.mark.unit -class TestUserCRUD: - """Test User CRUD operations.""" - - async def test_create_user(self, async_session): - """Test creating a user.""" - user_data = UserCreate( - name="CRUD User", - username="cruduser", - email="crud@example.com", - password="password123" - ) - - user = await crud_users.create(db=async_session, object=user_data) - assert user["name"] == "CRUD User" - assert user["username"] == "cruduser" - assert user["email"] == "crud@example.com" - assert "id" in user - - async def test_get_user(self, async_session, test_user): - """Test getting a user.""" - retrieved_user = await crud_users.get( - db=async_session, - id=test_user.id - ) - - assert retrieved_user is not None - assert retrieved_user["id"] == test_user.id - assert retrieved_user["name"] == test_user.name - assert retrieved_user["username"] == test_user.username - - async def test_get_user_by_email(self, async_session, test_user): - """Test getting a user by email.""" - retrieved_user = await crud_users.get( - db=async_session, - email=test_user.email - ) - - 
assert retrieved_user is not None - assert retrieved_user["email"] == test_user.email - - async def test_update_user(self, async_session, test_user): - """Test updating a user.""" - update_data = UserUpdate(name="Updated Name") - - updated_user = await crud_users.update( - db=async_session, - object=update_data, - id=test_user.id - ) - - assert updated_user["name"] == "Updated Name" - assert updated_user["id"] == test_user.id - - async def test_delete_user(self, async_session, test_user): - """Test soft deleting a user.""" - await crud_users.delete(db=async_session, id=test_user.id) - - # User should be soft deleted - deleted_user = await crud_users.get( - db=async_session, - id=test_user.id, - is_deleted=True - ) - - assert deleted_user is not None - assert deleted_user["is_deleted"] is True - - async def test_get_multi_users(self, async_session): - """Test getting multiple users.""" - # Create multiple users - for i in range(5): - user_data = UserCreate( - name=f"User {i}", - username=f"user{i}", - email=f"user{i}@example.com", - password="password123" - ) - await crud_users.create(db=async_session, object=user_data) - - # Get users with pagination - result = await crud_users.get_multi( - db=async_session, - offset=0, - limit=3 - ) - - assert len(result["data"]) == 3 - assert result["total_count"] == 5 - assert result["has_more"] is True +Then `superuser_client` is a logged-in `AsyncClient` for any test that needs admin access. 
+### Resetting the Session Between Tests -@pytest.mark.unit -class TestPostCRUD: - """Test Post CRUD operations.""" - - async def test_create_post(self, async_session, test_user): - """Test creating a post.""" - post_data = PostCreate( - title="Test Post", - content="This is test content" - ) - - post = await crud_posts.create( - db=async_session, - object=post_data, - created_by_user_id=test_user.id - ) - - assert post["title"] == "Test Post" - assert post["content"] == "This is test content" - assert post["created_by_user_id"] == test_user.id - - async def test_get_posts_by_user(self, async_session, test_user): - """Test getting posts by user.""" - # Create multiple posts - for i in range(3): - post_data = PostCreate( - title=f"Post {i}", - content=f"Content {i}" - ) - await crud_posts.create( - db=async_session, - object=post_data, - created_by_user_id=test_user.id - ) - - # Get posts by user - result = await crud_posts.get_multi( - db=async_session, - created_by_user_id=test_user.id - ) - - assert len(result["data"]) == 3 - assert result["total_count"] == 3 -``` +By default, `httpx.AsyncClient` carries cookies for the lifetime of the client fixture. Since `client` is function-scoped, each test starts with no session. If you ever need to log out mid-test, call `await client.post("/api/v1/auth/logout/")` or clear cookies via `client.cookies.clear()`. -## Integration Tests +## CSRF in Tests -### API Endpoint Tests +If `CSRF_ENABLED=true` (the default), state-changing requests need a CSRF token. The boilerplate's CSRF flow uses double-submit cookies — the server sets a cookie, and you echo the value back in a header. -```python -# tests/test_api_users.py -import pytest -from httpx import AsyncClient +Either: +- **Disable CSRF in the test environment** by setting `CSRF_ENABLED=false` in the test fixture. Quick and pragmatic for service-layer integration tests where CSRF isn't the focus. 
+- **Honor the flow** for tests that need to assert it works: + ```python + # Hit a GET first so the server sets the CSRF cookie + await client.get("/api/v1/auth/me/") # any safe endpoint + csrf_token = client.cookies["csrf_token"] # cookie name from your config + response = await client.post( + "/api/v1/widgets/", + json={...}, + headers={"X-CSRF-Token": csrf_token}, + ) + ``` -@pytest.mark.integration -class TestUserAPI: - """Test User API endpoints.""" - - async def test_create_user(self, async_client: AsyncClient): - """Test user creation endpoint.""" - user_data = { - "name": "New User", - "username": "newuser", - "email": "new@example.com", - "password": "SecurePass123!" - } - - response = await async_client.post("/api/v1/users", json=user_data) - assert response.status_code == 201 - - data = response.json() - assert data["name"] == "New User" - assert data["username"] == "newuser" - assert data["email"] == "new@example.com" - assert "hashed_password" not in data - assert "id" in data - - async def test_create_user_duplicate_email(self, async_client: AsyncClient, test_user): - """Test creating user with duplicate email.""" - user_data = { - "name": "Duplicate User", - "username": "duplicateuser", - "email": test_user.email, # Use existing email - "password": "SecurePass123!" 
- } - - response = await async_client.post("/api/v1/users", json=user_data) - assert response.status_code == 409 # Conflict - - async def test_get_users(self, async_client: AsyncClient): - """Test getting users list.""" - response = await async_client.get("/api/v1/users") - assert response.status_code == 200 - - data = response.json() - assert "data" in data - assert "total_count" in data - assert "has_more" in data - assert isinstance(data["data"], list) - - async def test_get_user_by_id(self, async_client: AsyncClient, test_user): - """Test getting specific user.""" - response = await async_client.get(f"/api/v1/users/{test_user.id}") - assert response.status_code == 200 - - data = response.json() - assert data["id"] == test_user.id - assert data["name"] == test_user.name - assert data["username"] == test_user.username - - async def test_get_user_not_found(self, async_client: AsyncClient): - """Test getting non-existent user.""" - response = await async_client.get("/api/v1/users/99999") - assert response.status_code == 404 - - async def test_update_user_authorized(self, async_client: AsyncClient, test_user, auth_headers): - """Test updating user with proper authorization.""" - update_data = {"name": "Updated Name"} - - response = await async_client.patch( - f"/api/v1/users/{test_user.id}", - json=update_data, - headers=auth_headers - ) - assert response.status_code == 200 - - data = response.json() - assert data["name"] == "Updated Name" - assert data["id"] == test_user.id - - async def test_update_user_unauthorized(self, async_client: AsyncClient, test_user): - """Test updating user without authorization.""" - update_data = {"name": "Updated Name"} - - response = await async_client.patch( - f"/api/v1/users/{test_user.id}", - json=update_data - ) - assert response.status_code == 401 - - async def test_delete_user_superuser(self, async_client: AsyncClient, test_user, superuser_headers): - """Test deleting user as superuser.""" - response = await 
async_client.delete( - f"/api/v1/users/{test_user.id}", - headers=superuser_headers - ) - assert response.status_code == 200 - - async def test_delete_user_forbidden(self, async_client: AsyncClient, test_user, auth_headers): - """Test deleting user without superuser privileges.""" - response = await async_client.delete( - f"/api/v1/users/{test_user.id}", - headers=auth_headers - ) - assert response.status_code == 403 +See [Authentication → Sessions](authentication/sessions.md) for the CSRF specifics. +## Testing Cached Endpoints -@pytest.mark.integration -class TestAuthAPI: - """Test Authentication API endpoints.""" - - async def test_login_success(self, async_client: AsyncClient, test_user): - """Test successful login.""" - login_data = { - "username": test_user.username, - "password": "testpassword123" - } - - response = await async_client.post("/api/v1/auth/login", data=login_data) - assert response.status_code == 200 - - data = response.json() - assert "access_token" in data - assert "refresh_token" in data - assert data["token_type"] == "bearer" - - async def test_login_invalid_credentials(self, async_client: AsyncClient, test_user): - """Test login with invalid credentials.""" - login_data = { - "username": test_user.username, - "password": "wrongpassword" - } - - response = await async_client.post("/api/v1/auth/login", data=login_data) - assert response.status_code == 401 - - async def test_get_current_user(self, async_client: AsyncClient, test_user, auth_headers): - """Test getting current user information.""" - response = await async_client.get("/api/v1/auth/me", headers=auth_headers) - assert response.status_code == 200 - - data = response.json() - assert data["id"] == test_user.id - assert data["username"] == test_user.username - - async def test_refresh_token(self, async_client: AsyncClient, test_user): - """Test token refresh.""" - # First login to get refresh token - login_data = { - "username": test_user.username, - "password": "testpassword123" - } 
-        
-        login_response = await async_client.post("/api/v1/auth/login", data=login_data)
-        refresh_token = login_response.json()["refresh_token"]
-        
-        # Use refresh token to get new access token
-        refresh_response = await async_client.post(
-            "/api/v1/auth/refresh",
-            headers={"Authorization": f"Bearer {refresh_token}"}
-        )
-        
-        assert refresh_response.status_code == 200
-        data = refresh_response.json()
-        assert "access_token" in data
+The `@cache` decorator is process-aware: in tests, it talks to whichever cache backend `CACHE_BACKEND` points at. Two strategies:
+
+- **Disable caching** — `CACHE_ENABLED=false` in the test environment. Simplest. The decorator becomes a no-op.
+- **Use a local Redis** — point `CACHE_REDIS_HOST` at `localhost` (or a testcontainer). Useful when the test specifically asserts caching behavior.
+
+For most unit/integration tests, disable. Add explicit cache tests under `tests/integration/` only when you need to verify invalidation behavior.
+
+## Testing Background Tasks
+
+Taskiq tasks shouldn't actually run during tests. Use Taskiq's `InMemoryBroker` to make `.kiq()` calls execute synchronously:
+
+```python
+# tests/conftest.py (additional)
+from taskiq import InMemoryBroker
+
+from src.infrastructure.taskiq import default_broker as real_broker
+
+@pytest_asyncio.fixture(autouse=True)
+async def in_memory_broker(monkeypatch):
+    test_broker = InMemoryBroker()
+    monkeypatch.setattr("src.infrastructure.taskiq.brokers.default_broker", test_broker)
+    monkeypatch.setattr("src.infrastructure.taskiq.default_broker", test_broker)
+    yield test_broker
```
-## Running Tests
+Now `await my_task.kiq(...)` runs the task body in the test process. For tests that specifically assert "the task was scheduled" without running it, swap to a mock broker that records calls instead. 
-### Basic Test Commands +## Running the Suite ```bash -# Run all tests +cd backend + +# Run everything uv run pytest -# Run specific test categories +# Just unit tests (skip the slower integration ones) uv run pytest -m unit + +# Just integration tests uv run pytest -m integration -uv run pytest -m api -# Run tests with coverage -uv run pytest --cov=src --cov-report=html +# Stop on first failure +uv run pytest -x + +# Keep running on failures, show output for tests matching a name +uv run pytest -k "user_login" -v -# Run tests in parallel +# Parallel via pytest-xdist uv run pytest -n auto -# Run specific test file -uv run pytest tests/test_api_users.py +# With coverage (after `uv add --dev pytest-cov`) +uv run pytest --cov=src --cov-report=term-missing +``` -# Run with verbose output -uv run pytest -v +## Continuous Integration -# Run tests matching pattern -uv run pytest -k "test_user" +The repo's `.github/workflows/tests.yml` runs the test suite on PRs (along with linting and type-checking workflows). All three workflows pin the working directory to `backend/` so the same `uv run pytest` works there as locally. -# Run tests and stop on first failure -uv run pytest -x +CI runs in a clean image, which means: -# Run slow tests -uv run pytest -m slow -``` +- **No Docker access by default** — testcontainers needs `docker` available. Either: + - Use the `services:` block in the workflow to start a Postgres container, then point your test conftest at it via env vars + - Or skip integration tests in CI and run them manually before each release +- **Connections to localhost are sandboxed** — anything connecting outside the runner needs explicit network setup -### Test Environment Setup +For most teams, running unit tests in CI and integration tests locally / on a periodic schedule is enough. 
-```bash -# Set up test database -createdb test_db +## Common Mistakes -# Run tests with specific environment -ENVIRONMENT=testing uv run pytest +### "My test isn't actually using the test database" -# Run tests with debug output -uv run pytest -s --log-cli-level=DEBUG -``` +Check that `app.dependency_overrides[async_session] = ...` matches the **same callable** the routes depend on. If a route does `Depends(some_other_db_dep)`, your override of `async_session` won't take effect. Look at the route's source. -## Testing Best Practices +### "Tests pass individually but fail when run together" -### Test Organization +The most common cause: shared state in the database between tests. Either: -- **Separate concerns**: Unit tests for business logic, integration tests for API endpoints -- **Use fixtures**: Create reusable test data and setup -- **Test isolation**: Each test should be independent -- **Clear naming**: Test names should describe what they're testing +- Make every test fixture roll back at the end (the `db_session` fixture above does) +- Use `truncate` between tests instead of `create_all` / `drop_all` (faster on big schemas) -### Test Data +### "Async tests hang" -- **Use factories**: Create test data programmatically -- **Avoid hardcoded values**: Use variables and constants -- **Clean up**: Ensure tests don't leave data behind -- **Realistic data**: Use faker or similar libraries for realistic test data +Almost always missing `asyncio_mode = "auto"` in `pyproject.toml`, or a fixture that's `async def` but not `pytest_asyncio.fixture`-decorated. Both must match. 
-### Assertions +### "Cookies aren't persisting between test calls" -- **Specific assertions**: Test specific behaviors, not just "it works" -- **Multiple assertions**: Test all relevant aspects of the response -- **Error cases**: Test error conditions and edge cases -- **Performance**: Include performance tests for critical paths +`httpx.AsyncClient` only keeps cookies if both calls go through the **same** client instance. If you create a new `AsyncClient` per request, you lose the session. Use the fixture client. -### Mocking +### "FastCRUD returns dicts, not models, in tests too" -```python -# Example of mocking external dependencies -from unittest.mock import patch, AsyncMock +Yes — that's the design. Don't try to `assert isinstance(result, Widget)`. Assert on dict keys: `assert result["name"] == "..."`. -@pytest.mark.unit -async def test_external_api_call(): - """Test function that calls external API.""" - with patch('src.app.services.external_api.make_request') as mock_request: - mock_request.return_value = {"status": "success"} - - result = await some_function_that_calls_external_api() - - assert result["status"] == "success" - mock_request.assert_called_once() -``` +### "Test database has stale schema after model changes" -### Continuous Integration - -```yaml -# .github/workflows/test.yml -name: Tests - -on: [push, pull_request] - -jobs: - test: - runs-on: ubuntu-latest - - services: - postgres: - image: postgres:15 - env: - POSTGRES_USER: test_user - POSTGRES_PASSWORD: test_pass - POSTGRES_DB: test_db - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.11 - - - name: Install dependencies - run: | - pip install uv - uv sync - - - name: Run tests - run: uv run pytest --cov=src --cov-report=xml - - - name: Upload coverage - uses: codecov/codecov-action@v3 - with: - file: 
./coverage.xml -``` +If you're using the `Base.metadata.create_all` shortcut (as in the conftest above), the schema rebuilds on every session. If you've added a fixture that rebuilds at module scope, restart the test session. For long-running test databases, run Alembic migrations in the fixture instead. + +## Key Files + +| Component | Location | +|--------------------------|-------------------------------------------------------------| +| Pytest config | `backend/pyproject.toml` (`[tool.pytest.ini_options]`) | +| Test root | `tests/` | +| Module under test (refs) | `backend/src/modules/user/`, `backend/src/modules/tier/` | +| Settings (test env) | `backend/src/infrastructure/config/settings.py` | +| Models / `Base` | `backend/src/infrastructure/database/models.py` | + +## Next Steps -This testing guide provides comprehensive coverage of testing strategies for the FastAPI boilerplate, ensuring reliable and maintainable code. \ No newline at end of file +- **[Development](development.md)** — broader development workflow +- **[Production](production.md)** — what changes when shipping the test suite to CI +- **[Authentication → Sessions](authentication/sessions.md)** — full session/CSRF flow you'll exercise in tests diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index 3abdf42b..00000000 --- a/mkdocs.yml +++ /dev/null @@ -1,159 +0,0 @@ -site_name: FastAPI Boilerplate -site_description: A production-ready FastAPI boilerplate with async support, JWT authentication, Redis caching, and more. 
-site_author: Benav Labs -site_url: https://github.com/benavlabs/fastapi-boilerplate - -theme: - name: material - font: - text: Ubuntu - logo: assets/FastAPI-boilerplate.png - favicon: assets/FastAPI-boilerplate.png - features: - - navigation.instant - - navigation.instant.prefetch - - navigation.tabs - - navigation.indexes - - search.suggest - - content.code.copy - - content.code.annotate - - navigation.top - - navigation.footer - palette: - - media: "(prefers-color-scheme: light)" - scheme: default - primary: custom - accent: custom - toggle: - icon: material/brightness-7 - name: Switch to dark mode - - - media: "(prefers-color-scheme: dark)" - scheme: slate - primary: custom - accent: custom - toggle: - icon: material/brightness-4 - name: Switch to light mode - -plugins: - - search - - mkdocstrings: - handlers: - python: - rendering: - show_source: true - -nav: - - Home: index.md - - Getting Started: - - Overview: getting-started/index.md - - Installation: getting-started/installation.md - - Configuration: getting-started/configuration.md - - First Run: getting-started/first-run.md - - User Guide: - - Overview: user-guide/index.md - - Project Structure: user-guide/project-structure.md - - Configuration: - - Overview: user-guide/configuration/index.md - - Environment Variables: user-guide/configuration/environment-variables.md - - Settings Classes: user-guide/configuration/settings-classes.md - - Docker Setup: user-guide/configuration/docker-setup.md - - Environment-Specific: user-guide/configuration/environment-specific.md - - Database: - - Overview: user-guide/database/index.md - - Models: user-guide/database/models.md - - Schemas: user-guide/database/schemas.md - - CRUD Operations: user-guide/database/crud.md - - Migrations: user-guide/database/migrations.md - - API: - - Overview: user-guide/api/index.md - - Endpoints: user-guide/api/endpoints.md - - Pagination: user-guide/api/pagination.md - - Exceptions: user-guide/api/exceptions.md - - Versioning: 
user-guide/api/versioning.md - - Authentication: - - Overview: user-guide/authentication/index.md - - JWT Tokens: user-guide/authentication/jwt-tokens.md - - User Management: user-guide/authentication/user-management.md - - Permissions: user-guide/authentication/permissions.md - - Admin Panel: - - user-guide/admin-panel/index.md - - Configuration: user-guide/admin-panel/configuration.md - - Adding Models: user-guide/admin-panel/adding-models.md - - User Management: user-guide/admin-panel/user-management.md - - Caching: - - Overview: user-guide/caching/index.md - - Redis Cache: user-guide/caching/redis-cache.md - - Client Cache: user-guide/caching/client-cache.md - - Cache Strategies: user-guide/caching/cache-strategies.md - - Background Tasks: user-guide/background-tasks/index.md - - Rate Limiting: user-guide/rate-limiting/index.md - - Development: user-guide/development.md - - Production: user-guide/production.md - - Testing: user-guide/testing.md - - Community: community.md - # - Examples: - # - Overview: examples/index.md - # - Basic CRUD: examples/basic-crud.md - # - Authentication Flow: examples/authentication-flow.md - # - Background Job Workflow: examples/background-job-workflow.md - # - Caching Patterns: examples/caching-patterns.md - # - Production Setup: examples/production-setup.md - # - Reference: - # - Overview: reference/index.md - # - API Reference: reference/api-reference.md - # - Configuration Reference: reference/configuration-reference.md - # - Database Schema: reference/database-schema.md - # - Middleware Reference: reference/middleware-reference.md - # - Dependencies Reference: reference/dependencies-reference.md - # - Contributing: - # - Overview: contributing/index.md - # - Development Setup: contributing/development-setup.md - # - Coding Standards: contributing/coding-standards.md - # - Pull Request Process: contributing/pull-request-process.md - # - Testing Guidelines: contributing/testing-guidelines.md - # - Migration Guides: - # - 
Overview: migration-guides/index.md - # - Version Migrations: migration-guides/from-v1-to-v2.md - # - From Other Frameworks: migration-guides/from-other-frameworks.md - # - FAQ: faq.md - -markdown_extensions: - - admonition - - codehilite - - toc: - permalink: true - - pymdownx.details - - pymdownx.highlight: - anchor_linenums: true - line_spans: __span - pygments_lang_class: true - - pymdownx.inlinehilite - - pymdownx.snippets - - pymdownx.superfences - - pymdownx.tabbed: - alternate_style: true - - pymdownx.tasklist: - custom_checkbox: true - - attr_list - - md_in_html - -extra: - social: - - icon: fontawesome/brands/github - link: https://github.com/benavlabs/fastapi-boilerplate - - icon: fontawesome/brands/python - link: https://pypi.org/project/fastapi/ - version: - provider: mike - analytics: - provider: google - property: !ENV [GOOGLE_ANALYTICS_KEY, ""] - -extra_css: - - stylesheets/extra.css - -repo_name: benavlabs/fastapi-boilerplate -repo_url: https://github.com/benavlabs/fastapi-boilerplate -edit_uri: edit/main/docs/ diff --git a/pyproject.toml b/pyproject.toml index fcfdcf85..0fe84d5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,124 +1,25 @@ -[project] -name = "fastapi-boilerplate" -version = "0.1.0" -description = "Batteries-included FastAPI starter with production-ready defaults, optional modules, and clear docs." 
-authors = [{ name = "Benav Labs", email = "contact@benav.io" }] -license = { text = "MIT" } -readme = "README.md" -requires-python = ">=3.11, <4" -dependencies = [ - "python-dotenv>=1.0.0", - "pydantic[email]>=2.12.5", - "fastapi>=0.109.1", - "uvicorn>=0.27.0", - "uvloop>=0.19.0", - "httptools>=0.7.1", - "uuid>=1.30", - "uuid6>=2024.1.12", - "alembic>=1.13.1", - "asyncpg>=0.29.0", - "SQLAlchemy-Utils>=0.41.1", - "python-jose>=3.3.0", - "SQLAlchemy>=2.0.25", - "python-multipart>=0.0.9", - "greenlet>=2.0.2", - "httpx>=0.26.0", - "pydantic-settings>=2.12.0", - "redis>=5.0.1", - "arq>=0.25.0", - "bcrypt>=4.1.1", - "psycopg2-binary>=2.9.9", - "fastcrud>=0.19.2", - "crudadmin>=0.4.2", - "gunicorn>=23.0.0", - "ruff>=0.11.13", - "mypy>=1.16.0", - "structlog>=25.1.0,<=25.5.0", - "rich>=14.2.0", -] - -[project.optional-dependencies] -dev = [ - "pytest>=7.4.2", - "pytest-mock>=3.14.0", - "faker>=26.0.0", - "mypy>=1.8.0", - "types-redis>=4.6.0", - "ruff>=0.1.0", -] - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.sdist] -include = ["src/"] - -[tool.hatch.build.targets.wheel] -include = ["src/"] -packages = ["src"] - -[tool.ruff] -target-version = "py311" -line-length = 120 -fix = true - -[tool.ruff.lint] -select = [ - # https://docs.astral.sh/ruff/rules/#pyflakes-f - "F", # Pyflakes - # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w - "E", # pycodestyle - "W", # Warning - # https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 - # https://docs.astral.sh/ruff/rules/#mccabe-c90 - "C", # Complexity (mccabe+) & comprehensions - # https://docs.astral.sh/ruff/rules/#pyupgrade-up - "UP", # pyupgrade - # https://docs.astral.sh/ruff/rules/#isort-i - "I", # isort -] -ignore = [ - # https://docs.astral.sh/ruff/rules/#pycodestyle-e-w - "E402", # module level import not at top of file - # https://docs.astral.sh/ruff/rules/#pyupgrade-up - "UP006", # use-pep585-annotation - "UP007", # use-pep604-annotation - "E741", # 
Ambiguous variable name - # "UP035", # deprecated-assertion -] +# Workspace root. +# +# This file exists so `uv` recognizes the repo as a workspace and can resolve +# both the `backend/` (deployable application) and `cli/` (developer/operator +# tool) packages from a single venv. It is NOT a deployable package itself — +# nothing here ships to production. +# +# Most commands work from anywhere in the repo: +# +# uv sync # syncs the workspace (one venv shared by all members) +# uv run bp ... # runs the CLI from cli/, accessible from any cwd +# uv run pytest # runs the backend test suite +# +# To install `bp` machine-wide so it works outside this repo: +# +# uv tool install --editable ./cli -[tool.ruff.lint.per-file-ignores] -"__init__.py" = [ - "F401", # unused import - "F403", # star imports -] - -[tool.ruff.lint.mccabe] -max-complexity = 24 - -[tool.ruff.lint.pydocstyle] -convention = "numpy" - -[tool.pytest.ini_options] -filterwarnings = [ - "ignore::PendingDeprecationWarning:starlette.formparsers", -] - -[dependency-groups] -dev = [ - "pre-commit>=4.3.0", - "pytest-asyncio>=1.0.0", -] - -[tool.mypy] -python_version = "3.11" -warn_return_any = true -warn_unused_configs = true -ignore_missing_imports = true -mypy_path = "src" -explicit_package_bases = true +[project] +name = "fastapi-boilerplate-workspace" +version = "0" +description = "Workspace root — see backend/ and cli/ for actual packages." 
+requires-python = ">=3.11" -[[tool.mypy.overrides]] -module = "src.app.*" -disallow_untyped_defs = true +[tool.uv.workspace] +members = ["backend", "cli"] diff --git a/scripts/gunicorn_managing_uvicorn_workers/.env.example b/scripts/gunicorn_managing_uvicorn_workers/.env.example deleted file mode 100644 index 1c1e5859..00000000 --- a/scripts/gunicorn_managing_uvicorn_workers/.env.example +++ /dev/null @@ -1,67 +0,0 @@ -# ============================================================================ -# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS -# ============================================================================ -# This file contains example values for development/testing purposes only. -# -# SECURITY CRITICAL: Before deploying to production, you MUST: -# 1. Copy this file to src/.env -# 2. Generate a new SECRET_KEY using: openssl rand -hex 32 -# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.) -# 4. Update all sensitive configuration values -# -# Using these example values in production is a SECURITY RISK. -# ============================================================================ - -# ------------- app settings ------------- -APP_NAME="My Project" -APP_DESCRIPTION="My Project Description" -APP_VERSION="0.1" -CONTACT_NAME="Me" -CONTACT_EMAIL="my.email@example.com" -LICENSE_NAME="MIT" - -# ------------- database ------------- -POSTGRES_USER="postgres" -POSTGRES_PASSWORD=1234 -POSTGRES_SERVER="db" -POSTGRES_PORT=5432 -POSTGRES_DB="postgres" -POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://" - -# ------------- crypt ------------- -SECRET_KEY=953843cd400d99a039698e7feb46ca1b3e33c44fee2c24c6d88cf0f0b290fb61 -ALGORITHM=HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=60 - -# ------------- admin ------------- -ADMIN_NAME="admin" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="Str1ngst!" 
- -# ------------- redis cache ------------- -REDIS_CACHE_HOST="redis" -REDIS_CACHE_PORT=6379 - -# ------------- redis queue ------------- -REDIS_QUEUE_HOST="redis" -REDIS_QUEUE_PORT=6379 - -# ------------- redis rate limit ------------- -REDIS_RATE_LIMIT_HOST="redis" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- client side cache ------------- -CLIENT_CACHE_MAX_AGE=60 - -# ------------- test ------------- -TEST_NAME="Tester User" -TEST_EMAIL="test@tester.com" -TEST_USERNAME="testeruser" -TEST_PASSWORD="Str1ngT3st!" - -# ------------- environment ------------- -ENVIRONMENT="staging" - -# ------------- first tier ------------- -TIER_NAME="free" diff --git a/scripts/gunicorn_managing_uvicorn_workers/Dockerfile b/scripts/gunicorn_managing_uvicorn_workers/Dockerfile deleted file mode 100644 index 98d55fcf..00000000 --- a/scripts/gunicorn_managing_uvicorn_workers/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# --------- requirements --------- - -FROM python:3.11 as requirements-stage - -WORKDIR /tmp - -RUN pip install poetry - -COPY ./pyproject.toml ./poetry.lock* /tmp/ - -RUN poetry export -f requirements.txt --output requirements.txt --without-hashes - - -# --------- final image build --------- -FROM python:3.11 - -WORKDIR /code - -COPY --from=requirements-stage /tmp/requirements.txt /code/requirements.txt - -RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt - -COPY ./src/app /code/app - -# -------- replace with comment to run with gunicorn -------- -# CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] -CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"] diff --git a/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml b/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml deleted file mode 100644 index 8b4cefdf..00000000 --- a/scripts/gunicorn_managing_uvicorn_workers/docker-compose.yml +++ /dev/null @@ -1,112 +0,0 @@ -services: - web: - build: - 
context: . - dockerfile: Dockerfile - # -------- Both of the following commands should be commented to run with nginx -------- - - # -------- replace with comment to run with gunicorn or just uvicorn -------- - # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 - env_file: - - ./src/.env - # -------- replace with expose if you are using nginx -------- - ports: - - "8000:8000" - # expose: - # - "8000" - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - worker: - build: - context: . - dockerfile: Dockerfile - command: arq app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - expose: - - "5432" - - redis: - image: redis:alpine - volumes: - - redis-data:/data - expose: - - "6379" - - #-------- uncomment to run with nginx -------- - # nginx: - # image: nginx:latest - # ports: - # - "80:80" - # volumes: - # - ./default.conf:/etc/nginx/conf.d/default.conf - # depends_on: - # - web - - #-------- uncomment to create first superuser -------- - create_superuser: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - - web - command: python -m src.scripts.create_first_superuser - volumes: - - ./src:/code/src - - #-------- uncomment to run tests -------- - # pytest: - # build: - # context: . - # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - db - # - create_superuser - # - redis - # command: python -m pytest ./tests - # volumes: - # - .:/code - - #-------- uncomment to create first tier -------- - # create_tier: - # build: - # context: . 
- # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - create_superuser - # - db - # - web - # command: python -m src.scripts.create_first_tier - # volumes: - # - ./src:/code/src - -volumes: - postgres-data: - redis-data: - \ No newline at end of file diff --git a/scripts/local_with_uvicorn/.env.example b/scripts/local_with_uvicorn/.env.example deleted file mode 100644 index c4bf803f..00000000 --- a/scripts/local_with_uvicorn/.env.example +++ /dev/null @@ -1,72 +0,0 @@ -# ============================================================================ -# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS -# ============================================================================ -# This file contains example values for development/testing purposes only. -# -# SECURITY CRITICAL: Before deploying to production, you MUST: -# 1. Copy this file to src/.env -# 2. Generate a new SECRET_KEY using: openssl rand -hex 32 -# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.) -# 4. Update all sensitive configuration values -# -# Using these example values in production is a SECURITY RISK. -# ============================================================================ - -# ------------- app settings ------------- -APP_NAME="My Project" -APP_DESCRIPTION="My Project Description" -APP_VERSION="0.1" -CONTACT_NAME="Me" -CONTACT_EMAIL="my.email@example.com" -LICENSE_NAME="MIT" - -# ------------- database ------------- -POSTGRES_USER="postgres" -POSTGRES_PASSWORD=1234 -POSTGRES_SERVER="db" -POSTGRES_PORT=5432 -POSTGRES_DB="postgres" -POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://" - -# ------------- crypt ------------- -SECRET_KEY=de2132a4a3a029d6a93a2aefcb519f0219990f92ca258a7c5ed938a444dbe1c8 -ALGORITHM=HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=60 - -# ------------- admin ------------- -ADMIN_NAME="admin" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="Str1ngst!" 
- -# ------------- redis cache ------------- -REDIS_CACHE_HOST="redis" -REDIS_CACHE_PORT=6379 - -# ------------- redis queue ------------- -REDIS_QUEUE_HOST="redis" -REDIS_QUEUE_PORT=6379 - -# ------------- redis rate limit ------------- -REDIS_RATE_LIMIT_HOST="redis" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- client side cache ------------- -CLIENT_CACHE_MAX_AGE=60 - -# ------------- CORS ------------- -CORS_ORIGINS=["*"] -CORS_METHODS=["*"] -CORS_HEADERS=["*"] - -# ------------- test ------------- -TEST_NAME="Tester User" -TEST_EMAIL="test@tester.com" -TEST_USERNAME="testeruser" -TEST_PASSWORD="Str1ngT3st!" - -# ------------- environment ------------- -ENVIRONMENT="local" - -# ------------- first tier ------------- -TIER_NAME="free" diff --git a/scripts/local_with_uvicorn/Dockerfile b/scripts/local_with_uvicorn/Dockerfile deleted file mode 100644 index 2c3795ab..00000000 --- a/scripts/local_with_uvicorn/Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -# --------- Builder Stage --------- -FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS builder - -# Set environment variables for uv -ENV UV_COMPILE_BYTECODE=1 -ENV UV_LINK_MODE=copy - -WORKDIR /app - -# Install dependencies first (for better layer caching) -RUN --mount=type=cache,target=/root/.cache/uv \ - --mount=type=bind,source=uv.lock,target=uv.lock \ - --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ - uv sync --locked --no-install-project - -# Copy the project source code -COPY . 
/app - -# Install the project in non-editable mode -RUN --mount=type=cache,target=/root/.cache/uv \ - uv sync --locked --no-editable - -# --------- Final Stage --------- -FROM python:3.11-slim-bookworm - -# Create a non-root user for security -RUN groupadd --gid 1000 app \ - && useradd --uid 1000 --gid app --shell /bin/bash --create-home app - -# Copy the virtual environment from the builder stage -COPY --from=builder --chown=app:app /app/.venv /app/.venv - -# Ensure the virtual environment is in the PATH -ENV PATH="/app/.venv/bin:$PATH" - -# Switch to the non-root user -USER app - -# Set the working directory -WORKDIR /code - -# -------- replace with comment to run with gunicorn -------- -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] -# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"] diff --git a/scripts/local_with_uvicorn/docker-compose.yml b/scripts/local_with_uvicorn/docker-compose.yml deleted file mode 100644 index 14cf968d..00000000 --- a/scripts/local_with_uvicorn/docker-compose.yml +++ /dev/null @@ -1,112 +0,0 @@ -services: - web: - build: - context: . - dockerfile: Dockerfile - # -------- Both of the following commands should be commented to run with nginx -------- - - # -------- replace with comment to run with gunicorn -------- - command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - # command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 - env_file: - - ./src/.env - # -------- replace with expose if you are using nginx -------- - ports: - - "8000:8000" - # expose: - # - "8000" - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - worker: - build: - context: . 
- dockerfile: Dockerfile - command: arq app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - expose: - - "5432" - - redis: - image: redis:alpine - volumes: - - redis-data:/data - expose: - - "6379" - - #-------- uncomment to run with nginx -------- - # nginx: - # image: nginx:latest - # ports: - # - "80:80" - # volumes: - # - ./default.conf:/etc/nginx/conf.d/default.conf - # depends_on: - # - web - - #-------- uncomment to create first superuser -------- - create_superuser: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - - web - command: python -m src.scripts.create_first_superuser - volumes: - - ./src:/code/src - - #-------- uncomment to run tests -------- - pytest: - build: - context: . - dockerfile: Dockerfile - env_file: - - ./src/.env - depends_on: - - db - - create_superuser - - redis - command: python -m pytest ./tests - volumes: - - .:/code - - #-------- uncomment to create first tier -------- - # create_tier: - # build: - # context: . 
- # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - create_superuser - # - db - # - web - # command: python -m src.scripts.create_first_tier - # volumes: - # - ./src:/code/src - -volumes: - postgres-data: - redis-data: - diff --git a/scripts/production_with_nginx/.env.example b/scripts/production_with_nginx/.env.example deleted file mode 100644 index 6f9c5d68..00000000 --- a/scripts/production_with_nginx/.env.example +++ /dev/null @@ -1,67 +0,0 @@ -# ============================================================================ -# WARNING: EXAMPLE CONFIGURATION - DO NOT USE IN PRODUCTION AS-IS -# ============================================================================ -# This file contains example values for development/testing purposes only. -# -# SECURITY CRITICAL: Before deploying to production, you MUST: -# 1. Copy this file to src/.env -# 2. Generate a new SECRET_KEY using: openssl rand -hex 32 -# 3. Change all passwords (POSTGRES_PASSWORD, ADMIN_PASSWORD, etc.) -# 4. Update all sensitive configuration values -# -# Using these example values in production is a SECURITY RISK. -# ============================================================================ - -# ------------- app settings ------------- -APP_NAME="My Project" -APP_DESCRIPTION="My Project Description" -APP_VERSION="0.1" -CONTACT_NAME="Me" -CONTACT_EMAIL="my.email@example.com" -LICENSE_NAME="MIT" - -# ------------- database ------------- -POSTGRES_USER="postgres" -POSTGRES_PASSWORD=1234 -POSTGRES_SERVER="db" -POSTGRES_PORT=5432 -POSTGRES_DB="postgres" -POSTGRES_ASYNC_PREFIX="postgresql+asyncpg://" - -# ------------- crypt ------------- -SECRET_KEY=db210482bea9aae930b00b17f3449a21340c281ac7e1f2a4e33e2c5cd77f291e -ALGORITHM=HS256 -ACCESS_TOKEN_EXPIRE_MINUTES=60 - -# ------------- admin ------------- -ADMIN_NAME="admin" -ADMIN_EMAIL="admin@example.com" -ADMIN_USERNAME="admin" -ADMIN_PASSWORD="Str1ngst!" 
- -# ------------- redis cache ------------- -REDIS_CACHE_HOST="redis" -REDIS_CACHE_PORT=6379 - -# ------------- redis queue ------------- -REDIS_QUEUE_HOST="redis" -REDIS_QUEUE_PORT=6379 - -# ------------- redis rate limit ------------- -REDIS_RATE_LIMIT_HOST="redis" -REDIS_RATE_LIMIT_PORT=6379 - -# ------------- client side cache ------------- -CLIENT_CACHE_MAX_AGE=60 - -# ------------- test ------------- -TEST_NAME="Tester User" -TEST_EMAIL="test@tester.com" -TEST_USERNAME="testeruser" -TEST_PASSWORD="Str1ngT3st!" - -# ------------- environment ------------- -ENVIRONMENT="production" - -# ------------- first tier ------------- -TIER_NAME="free" diff --git a/scripts/production_with_nginx/Dockerfile b/scripts/production_with_nginx/Dockerfile deleted file mode 100644 index 8b8ccfee..00000000 --- a/scripts/production_with_nginx/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# --------- requirements --------- - -FROM python:3.11 as requirements-stage - -WORKDIR /tmp - -RUN pip install poetry - -COPY ./pyproject.toml ./poetry.lock* /tmp/ - -RUN poetry export -f requirements.txt --output requirements.txt --without-hashes - - -# --------- final image build --------- -FROM python:3.11 - -WORKDIR /code - -COPY --from=requirements-stage /tmp/requirements.txt /code/requirements.txt - -RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt - -COPY ./src/app /code/app - -# -------- replace with comment to run with gunicorn -------- -CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] -# CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8000"] diff --git a/scripts/production_with_nginx/docker-compose.yml b/scripts/production_with_nginx/docker-compose.yml deleted file mode 100644 index 77c62967..00000000 --- a/scripts/production_with_nginx/docker-compose.yml +++ /dev/null @@ -1,110 +0,0 @@ -services: - web: - build: - context: . 
- dockerfile: Dockerfile - # -------- Both of the following commands should be commented to run with nginx -------- - - # -------- replace with comment to run with gunicorn -------- - # command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload - command: gunicorn app.main:app -w 4 -k uvicorn.workers.UvicornWorker -b 0.0.0.0:8000 - env_file: - - ./src/.env - # -------- replace ports with expose if you are using nginx -------- - # ports: - # - "8000:8000" - expose: - - "8000" - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - worker: - build: - context: . - dockerfile: Dockerfile - command: arq app.core.worker.settings.WorkerSettings - env_file: - - ./src/.env - depends_on: - - db - - redis - volumes: - - ./src/app:/code/app - - ./src/.env:/code/.env - - db: - image: postgres:13 - env_file: - - ./src/.env - volumes: - - postgres-data:/var/lib/postgresql/data - expose: - - "5432" - - redis: - image: redis:alpine - volumes: - - redis-data:/data - expose: - - "6379" - - #-------- uncomment to run with nginx -------- - nginx: - image: nginx:latest - ports: - - "80:80" - volumes: - - ./default.conf:/etc/nginx/conf.d/default.conf - depends_on: - - web - - #-------- uncomment to create first superuser -------- - # create_superuser: - # build: - # context: . - # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - db - # - web - # command: python -m src.scripts.create_first_superuser - # volumes: - # - ./src:/code/src - - #-------- uncomment to run tests -------- - # pytest: - # build: - # context: . - # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - web - # - redis - # command: python -m pytest ./tests - # volumes: - # - .:/code - - #-------- uncomment to create first tier -------- - # create_tier: - # build: - # context: . 
- # dockerfile: Dockerfile - # env_file: - # - ./src/.env - # depends_on: - # - create_superuser - # - db - # - web - # command: python -m src.scripts.create_first_tier - # volumes: - # - ./src:/code/src - -volumes: - postgres-data: - redis-data: diff --git a/setup.py b/setup.py deleted file mode 100755 index ea0c717e..00000000 --- a/setup.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/env python3 -""" -FastAPI Boilerplate Setup Script - -Automates copying the correct configuration files for different deployment scenarios. -""" - -import shutil -import sys -from pathlib import Path - -DEPLOYMENTS = { - "local": { - "name": "Local development with Uvicorn", - "description": "Auto-reload enabled, development-friendly", - "path": "scripts/local_with_uvicorn", - }, - "staging": { - "name": "Staging with Gunicorn managing Uvicorn workers", - "description": "Production-like setup for testing", - "path": "scripts/gunicorn_managing_uvicorn_workers", - }, - "production": { - "name": "Production with NGINX", - "description": "Full production setup with reverse proxy", - "path": "scripts/production_with_nginx", - }, -} - - -def show_help(): - """Display help information""" - print("FastAPI Boilerplate Setup") - print("=" * 25) - print() - print("Usage: python setup.py ") - print() - print("Available deployment types:") - for key, config in DEPLOYMENTS.items(): - print(f" {key:12} - {config['name']}") - print(f" {' ' * 12} {config['description']}") - print() - print("Examples:") - print(" python setup.py local # Set up for local development") - print(" python setup.py staging # Set up for staging environment") - print(" python setup.py production # Set up for production deployment") - - -def copy_files(deployment_type: str): - """Copy configuration files for the specified deployment type""" - if deployment_type not in DEPLOYMENTS: - print(f"❌ Unknown deployment type: {deployment_type}") - print() - show_help() - return False - - config = DEPLOYMENTS[deployment_type] - 
source_path = Path(config["path"]) - - if not source_path.exists(): - print(f"❌ Configuration path not found: {source_path}") - return False - - print(f"🚀 Setting up {config['name']}...") - print(f" {config['description']}") - print() - - files_to_copy = [ - ("Dockerfile", "Dockerfile"), - ("docker-compose.yml", "docker-compose.yml"), - (".env.example", "src/.env"), - ] - - success = True - for source_file, dest_file in files_to_copy: - source = source_path / source_file - dest = Path(dest_file) - - if not source.exists(): - print(f"⚠️ Warning: {source} not found, skipping...") - continue - - try: - dest.parent.mkdir(parents=True, exist_ok=True) - - shutil.copy2(source, dest) - print(f"✅ Copied {source} → {dest}") - - except Exception as e: - print(f"❌ Failed to copy {source} → {dest}: {e}") - success = False - - if success: - print() - print("🎉 Setup complete!") - print() - - if deployment_type in ["staging", "production"]: - print("⚠️ IMPORTANT: Update the .env file with your production values:") - print(" - Generate a new SECRET_KEY: openssl rand -hex 32") - print(" - Change all passwords and sensitive values") - print() - - print("Next steps:") - print(" docker compose up") - - if deployment_type == "local": - print(" open http://127.0.0.1:8000/docs") - elif deployment_type == "production": - print(" open http://localhost") - - return True - - return False - - -def interactive_setup(): - """Interactive setup when no arguments provided""" - print("FastAPI Boilerplate Setup") - print("=" * 25) - print() - print("Choose your deployment type:") - print() - - options = list(DEPLOYMENTS.keys()) - for i, key in enumerate(options, 1): - config = DEPLOYMENTS[key] - print(f" {i}. 
{config['name']}") - print(f" {config['description']}") - print() - - while True: - try: - choice = input(f"Enter your choice (1-{len(options)}): ").strip() - - if choice.isdigit(): - choice_num = int(choice) - if 1 <= choice_num <= len(options): - return options[choice_num - 1] - - if choice.lower() in DEPLOYMENTS: - return choice.lower() - - print(f"❌ Invalid choice. Please enter 1-{len(options)} or the deployment name.") - - except KeyboardInterrupt: - print("\n\n👋 Setup cancelled.") - return None - except EOFError: - print("\n\n👋 Setup cancelled.") - return None - - -def main(): - """Main entry point""" - if len(sys.argv) > 1 and sys.argv[1] in ["-h", "--help", "help"]: - show_help() - return - - if len(sys.argv) == 2: - deployment_type = sys.argv[1].lower() - elif len(sys.argv) == 1: - deployment_type = interactive_setup() - if deployment_type is None: - return - else: - show_help() - return - - success = copy_files(deployment_type) - - if not success: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/src/alembic.ini b/src/alembic.ini deleted file mode 100644 index 07489da9..00000000 --- a/src/alembic.ini +++ /dev/null @@ -1,116 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = migrations - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. 
-# If specified, requires the python-dateutil library that can be -# installed by adding `alembic[tz]` to the pip requirements -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to migrations/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = driver://user:pass@localhost/dbname - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/src/app/admin/initialize.py b/src/app/admin/initialize.py deleted file mode 100644 index d1531d8f..00000000 --- a/src/app/admin/initialize.py +++ /dev/null @@ -1,53 +0,0 @@ -from typing import Optional - -from crudadmin import CRUDAdmin - -from ..core.config import EnvironmentOption, settings -from ..core.db.database import async_get_db -from .views import register_admin_views - - -def create_admin_interface() -> Optional[CRUDAdmin]: - """Create and configure the admin interface.""" - if not settings.CRUD_ADMIN_ENABLED: - return None - - session_backend = "memory" - redis_config = None - - if settings.CRUD_ADMIN_REDIS_ENABLED: - session_backend = "redis" - redis_config = { - "host": settings.CRUD_ADMIN_REDIS_HOST, - "port": settings.CRUD_ADMIN_REDIS_PORT, - "db": settings.CRUD_ADMIN_REDIS_DB, - "password": settings.CRUD_ADMIN_REDIS_PASSWORD if settings.CRUD_ADMIN_REDIS_PASSWORD != "None" else None, - } - - admin = CRUDAdmin( - 
session=async_get_db, - SECRET_KEY=settings.SECRET_KEY.get_secret_value(), - mount_path=settings.CRUD_ADMIN_MOUNT_PATH, - session_backend=session_backend, - redis_config=redis_config, - allowed_ips=settings.CRUD_ADMIN_ALLOWED_IPS_LIST if settings.CRUD_ADMIN_ALLOWED_IPS_LIST else None, - allowed_networks=settings.CRUD_ADMIN_ALLOWED_NETWORKS_LIST - if settings.CRUD_ADMIN_ALLOWED_NETWORKS_LIST - else None, - max_sessions_per_user=settings.CRUD_ADMIN_MAX_SESSIONS, - session_timeout_minutes=settings.CRUD_ADMIN_SESSION_TIMEOUT, - secure_cookies=settings.SESSION_SECURE_COOKIES, - enforce_https=settings.ENVIRONMENT == EnvironmentOption.PRODUCTION, - track_events=settings.CRUD_ADMIN_TRACK_EVENTS, - track_sessions_in_db=settings.CRUD_ADMIN_TRACK_SESSIONS, - initial_admin={ - "username": settings.ADMIN_USERNAME, - "password": settings.ADMIN_PASSWORD, - } - if settings.ADMIN_USERNAME and settings.ADMIN_PASSWORD - else None, - ) - - register_admin_views(admin) - - return admin diff --git a/src/app/admin/views.py b/src/app/admin/views.py deleted file mode 100644 index 3e4d935b..00000000 --- a/src/app/admin/views.py +++ /dev/null @@ -1,61 +0,0 @@ -from typing import Annotated - -from crudadmin import CRUDAdmin -from crudadmin.admin_interface.model_view import PasswordTransformer -from pydantic import BaseModel, Field - -from ..core.security import get_password_hash -from ..models.post import Post -from ..models.tier import Tier -from ..models.user import User -from ..schemas.post import PostUpdate -from ..schemas.tier import TierCreate, TierUpdate -from ..schemas.user import UserCreate, UserCreateInternal, UserUpdate - - -class PostCreateAdmin(BaseModel): - title: Annotated[str, Field(min_length=2, max_length=30, examples=["This is my post"])] - text: Annotated[str, Field(min_length=1, max_length=63206, examples=["This is the content of my post."])] - created_by_user_id: int - media_url: Annotated[ - str | None, - Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", 
examples=["https://www.postimageurl.com"], default=None), - ] - - -def register_admin_views(admin: CRUDAdmin) -> None: - """Register all models and their schemas with the admin interface. - - This function adds all available models to the admin interface with appropriate - schemas and permissions. - """ - - password_transformer = PasswordTransformer( - password_field="password", - hashed_field="hashed_password", - hash_function=get_password_hash, - required_fields=["name", "username", "email"], - ) - - admin.add_view( - model=User, - create_schema=UserCreate, - update_schema=UserUpdate, - update_internal_schema=UserCreateInternal, - password_transformer=password_transformer, - allowed_actions={"view", "create", "update"}, - ) - - admin.add_view( - model=Tier, - create_schema=TierCreate, - update_schema=TierUpdate, - allowed_actions={"view", "create", "update", "delete"}, - ) - - admin.add_view( - model=Post, - create_schema=PostCreateAdmin, - update_schema=PostUpdate, - allowed_actions={"view", "create", "update", "delete"}, - ) diff --git a/src/app/api/dependencies.py b/src/app/api/dependencies.py deleted file mode 100644 index 5fead48a..00000000 --- a/src/app/api/dependencies.py +++ /dev/null @@ -1,106 +0,0 @@ -from typing import Annotated, Any - -from fastapi import Depends, HTTPException, Request -from sqlalchemy.ext.asyncio import AsyncSession - -from ..core.config import settings -from ..core.db.database import async_get_db -from ..core.exceptions.http_exceptions import ForbiddenException, RateLimitException, UnauthorizedException -from ..core.logger import logging -from ..core.security import TokenType, oauth2_scheme, verify_token -from ..core.utils.rate_limit import rate_limiter -from ..crud.crud_rate_limit import crud_rate_limits -from ..crud.crud_tier import crud_tiers -from ..crud.crud_users import crud_users -from ..schemas.rate_limit import RateLimitRead, sanitize_path -from ..schemas.tier import TierRead - -logger = logging.getLogger(__name__) - 
-DEFAULT_LIMIT = settings.DEFAULT_RATE_LIMIT_LIMIT -DEFAULT_PERIOD = settings.DEFAULT_RATE_LIMIT_PERIOD - - -async def get_current_user( - token: Annotated[str, Depends(oauth2_scheme)], db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - token_data = await verify_token(token, TokenType.ACCESS, db) - if token_data is None: - raise UnauthorizedException("User not authenticated.") - - if "@" in token_data.username_or_email: - user = await crud_users.get(db=db, email=token_data.username_or_email, is_deleted=False) - else: - user = await crud_users.get(db=db, username=token_data.username_or_email, is_deleted=False) - - if user: - return user - - raise UnauthorizedException("User not authenticated.") - - -async def get_optional_user(request: Request, db: AsyncSession = Depends(async_get_db)) -> dict | None: - token = request.headers.get("Authorization") - if not token: - return None - - try: - token_type, _, token_value = token.partition(" ") - if token_type.lower() != "bearer" or not token_value: - return None - - token_data = await verify_token(token_value, TokenType.ACCESS, db) - if token_data is None: - return None - - return await get_current_user(token_value, db=db) - - except HTTPException as http_exc: - if http_exc.status_code != 401: - logger.error(f"Unexpected HTTPException in get_optional_user: {http_exc.detail}") - return None - - except Exception as exc: - logger.error(f"Unexpected error in get_optional_user: {exc}") - return None - - -async def get_current_superuser(current_user: Annotated[dict, Depends(get_current_user)]) -> dict: - if not current_user["is_superuser"]: - raise ForbiddenException("You do not have enough privileges.") - - return current_user - - -async def rate_limiter_dependency( - request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], user: dict | None = Depends(get_optional_user) -) -> None: - if hasattr(request.app.state, "initialization_complete"): - await 
request.app.state.initialization_complete.wait() - - path = sanitize_path(request.url.path) - if user: - user_id = user["id"] - tier = await crud_tiers.get(db, id=user["tier_id"], schema_to_select=TierRead) - if tier: - rate_limit = await crud_rate_limits.get( - db=db, tier_id=tier["id"], path=path, schema_to_select=RateLimitRead - ) - if rate_limit: - limit, period = rate_limit["limit"], rate_limit["period"] - else: - logger.warning( - f"User {user_id} with tier '{tier['name']}' has no specific rate limit for path '{path}'. \ - Applying default rate limit." - ) - limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD - else: - logger.warning(f"User {user_id} has no assigned tier. Applying default rate limit.") - limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD - else: - user_id = request.client.host if request.client else "unknown" - limit, period = DEFAULT_LIMIT, DEFAULT_PERIOD - - is_limited = await rate_limiter.is_rate_limited(db=db, user_id=user_id, path=path, limit=limit, period=period) - if is_limited: - raise RateLimitException("Rate limit exceeded.") diff --git a/src/app/api/v1/__init__.py b/src/app/api/v1/__init__.py deleted file mode 100644 index 7575848f..00000000 --- a/src/app/api/v1/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -from fastapi import APIRouter - -from .health import router as health_router -from .login import router as login_router -from .logout import router as logout_router -from .posts import router as posts_router -from .rate_limits import router as rate_limits_router -from .tasks import router as tasks_router -from .tiers import router as tiers_router -from .users import router as users_router - -router = APIRouter(prefix="/v1") -router.include_router(health_router) -router.include_router(login_router) -router.include_router(logout_router) -router.include_router(users_router) -router.include_router(posts_router) -router.include_router(tasks_router) -router.include_router(tiers_router) -router.include_router(rate_limits_router) diff --git 
a/src/app/api/v1/health.py b/src/app/api/v1/health.py deleted file mode 100644 index fd55071e..00000000 --- a/src/app/api/v1/health.py +++ /dev/null @@ -1,57 +0,0 @@ -import logging -from datetime import UTC, datetime -from typing import Annotated - -from fastapi import APIRouter, Depends, status -from fastapi.responses import JSONResponse -from redis.asyncio import Redis -from sqlalchemy.ext.asyncio import AsyncSession - -from ...core.config import settings -from ...core.db.database import async_get_db -from ...core.health import check_database_health, check_redis_health -from ...core.schemas import HealthCheck, ReadyCheck -from ...core.utils.cache import async_get_redis - -router = APIRouter(tags=["health"]) - -STATUS_HEALTHY = "healthy" -STATUS_UNHEALTHY = "unhealthy" - -LOGGER = logging.getLogger(__name__) - - -@router.get("/health", response_model=HealthCheck) -async def health(): - http_status = status.HTTP_200_OK - response = { - "status": STATUS_HEALTHY, - "environment": settings.ENVIRONMENT.value, - "version": settings.APP_VERSION, - "timestamp": datetime.now(UTC).isoformat(timespec="seconds"), - } - - return JSONResponse(status_code=http_status, content=response) - - -@router.get("/ready", response_model=ReadyCheck) -async def ready(redis: Annotated[Redis, Depends(async_get_redis)], db: Annotated[AsyncSession, Depends(async_get_db)]): - database_status = await check_database_health(db=db) - LOGGER.debug(f"Database health check status: {database_status}") - redis_status = await check_redis_health(redis=redis) - LOGGER.debug(f"Redis health check status: {redis_status}") - - overall_status = STATUS_HEALTHY if database_status and redis_status else STATUS_UNHEALTHY - http_status = status.HTTP_200_OK if overall_status == STATUS_HEALTHY else status.HTTP_503_SERVICE_UNAVAILABLE - - response = { - "status": overall_status, - "environment": settings.ENVIRONMENT.value, - "version": settings.APP_VERSION, - "app": STATUS_HEALTHY, - "database": STATUS_HEALTHY if 
database_status else STATUS_UNHEALTHY, - "redis": STATUS_HEALTHY if redis_status else STATUS_UNHEALTHY, - "timestamp": datetime.now(UTC).isoformat(timespec="seconds"), - } - - return JSONResponse(status_code=http_status, content=response) diff --git a/src/app/api/v1/login.py b/src/app/api/v1/login.py deleted file mode 100644 index e784731f..00000000 --- a/src/app/api/v1/login.py +++ /dev/null @@ -1,58 +0,0 @@ -from datetime import timedelta -from typing import Annotated - -from fastapi import APIRouter, Depends, Request, Response -from fastapi.security import OAuth2PasswordRequestForm -from sqlalchemy.ext.asyncio import AsyncSession - -from ...core.config import settings -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import UnauthorizedException -from ...core.schemas import Token -from ...core.security import ( - ACCESS_TOKEN_EXPIRE_MINUTES, - TokenType, - authenticate_user, - create_access_token, - create_refresh_token, - verify_token, -) - -router = APIRouter(tags=["login"]) - - -@router.post("/login", response_model=Token) -async def login_for_access_token( - response: Response, - form_data: Annotated[OAuth2PasswordRequestForm, Depends()], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - user = await authenticate_user(username_or_email=form_data.username, password=form_data.password, db=db) - if not user: - raise UnauthorizedException("Wrong username, email or password.") - - access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - access_token = await create_access_token(data={"sub": user["username"]}, expires_delta=access_token_expires) - - refresh_token = await create_refresh_token(data={"sub": user["username"]}) - max_age = settings.REFRESH_TOKEN_EXPIRE_DAYS * 24 * 60 * 60 - - response.set_cookie( - key="refresh_token", value=refresh_token, httponly=True, secure=True, samesite="lax", max_age=max_age - ) - - return {"access_token": access_token, "token_type": "bearer"} - - 
-@router.post("/refresh") -async def refresh_access_token(request: Request, db: AsyncSession = Depends(async_get_db)) -> dict[str, str]: - refresh_token = request.cookies.get("refresh_token") - if not refresh_token: - raise UnauthorizedException("Refresh token missing.") - - user_data = await verify_token(refresh_token, TokenType.REFRESH, db) - if not user_data: - raise UnauthorizedException("Invalid refresh token.") - - new_access_token = await create_access_token(data={"sub": user_data.username_or_email}) - return {"access_token": new_access_token, "token_type": "bearer"} diff --git a/src/app/api/v1/logout.py b/src/app/api/v1/logout.py deleted file mode 100644 index b4dafc80..00000000 --- a/src/app/api/v1/logout.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Optional - -from fastapi import APIRouter, Cookie, Depends, Response -from jose import JWTError -from sqlalchemy.ext.asyncio import AsyncSession - -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import UnauthorizedException -from ...core.security import blacklist_tokens, oauth2_scheme - -router = APIRouter(tags=["login"]) - - -@router.post("/logout") -async def logout( - response: Response, - access_token: str = Depends(oauth2_scheme), - refresh_token: Optional[str] = Cookie(None, alias="refresh_token"), - db: AsyncSession = Depends(async_get_db), -) -> dict[str, str]: - try: - if not refresh_token: - raise UnauthorizedException("Refresh token not found") - - await blacklist_tokens(access_token=access_token, refresh_token=refresh_token, db=db) - response.delete_cookie(key="refresh_token") - - return {"message": "Logged out successfully"} - - except JWTError: - raise UnauthorizedException("Invalid token.") diff --git a/src/app/api/v1/posts.py b/src/app/api/v1/posts.py deleted file mode 100644 index 493fcb08..00000000 --- a/src/app/api/v1/posts.py +++ /dev/null @@ -1,157 +0,0 @@ -from typing import Annotated, Any - -from fastapi import APIRouter, Depends, Request 
-from fastcrud import PaginatedListResponse, compute_offset, paginated_response -from sqlalchemy.ext.asyncio import AsyncSession - -from ...api.dependencies import get_current_superuser, get_current_user -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import ForbiddenException, NotFoundException -from ...core.utils.cache import cache -from ...crud.crud_posts import crud_posts -from ...crud.crud_users import crud_users -from ...schemas.post import PostCreate, PostCreateInternal, PostRead, PostUpdate -from ...schemas.user import UserRead - -router = APIRouter(tags=["posts"]) - - -@router.post("/{username}/post", response_model=PostRead, status_code=201) -async def write_post( - request: Request, - username: str, - post: PostCreate, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, Any]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - if current_user["id"] != db_user["id"]: - raise ForbiddenException() - - post_internal_dict = post.model_dump() - post_internal_dict["created_by_user_id"] = db_user["id"] - - post_internal = PostCreateInternal(**post_internal_dict) - created_post = await crud_posts.create(db=db, object=post_internal, schema_to_select=PostRead) - - if created_post is None: - raise NotFoundException("Failed to create post") - - return created_post - - -@router.get("/{username}/posts", response_model=PaginatedListResponse[PostRead]) -@cache( - key_prefix="{username}_posts:page_{page}:items_per_page:{items_per_page}", - resource_id_name="username", - expiration=60, -) -async def read_posts( - request: Request, - username: str, - db: Annotated[AsyncSession, Depends(async_get_db)], - page: int = 1, - items_per_page: int = 10, -) -> dict: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, 
schema_to_select=UserRead) - if not db_user: - raise NotFoundException("User not found") - - posts_data = await crud_posts.get_multi( - db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - created_by_user_id=db_user["id"], - is_deleted=False, - ) - - response: dict[str, Any] = paginated_response(crud_data=posts_data, page=page, items_per_page=items_per_page) - return response - - -@router.get("/{username}/post/{id}", response_model=PostRead) -@cache(key_prefix="{username}_post_cache", resource_id_name="id") -async def read_post( - request: Request, username: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - db_post = await crud_posts.get( - db=db, id=id, created_by_user_id=db_user["id"], is_deleted=False, schema_to_select=PostRead - ) - if db_post is None: - raise NotFoundException("Post not found") - - return db_post - - -@router.patch("/{username}/post/{id}") -@cache("{username}_post_cache", resource_id_name="id", pattern_to_invalidate_extra=["{username}_posts:*"]) -async def patch_post( - request: Request, - username: str, - id: int, - values: PostUpdate, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - if current_user["id"] != db_user["id"]: - raise ForbiddenException() - - db_post = await crud_posts.get(db=db, id=id, is_deleted=False, schema_to_select=PostRead) - if db_post is None: - raise NotFoundException("Post not found") - - await crud_posts.update(db=db, object=values, id=id) - return {"message": "Post updated"} - - -@router.delete("/{username}/post/{id}") 
-@cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"}) -async def erase_post( - request: Request, - username: str, - id: int, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - if current_user["id"] != db_user["id"]: - raise ForbiddenException() - - db_post = await crud_posts.get(db=db, id=id, is_deleted=False, schema_to_select=PostRead) - if db_post is None: - raise NotFoundException("Post not found") - - await crud_posts.delete(db=db, id=id) - - return {"message": "Post deleted"} - - -@router.delete("/{username}/db_post/{id}", dependencies=[Depends(get_current_superuser)]) -@cache("{username}_post_cache", resource_id_name="id", to_invalidate_extra={"{username}_posts": "{username}"}) -async def erase_db_post( - request: Request, username: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - db_post = await crud_posts.get(db=db, id=id, is_deleted=False, schema_to_select=PostRead) - if db_post is None: - raise NotFoundException("Post not found") - - await crud_posts.db_delete(db=db, id=id) - return {"message": "Post deleted from the database"} diff --git a/src/app/api/v1/rate_limits.py b/src/app/api/v1/rate_limits.py deleted file mode 100644 index eec6f4b7..00000000 --- a/src/app/api/v1/rate_limits.py +++ /dev/null @@ -1,115 +0,0 @@ -from typing import Annotated, Any - -from fastapi import APIRouter, Depends, Request -from fastcrud import PaginatedListResponse, compute_offset, paginated_response -from sqlalchemy.ext.asyncio import AsyncSession - -from 
...api.dependencies import get_current_superuser -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import DuplicateValueException, NotFoundException -from ...crud.crud_rate_limit import crud_rate_limits -from ...crud.crud_tier import crud_tiers -from ...schemas.rate_limit import RateLimitCreate, RateLimitCreateInternal, RateLimitRead, RateLimitUpdate -from ...schemas.tier import TierRead - -router = APIRouter(tags=["rate_limits"]) - - -@router.post("/tier/{tier_name}/rate_limit", dependencies=[Depends(get_current_superuser)], status_code=201) -async def write_rate_limit( - request: Request, tier_name: str, rate_limit: RateLimitCreate, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - db_tier = await crud_tiers.get(db=db, name=tier_name, schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - rate_limit_internal_dict = rate_limit.model_dump() - rate_limit_internal_dict["tier_id"] = db_tier["id"] - - db_rate_limit = await crud_rate_limits.exists(db=db, name=rate_limit_internal_dict["name"]) - if db_rate_limit: - raise DuplicateValueException("Rate Limit Name not available") - - rate_limit_internal = RateLimitCreateInternal(**rate_limit_internal_dict) - created_rate_limit = await crud_rate_limits.create( - db=db, object=rate_limit_internal, schema_to_select=RateLimitRead - ) - - if created_rate_limit is None: - raise NotFoundException("Failed to create rate limit") - - return created_rate_limit - - -@router.get("/tier/{tier_name}/rate_limits", response_model=PaginatedListResponse[RateLimitRead]) -async def read_rate_limits( - request: Request, - tier_name: str, - db: Annotated[AsyncSession, Depends(async_get_db)], - page: int = 1, - items_per_page: int = 10, -) -> dict: - db_tier = await crud_tiers.get(db=db, name=tier_name, schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - rate_limits_data = await crud_rate_limits.get_multi( - 
db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - tier_id=db_tier["id"], - ) - - response: dict[str, Any] = paginated_response(crud_data=rate_limits_data, page=page, items_per_page=items_per_page) - return response - - -@router.get("/tier/{tier_name}/rate_limit/{id}", response_model=RateLimitRead) -async def read_rate_limit( - request: Request, tier_name: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - db_tier = await crud_tiers.get(db=db, name=tier_name, schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - db_rate_limit = await crud_rate_limits.get(db=db, tier_id=db_tier["id"], id=id, schema_to_select=RateLimitRead) - if db_rate_limit is None: - raise NotFoundException("Rate Limit not found") - - return db_rate_limit - - -@router.patch("/tier/{tier_name}/rate_limit/{id}", dependencies=[Depends(get_current_superuser)]) -async def patch_rate_limit( - request: Request, - tier_name: str, - id: int, - values: RateLimitUpdate, - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - db_tier = await crud_tiers.get(db=db, name=tier_name, schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - db_rate_limit = await crud_rate_limits.get(db=db, tier_id=db_tier["id"], id=id, schema_to_select=RateLimitRead) - if db_rate_limit is None: - raise NotFoundException("Rate Limit not found") - - await crud_rate_limits.update(db=db, object=values, id=id) - return {"message": "Rate Limit updated"} - - -@router.delete("/tier/{tier_name}/rate_limit/{id}", dependencies=[Depends(get_current_superuser)]) -async def erase_rate_limit( - request: Request, tier_name: str, id: int, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, str]: - db_tier = await crud_tiers.get(db=db, name=tier_name, schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - db_rate_limit = await 
crud_rate_limits.get(db=db, tier_id=db_tier["id"], id=id, schema_to_select=RateLimitRead) - if db_rate_limit is None: - raise NotFoundException("Rate Limit not found") - - await crud_rate_limits.delete(db=db, id=id) - return {"message": "Rate Limit deleted"} diff --git a/src/app/api/v1/tasks.py b/src/app/api/v1/tasks.py deleted file mode 100644 index 77930e0f..00000000 --- a/src/app/api/v1/tasks.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Any - -from arq.jobs import Job as ArqJob -from fastapi import APIRouter, Depends, HTTPException - -from ...api.dependencies import rate_limiter_dependency -from ...core.utils import queue -from ...schemas.job import Job - -router = APIRouter(prefix="/tasks", tags=["tasks"]) - - -@router.post("/task", response_model=Job, status_code=201, dependencies=[Depends(rate_limiter_dependency)]) -async def create_task(message: str) -> dict[str, str]: - """Create a new background task. - - Parameters - ---------- - message: str - The message or data to be processed by the task. - - Returns - ------- - dict[str, str] - A dictionary containing the ID of the created task. - """ - if queue.pool is None: - raise HTTPException(status_code=503, detail="Queue is not available") - - job = await queue.pool.enqueue_job("sample_background_task", message) - if job is None: - raise HTTPException(status_code=500, detail="Failed to create task") - - return {"id": job.job_id} - - -@router.get("/task/{task_id}") -async def get_task(task_id: str) -> dict[str, Any] | None: - """Get information about a specific background task. - - Parameters - ---------- - task_id: str - The ID of the task. - - Returns - ------- - Optional[dict[str, Any]] - A dictionary containing information about the task if found, or None otherwise. 
- """ - if queue.pool is None: - raise HTTPException(status_code=503, detail="Queue is not available") - - job = ArqJob(task_id, queue.pool) - job_info = await job.info() - if job_info is None: - return None - - return job_info.__dict__ diff --git a/src/app/api/v1/tiers.py b/src/app/api/v1/tiers.py deleted file mode 100644 index 03ca6f88..00000000 --- a/src/app/api/v1/tiers.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import Annotated, Any - -from fastapi import APIRouter, Depends, Request -from fastcrud import PaginatedListResponse, compute_offset, paginated_response -from sqlalchemy.ext.asyncio import AsyncSession - -from ...api.dependencies import get_current_superuser -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import DuplicateValueException, NotFoundException -from ...crud.crud_tier import crud_tiers -from ...schemas.tier import TierCreate, TierCreateInternal, TierRead, TierUpdate - -router = APIRouter(tags=["tiers"]) - - -@router.post("/tier", dependencies=[Depends(get_current_superuser)], status_code=201) -async def write_tier( - request: Request, tier: TierCreate, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - tier_internal_dict = tier.model_dump() - db_tier = await crud_tiers.exists(db=db, name=tier_internal_dict["name"]) - if db_tier: - raise DuplicateValueException("Tier Name not available") - - tier_internal = TierCreateInternal(**tier_internal_dict) - created_tier = await crud_tiers.create(db=db, object=tier_internal, schema_to_select=TierRead) - - if created_tier is None: - raise NotFoundException("Failed to create tier") - - return created_tier - - -@router.get("/tiers", response_model=PaginatedListResponse[TierRead]) -async def read_tiers( - request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], page: int = 1, items_per_page: int = 10 -) -> dict: - tiers_data = await crud_tiers.get_multi(db=db, offset=compute_offset(page, items_per_page), limit=items_per_page) - 
- response: dict[str, Any] = paginated_response(crud_data=tiers_data, page=page, items_per_page=items_per_page) - return response - - -@router.get("/tier/{name}", response_model=TierRead) -async def read_tier(request: Request, name: str, db: Annotated[AsyncSession, Depends(async_get_db)]) -> dict[str, Any]: - db_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - return db_tier - - -@router.patch("/tier/{name}", dependencies=[Depends(get_current_superuser)]) -async def patch_tier( - request: Request, name: str, values: TierUpdate, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, str]: - db_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - await crud_tiers.update(db=db, object=values, name=name) - return {"message": "Tier updated"} - - -@router.delete("/tier/{name}", dependencies=[Depends(get_current_superuser)]) -async def erase_tier(request: Request, name: str, db: Annotated[AsyncSession, Depends(async_get_db)]) -> dict[str, str]: - db_tier = await crud_tiers.get(db=db, name=name, schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - await crud_tiers.delete(db=db, name=name) - return {"message": "Tier deleted"} diff --git a/src/app/api/v1/users.py b/src/app/api/v1/users.py deleted file mode 100644 index 60264cc2..00000000 --- a/src/app/api/v1/users.py +++ /dev/null @@ -1,203 +0,0 @@ -from typing import Annotated, Any - -from fastapi import APIRouter, Depends, Request -from fastcrud import PaginatedListResponse, compute_offset, paginated_response -from sqlalchemy.ext.asyncio import AsyncSession - -from ...api.dependencies import get_current_superuser, get_current_user -from ...core.db.database import async_get_db -from ...core.exceptions.http_exceptions import DuplicateValueException, ForbiddenException, NotFoundException 
-from ...core.security import blacklist_token, get_password_hash, oauth2_scheme -from ...crud.crud_rate_limit import crud_rate_limits -from ...crud.crud_tier import crud_tiers -from ...crud.crud_users import crud_users -from ...schemas.tier import TierRead -from ...schemas.user import UserCreate, UserCreateInternal, UserRead, UserTierUpdate, UserUpdate - -router = APIRouter(tags=["users"]) - - -@router.post("/user", response_model=UserRead, status_code=201) -async def write_user( - request: Request, user: UserCreate, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - email_row = await crud_users.exists(db=db, email=user.email) - if email_row: - raise DuplicateValueException("Email is already registered") - - username_row = await crud_users.exists(db=db, username=user.username) - if username_row: - raise DuplicateValueException("Username not available") - - user_internal_dict = user.model_dump() - user_internal_dict["hashed_password"] = get_password_hash(password=user_internal_dict["password"]) - del user_internal_dict["password"] - - user_internal = UserCreateInternal(**user_internal_dict) - created_user = await crud_users.create(db=db, object=user_internal, schema_to_select=UserRead) - - if created_user is None: - raise NotFoundException("Failed to create user") - - return created_user - - -@router.get("/users", response_model=PaginatedListResponse[UserRead]) -async def read_users( - request: Request, db: Annotated[AsyncSession, Depends(async_get_db)], page: int = 1, items_per_page: int = 10 -) -> dict: - users_data = await crud_users.get_multi( - db=db, - offset=compute_offset(page, items_per_page), - limit=items_per_page, - is_deleted=False, - ) - - response: dict[str, Any] = paginated_response(crud_data=users_data, page=page, items_per_page=items_per_page) - return response - - -@router.get("/user/me/", response_model=UserRead) -async def read_users_me(request: Request, current_user: Annotated[dict, Depends(get_current_user)]) -> dict: - 
return current_user - - -@router.get("/user/{username}", response_model=UserRead) -async def read_user( - request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - db_user = await crud_users.get(db=db, username=username, is_deleted=False, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - return db_user - - -@router.patch("/user/{username}") -async def patch_user( - request: Request, - values: UserUpdate, - username: str, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username) - if db_user is None: - raise NotFoundException("User not found") - - db_username = db_user["username"] - db_email = db_user["email"] - - if db_username != current_user["username"]: - raise ForbiddenException() - - if values.email is not None and values.email != db_email: - if await crud_users.exists(db=db, email=values.email): - raise DuplicateValueException("Email is already registered") - - if values.username is not None and values.username != db_username: - if await crud_users.exists(db=db, username=values.username): - raise DuplicateValueException("Username not available") - - await crud_users.update(db=db, object=values, username=username) - return {"message": "User updated"} - - -@router.delete("/user/{username}") -async def erase_user( - request: Request, - username: str, - current_user: Annotated[dict, Depends(get_current_user)], - db: Annotated[AsyncSession, Depends(async_get_db)], - token: str = Depends(oauth2_scheme), -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if not db_user: - raise NotFoundException("User not found") - - if username != current_user["username"]: - raise ForbiddenException() - - await crud_users.delete(db=db, username=username) - await blacklist_token(token=token, 
db=db) - return {"message": "User deleted"} - - -@router.delete("/db_user/{username}", dependencies=[Depends(get_current_superuser)]) -async def erase_db_user( - request: Request, - username: str, - db: Annotated[AsyncSession, Depends(async_get_db)], - token: str = Depends(oauth2_scheme), -) -> dict[str, str]: - db_user = await crud_users.exists(db=db, username=username) - if not db_user: - raise NotFoundException("User not found") - - await crud_users.db_delete(db=db, username=username) - await blacklist_token(token=token, db=db) - return {"message": "User deleted from the database"} - - -@router.get("/user/{username}/rate_limits", dependencies=[Depends(get_current_superuser)]) -async def read_user_rate_limits( - request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, Any]: - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - user_dict = dict(db_user) - if db_user["tier_id"] is None: - user_dict["tier_rate_limits"] = [] - return user_dict - - db_tier = await crud_tiers.get(db=db, id=db_user["tier_id"], schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - db_rate_limits = await crud_rate_limits.get_multi(db=db, tier_id=db_tier["id"]) - - user_dict["tier_rate_limits"] = db_rate_limits["data"] - - return user_dict - - -@router.get("/user/{username}/tier") -async def read_user_tier( - request: Request, username: str, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict | None: - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - if db_user["tier_id"] is None: - return None - - db_tier = await crud_tiers.get(db=db, id=db_user["tier_id"], schema_to_select=TierRead) - if not db_tier: - raise NotFoundException("Tier not found") - - user_dict = dict(db_user) - tier_dict = 
dict(db_tier) - - for key, value in tier_dict.items(): - user_dict[f"tier_{key}"] = value - - return user_dict - - -@router.patch("/user/{username}/tier", dependencies=[Depends(get_current_superuser)]) -async def patch_user_tier( - request: Request, username: str, values: UserTierUpdate, db: Annotated[AsyncSession, Depends(async_get_db)] -) -> dict[str, str]: - db_user = await crud_users.get(db=db, username=username, schema_to_select=UserRead) - if db_user is None: - raise NotFoundException("User not found") - - db_tier = await crud_tiers.get(db=db, id=values.tier_id, schema_to_select=TierRead) - if db_tier is None: - raise NotFoundException("Tier not found") - - await crud_users.update(db=db, object=values.model_dump(), username=username) - return {"message": f"User {db_user['name']} Tier updated"} diff --git a/src/app/core/config.py b/src/app/core/config.py deleted file mode 100644 index 1841d012..00000000 --- a/src/app/core/config.py +++ /dev/null @@ -1,204 +0,0 @@ -import os -from enum import Enum - -from pydantic import SecretStr, computed_field -from pydantic_settings import BaseSettings, SettingsConfigDict - - -class AppSettings(BaseSettings): - APP_NAME: str = "FastAPI app" - APP_DESCRIPTION: str | None = None - APP_VERSION: str | None = None - LICENSE_NAME: str | None = None - CONTACT_NAME: str | None = None - CONTACT_EMAIL: str | None = None - - -class CryptSettings(BaseSettings): - SECRET_KEY: SecretStr = SecretStr("secret-key") - ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 - REFRESH_TOKEN_EXPIRE_DAYS: int = 7 - - -class FileLoggerSettings(BaseSettings): - FILE_LOG_MAX_BYTES: int = 10 * 1024 * 1024 - FILE_LOG_BACKUP_COUNT: int = 5 - FILE_LOG_FORMAT_JSON: bool = True - FILE_LOG_LEVEL: str = "INFO" - - # Include request ID, path, method, client host, and status code in the file log - FILE_LOG_INCLUDE_REQUEST_ID: bool = True - FILE_LOG_INCLUDE_PATH: bool = True - FILE_LOG_INCLUDE_METHOD: bool = True - FILE_LOG_INCLUDE_CLIENT_HOST: bool 
= True - FILE_LOG_INCLUDE_STATUS_CODE: bool = True - - -class ConsoleLoggerSettings(BaseSettings): - CONSOLE_LOG_LEVEL: str = "INFO" - CONSOLE_LOG_FORMAT_JSON: bool = False - - # Include request ID, path, method, client host, and status code in the console log - CONSOLE_LOG_INCLUDE_REQUEST_ID: bool = False - CONSOLE_LOG_INCLUDE_PATH: bool = False - CONSOLE_LOG_INCLUDE_METHOD: bool = False - CONSOLE_LOG_INCLUDE_CLIENT_HOST: bool = False - CONSOLE_LOG_INCLUDE_STATUS_CODE: bool = False - - -class DatabaseSettings(BaseSettings): - pass - - -class SQLiteSettings(DatabaseSettings): - SQLITE_URI: str = "./sql_app.db" - SQLITE_SYNC_PREFIX: str = "sqlite:///" - SQLITE_ASYNC_PREFIX: str = "sqlite+aiosqlite:///" - - -class MySQLSettings(DatabaseSettings): - MYSQL_USER: str = "username" - MYSQL_PASSWORD: str = "password" - MYSQL_SERVER: str = "localhost" - MYSQL_PORT: int = 5432 - MYSQL_DB: str = "dbname" - MYSQL_SYNC_PREFIX: str = "mysql://" - MYSQL_ASYNC_PREFIX: str = "mysql+aiomysql://" - MYSQL_URL: str | None = None - - @computed_field # type: ignore[prop-decorator] - @property - def MYSQL_URI(self) -> str: - credentials = f"{self.MYSQL_USER}:{self.MYSQL_PASSWORD}" - location = f"{self.MYSQL_SERVER}:{self.MYSQL_PORT}/{self.MYSQL_DB}" - return f"{credentials}@{location}" - - -class PostgresSettings(DatabaseSettings): - POSTGRES_USER: str = "postgres" - POSTGRES_PASSWORD: str = "postgres" - POSTGRES_SERVER: str = "localhost" - POSTGRES_PORT: int = 5432 - POSTGRES_DB: str = "postgres" - POSTGRES_SYNC_PREFIX: str = "postgresql://" - POSTGRES_ASYNC_PREFIX: str = "postgresql+asyncpg://" - POSTGRES_URL: str | None = None - - @computed_field # type: ignore[prop-decorator] - @property - def POSTGRES_URI(self) -> str: - credentials = f"{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}" - location = f"{self.POSTGRES_SERVER}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}" - return f"{credentials}@{location}" - - -class FirstUserSettings(BaseSettings): - ADMIN_NAME: str = "admin" - ADMIN_EMAIL: 
str = "admin@admin.com" - ADMIN_USERNAME: str = "admin" - ADMIN_PASSWORD: str = "!Ch4ng3Th1sP4ssW0rd!" - - -class TestSettings(BaseSettings): - ... - - -class RedisCacheSettings(BaseSettings): - REDIS_CACHE_HOST: str = "localhost" - REDIS_CACHE_PORT: int = 6379 - - @computed_field # type: ignore[prop-decorator] - @property - def REDIS_CACHE_URL(self) -> str: - return f"redis://{self.REDIS_CACHE_HOST}:{self.REDIS_CACHE_PORT}" - - -class ClientSideCacheSettings(BaseSettings): - CLIENT_CACHE_MAX_AGE: int = 60 - - -class RedisQueueSettings(BaseSettings): - REDIS_QUEUE_HOST: str = "localhost" - REDIS_QUEUE_PORT: int = 6379 - - -class RedisRateLimiterSettings(BaseSettings): - REDIS_RATE_LIMIT_HOST: str = "localhost" - REDIS_RATE_LIMIT_PORT: int = 6379 - - @computed_field # type: ignore[prop-decorator] - @property - def REDIS_RATE_LIMIT_URL(self) -> str: - return f"redis://{self.REDIS_RATE_LIMIT_HOST}:{self.REDIS_RATE_LIMIT_PORT}" - - -class DefaultRateLimitSettings(BaseSettings): - DEFAULT_RATE_LIMIT_LIMIT: int = 10 - DEFAULT_RATE_LIMIT_PERIOD: int = 3600 - - -class CRUDAdminSettings(BaseSettings): - CRUD_ADMIN_ENABLED: bool = True - CRUD_ADMIN_MOUNT_PATH: str = "/admin" - - CRUD_ADMIN_ALLOWED_IPS_LIST: list[str] | None = None - CRUD_ADMIN_ALLOWED_NETWORKS_LIST: list[str] | None = None - CRUD_ADMIN_MAX_SESSIONS: int = 10 - CRUD_ADMIN_SESSION_TIMEOUT: int = 1440 - SESSION_SECURE_COOKIES: bool = True - - CRUD_ADMIN_TRACK_EVENTS: bool = True - CRUD_ADMIN_TRACK_SESSIONS: bool = True - - CRUD_ADMIN_REDIS_ENABLED: bool = False - CRUD_ADMIN_REDIS_HOST: str = "localhost" - CRUD_ADMIN_REDIS_PORT: int = 6379 - CRUD_ADMIN_REDIS_DB: int = 0 - CRUD_ADMIN_REDIS_PASSWORD: str | None = "None" - CRUD_ADMIN_REDIS_SSL: bool = False - - -class EnvironmentOption(str, Enum): - LOCAL = "local" - STAGING = "staging" - PRODUCTION = "production" - - -class EnvironmentSettings(BaseSettings): - ENVIRONMENT: EnvironmentOption = EnvironmentOption.LOCAL - - -class CORSSettings(BaseSettings): - 
CORS_ORIGINS: list[str] = ["*"] - CORS_METHODS: list[str] = ["*"] - CORS_HEADERS: list[str] = ["*"] - - -class Settings( - AppSettings, - SQLiteSettings, - PostgresSettings, - CryptSettings, - FirstUserSettings, - TestSettings, - RedisCacheSettings, - ClientSideCacheSettings, - RedisQueueSettings, - RedisRateLimiterSettings, - DefaultRateLimitSettings, - CRUDAdminSettings, - EnvironmentSettings, - CORSSettings, - FileLoggerSettings, - ConsoleLoggerSettings, -): - model_config = SettingsConfigDict( - env_file=os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "..", ".env"), - env_file_encoding="utf-8", - case_sensitive=True, - extra="ignore", - ) - - -settings = Settings() diff --git a/src/app/core/db/crud_token_blacklist.py b/src/app/core/db/crud_token_blacklist.py deleted file mode 100644 index 79e0c9ea..00000000 --- a/src/app/core/db/crud_token_blacklist.py +++ /dev/null @@ -1,14 +0,0 @@ -from fastcrud import FastCRUD - -from ..db.token_blacklist import TokenBlacklist -from ..schemas import TokenBlacklistCreate, TokenBlacklistRead, TokenBlacklistUpdate - -CRUDTokenBlacklist = FastCRUD[ - TokenBlacklist, - TokenBlacklistCreate, - TokenBlacklistUpdate, - TokenBlacklistUpdate, - TokenBlacklistUpdate, - TokenBlacklistRead, -] -crud_token_blacklist = CRUDTokenBlacklist(TokenBlacklist) diff --git a/src/app/core/db/database.py b/src/app/core/db/database.py deleted file mode 100644 index b95ab3b8..00000000 --- a/src/app/core/db/database.py +++ /dev/null @@ -1,26 +0,0 @@ -from collections.abc import AsyncGenerator - -from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine -from sqlalchemy.ext.asyncio.session import AsyncSession -from sqlalchemy.orm import DeclarativeBase, MappedAsDataclass - -from ..config import settings - - -class Base(DeclarativeBase, MappedAsDataclass): - pass - - -DATABASE_URI = settings.POSTGRES_URI -DATABASE_PREFIX = settings.POSTGRES_ASYNC_PREFIX -DATABASE_URL = f"{DATABASE_PREFIX}{DATABASE_URI}" - - -async_engine = 
create_async_engine(DATABASE_URL, echo=False, future=True) - -local_session = async_sessionmaker(bind=async_engine, class_=AsyncSession, expire_on_commit=False) - - -async def async_get_db() -> AsyncGenerator[AsyncSession, None]: - async with local_session() as db: - yield db diff --git a/src/app/core/db/models.py b/src/app/core/db/models.py deleted file mode 100644 index 0ade95ff..00000000 --- a/src/app/core/db/models.py +++ /dev/null @@ -1,27 +0,0 @@ -import uuid as uuid_pkg -from datetime import UTC, datetime - -from sqlalchemy import Boolean, DateTime, text -from sqlalchemy.dialects.postgresql import UUID -from sqlalchemy.orm import Mapped, mapped_column -from uuid6 import uuid7 - - -class UUIDMixin: - uuid: Mapped[uuid_pkg.UUID] = mapped_column( - UUID(as_uuid=True), primary_key=True, default=uuid7, server_default=text("gen_random_uuid()") - ) - - -class TimestampMixin: - created_at: Mapped[datetime] = mapped_column( - DateTime, default=datetime.now(UTC), server_default=text("current_timestamp(0)") - ) - updated_at: Mapped[datetime | None] = mapped_column( - DateTime, nullable=True, onupdate=datetime.now(UTC), server_default=text("current_timestamp(0)") - ) - - -class SoftDeleteMixin: - deleted_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - is_deleted: Mapped[bool] = mapped_column(Boolean, default=False) diff --git a/src/app/core/db/token_blacklist.py b/src/app/core/db/token_blacklist.py deleted file mode 100644 index 6a387ae4..00000000 --- a/src/app/core/db/token_blacklist.py +++ /dev/null @@ -1,14 +0,0 @@ -from datetime import datetime - -from sqlalchemy import DateTime, String -from sqlalchemy.orm import Mapped, mapped_column - -from .database import Base - - -class TokenBlacklist(Base): - __tablename__ = "token_blacklist" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - token: Mapped[str] = mapped_column(String, unique=True, index=True) - expires_at: 
Mapped[datetime] = mapped_column(DateTime) diff --git a/src/app/core/exceptions/cache_exceptions.py b/src/app/core/exceptions/cache_exceptions.py deleted file mode 100644 index 761b8875..00000000 --- a/src/app/core/exceptions/cache_exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -class CacheIdentificationInferenceError(Exception): - def __init__(self, message: str = "Could not infer id for resource being cached.") -> None: - self.message = message - super().__init__(self.message) - - -class InvalidRequestError(Exception): - def __init__(self, message: str = "Type of request not supported.") -> None: - self.message = message - super().__init__(self.message) - - -class MissingClientError(Exception): - def __init__(self, message: str = "Client is None.") -> None: - self.message = message - super().__init__(self.message) diff --git a/src/app/core/exceptions/http_exceptions.py b/src/app/core/exceptions/http_exceptions.py deleted file mode 100644 index 9afe66ad..00000000 --- a/src/app/core/exceptions/http_exceptions.py +++ /dev/null @@ -1,11 +0,0 @@ -# ruff: noqa -from fastcrud.exceptions.http_exceptions import ( - CustomException, - BadRequestException, - NotFoundException, - ForbiddenException, - UnauthorizedException, - UnprocessableEntityException, - DuplicateValueException, - RateLimitException, -) diff --git a/src/app/core/health.py b/src/app/core/health.py deleted file mode 100644 index 60e2e050..00000000 --- a/src/app/core/health.py +++ /dev/null @@ -1,25 +0,0 @@ -import logging - -from redis.asyncio import Redis -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncSession - -LOGGER = logging.getLogger(__name__) - - -async def check_database_health(db: AsyncSession) -> bool: - try: - await db.execute(text("SELECT 1")) - return True - except Exception as e: - LOGGER.exception(f"Database health check failed with error: {e}") - return False - - -async def check_redis_health(redis: Redis) -> bool: - try: - await redis.ping() - return True - except 
Exception as e: - LOGGER.exception(f"Redis health check failed with error: {e}") - return False diff --git a/src/app/core/logger.py b/src/app/core/logger.py deleted file mode 100644 index 7aab1d51..00000000 --- a/src/app/core/logger.py +++ /dev/null @@ -1,132 +0,0 @@ -"""Logging configuration for the application.""" - -import logging -import os -from logging.handlers import RotatingFileHandler - -import structlog -from structlog.dev import ConsoleRenderer -from structlog.processors import JSONRenderer -from structlog.types import EventDict, Processor - -from ..core.config import settings - - -def drop_color_message_key(_, __, event_dict: EventDict) -> EventDict: - """Uvicorn adds `color_message` which duplicates `event`. - - Remove it to avoid double logging. - """ - event_dict.pop("color_message", None) - return event_dict - - -def file_log_filter_processors(_, __, event_dict: EventDict) -> EventDict: - """Filter out the request ID, path, method, client host, and status code from the event dict if the - corresponding setting is False.""" - - if not settings.FILE_LOG_INCLUDE_REQUEST_ID: - event_dict.pop("request_id", None) - if not settings.FILE_LOG_INCLUDE_PATH: - event_dict.pop("path", None) - if not settings.FILE_LOG_INCLUDE_METHOD: - event_dict.pop("method", None) - if not settings.FILE_LOG_INCLUDE_CLIENT_HOST: - event_dict.pop("client_host", None) - if not settings.FILE_LOG_INCLUDE_STATUS_CODE: - event_dict.pop("status_code", None) - return event_dict - - -def console_log_filter_processors(_, __, event_dict: EventDict) -> EventDict: - """Filter out the request ID, path, method, client host, and status code from the event dict if the - corresponding setting is False.""" - - if not settings.CONSOLE_LOG_INCLUDE_REQUEST_ID: - event_dict.pop("request_id", None) - if not settings.CONSOLE_LOG_INCLUDE_PATH: - event_dict.pop("path", None) - if not settings.CONSOLE_LOG_INCLUDE_METHOD: - event_dict.pop("method", None) - if not settings.CONSOLE_LOG_INCLUDE_CLIENT_HOST: - 
event_dict.pop("client_host", None) - if not settings.CONSOLE_LOG_INCLUDE_STATUS_CODE: - event_dict.pop("status_code", None) - return event_dict - - -# Shared processors for all loggers -timestamper = structlog.processors.TimeStamper(fmt="iso") -SHARED_PROCESSORS: list[Processor] = [ - structlog.contextvars.merge_contextvars, - structlog.stdlib.add_logger_name, - structlog.stdlib.add_log_level, - structlog.stdlib.PositionalArgumentsFormatter(), - structlog.stdlib.ExtraAdder(), - drop_color_message_key, - timestamper, - structlog.processors.StackInfoRenderer(), -] - - -# Configure structlog globally -structlog.configure( - processors=SHARED_PROCESSORS + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter], - logger_factory=structlog.stdlib.LoggerFactory(), - cache_logger_on_first_use=True, -) - - -def build_formatter(*, json_output: bool, pre_chain: list[Processor]) -> structlog.stdlib.ProcessorFormatter: - """Build a ProcessorFormatter with the specified renderer and processors.""" - renderer = JSONRenderer() if json_output else ConsoleRenderer() - - processors = [structlog.stdlib.ProcessorFormatter.remove_processors_meta, renderer] - - if json_output: - pre_chain = pre_chain + [structlog.processors.format_exc_info] - - return structlog.stdlib.ProcessorFormatter(foreign_pre_chain=pre_chain, processors=processors) - - -# Setup log directory -LOG_DIR = os.path.join(os.path.dirname(os.path.dirname(__file__)), "logs") -os.makedirs(LOG_DIR, exist_ok=True) - - -# File handler configuration -file_handler = RotatingFileHandler( - filename=os.path.join(LOG_DIR, "app.log"), - maxBytes=settings.FILE_LOG_MAX_BYTES, - backupCount=settings.FILE_LOG_BACKUP_COUNT, -) -file_handler.setLevel(settings.FILE_LOG_LEVEL) -file_handler.setFormatter( - build_formatter( - json_output=settings.FILE_LOG_FORMAT_JSON, pre_chain=SHARED_PROCESSORS + [file_log_filter_processors] - ) -) - -# Console handler configuration -console_handler = logging.StreamHandler() 
-console_handler.setLevel(settings.CONSOLE_LOG_LEVEL) -console_handler.setFormatter( - build_formatter( - json_output=settings.CONSOLE_LOG_FORMAT_JSON, pre_chain=SHARED_PROCESSORS + [console_log_filter_processors] - ) -) - - -# Root logger configuration -root_logger = logging.getLogger() -root_logger.setLevel(logging.INFO) -root_logger.handlers.clear() # avoid duplicate logs -root_logger.addHandler(file_handler) -root_logger.addHandler(console_handler) - -# Uvicorn logger integration -for logger_name in ("uvicorn", "uvicorn.error", "uvicorn.access"): - logger = logging.getLogger(logger_name) - logger.handlers.clear() - logger.propagate = True - logger.setLevel(logging.INFO) diff --git a/src/app/core/security.py b/src/app/core/security.py deleted file mode 100644 index d589078b..00000000 --- a/src/app/core/security.py +++ /dev/null @@ -1,136 +0,0 @@ -from datetime import UTC, datetime, timedelta -from enum import Enum -from typing import Any, Literal - -import bcrypt -from fastapi.security import OAuth2PasswordBearer -from jose import JWTError, jwt -from pydantic import SecretStr -from sqlalchemy.ext.asyncio import AsyncSession - -from ..crud.crud_users import crud_users -from .config import settings -from .db.crud_token_blacklist import crud_token_blacklist -from .schemas import TokenBlacklistCreate, TokenData - -SECRET_KEY: SecretStr = settings.SECRET_KEY -ALGORITHM = settings.ALGORITHM -ACCESS_TOKEN_EXPIRE_MINUTES = settings.ACCESS_TOKEN_EXPIRE_MINUTES -REFRESH_TOKEN_EXPIRE_DAYS = settings.REFRESH_TOKEN_EXPIRE_DAYS - -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/login") - - -class TokenType(str, Enum): - ACCESS = "access" - REFRESH = "refresh" - - -async def verify_password(plain_password: str, hashed_password: str) -> bool: - correct_password: bool = bcrypt.checkpw(plain_password.encode(), hashed_password.encode()) - return correct_password - - -def get_password_hash(password: str) -> str: - hashed_password: str = bcrypt.hashpw(password.encode(), 
bcrypt.gensalt()).decode() - return hashed_password - - -async def authenticate_user(username_or_email: str, password: str, db: AsyncSession) -> dict[str, Any] | Literal[False]: - if "@" in username_or_email: - db_user = await crud_users.get(db=db, email=username_or_email, is_deleted=False) - else: - db_user = await crud_users.get(db=db, username=username_or_email, is_deleted=False) - - if not db_user: - return False - - if not await verify_password(password, db_user["hashed_password"]): - return False - - return db_user - - -async def create_access_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str: - to_encode = data.copy() - if expires_delta: - expire = datetime.now(UTC).replace(tzinfo=None) + expires_delta - else: - expire = datetime.now(UTC).replace(tzinfo=None) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) - to_encode.update({"exp": expire, "token_type": TokenType.ACCESS}) - encoded_jwt: str = jwt.encode(to_encode, SECRET_KEY.get_secret_value(), algorithm=ALGORITHM) - return encoded_jwt - - -async def create_refresh_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str: - to_encode = data.copy() - if expires_delta: - expire = datetime.now(UTC).replace(tzinfo=None) + expires_delta - else: - expire = datetime.now(UTC).replace(tzinfo=None) + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS) - to_encode.update({"exp": expire, "token_type": TokenType.REFRESH}) - encoded_jwt: str = jwt.encode(to_encode, SECRET_KEY.get_secret_value(), algorithm=ALGORITHM) - return encoded_jwt - - -async def verify_token(token: str, expected_token_type: TokenType, db: AsyncSession) -> TokenData | None: - """Verify a JWT token and return TokenData if valid. - - Parameters - ---------- - token: str - The JWT token to be verified. - expected_token_type: TokenType - The expected type of token (access or refresh) - db: AsyncSession - Database session for performing database operations. 
- - Returns - ------- - TokenData | None - TokenData instance if the token is valid, None otherwise. - """ - is_blacklisted = await crud_token_blacklist.exists(db, token=token) - if is_blacklisted: - return None - - try: - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - username_or_email: str | None = payload.get("sub") - token_type: str | None = payload.get("token_type") - - if username_or_email is None or token_type != expected_token_type: - return None - - return TokenData(username_or_email=username_or_email) - - except JWTError: - return None - - -async def blacklist_tokens(access_token: str, refresh_token: str, db: AsyncSession) -> None: - """Blacklist both access and refresh tokens. - - Parameters - ---------- - access_token: str - The access token to blacklist - refresh_token: str - The refresh token to blacklist - db: AsyncSession - Database session for performing database operations. - """ - for token in [access_token, refresh_token]: - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - exp_timestamp = payload.get("exp") - if exp_timestamp is not None: - expires_at = datetime.fromtimestamp(exp_timestamp) - await crud_token_blacklist.create(db, object=TokenBlacklistCreate(token=token, expires_at=expires_at)) - - -async def blacklist_token(token: str, db: AsyncSession) -> None: - payload = jwt.decode(token, SECRET_KEY.get_secret_value(), algorithms=[ALGORITHM]) - exp_timestamp = payload.get("exp") - if exp_timestamp is not None: - expires_at = datetime.fromtimestamp(exp_timestamp) - await crud_token_blacklist.create(db, object=TokenBlacklistCreate(token=token, expires_at=expires_at)) diff --git a/src/app/core/setup.py b/src/app/core/setup.py deleted file mode 100644 index 743a9f04..00000000 --- a/src/app/core/setup.py +++ /dev/null @@ -1,245 +0,0 @@ -from collections.abc import AsyncGenerator, Callable -from contextlib import _AsyncGeneratorContextManager, asynccontextmanager -from typing 
import Any - -import anyio -import fastapi -import redis.asyncio as redis -from arq import create_pool -from arq.connections import RedisSettings -from fastapi import APIRouter, Depends, FastAPI -from fastapi.middleware.cors import CORSMiddleware -from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html -from fastapi.openapi.utils import get_openapi - -from ..api.dependencies import get_current_superuser -from ..core.utils.rate_limit import rate_limiter -from ..middleware.client_cache_middleware import ClientCacheMiddleware -from ..middleware.logger_middleware import LoggerMiddleware -from ..models import * # noqa: F403 -from .config import ( - AppSettings, - ClientSideCacheSettings, - CORSSettings, - DatabaseSettings, - EnvironmentOption, - EnvironmentSettings, - RedisCacheSettings, - RedisQueueSettings, - RedisRateLimiterSettings, - settings, -) -from .db.database import Base -from .db.database import async_engine as engine -from .utils import cache, queue - - -# -------------- database -------------- -async def create_tables() -> None: - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - -# -------------- cache -------------- -async def create_redis_cache_pool() -> None: - cache.pool = redis.ConnectionPool.from_url(settings.REDIS_CACHE_URL) - cache.client = redis.Redis.from_pool(cache.pool) # type: ignore - - -async def close_redis_cache_pool() -> None: - if cache.client is not None: - await cache.client.aclose() # type: ignore - - -# -------------- queue -------------- -async def create_redis_queue_pool() -> None: - queue.pool = await create_pool(RedisSettings(host=settings.REDIS_QUEUE_HOST, port=settings.REDIS_QUEUE_PORT)) - - -async def close_redis_queue_pool() -> None: - if queue.pool is not None: - await queue.pool.aclose() # type: ignore - - -# -------------- rate limit -------------- -async def create_redis_rate_limit_pool() -> None: - rate_limiter.initialize(settings.REDIS_RATE_LIMIT_URL) # type: ignore - - 
-async def close_redis_rate_limit_pool() -> None: - if rate_limiter.client is not None: - await rate_limiter.client.aclose() # type: ignore - - -# -------------- application -------------- -async def set_threadpool_tokens(number_of_tokens: int = 100) -> None: - limiter = anyio.to_thread.current_default_thread_limiter() - limiter.total_tokens = number_of_tokens - - -def lifespan_factory( - settings: ( - DatabaseSettings - | RedisCacheSettings - | AppSettings - | ClientSideCacheSettings - | CORSSettings - | RedisQueueSettings - | RedisRateLimiterSettings - | EnvironmentSettings - ), - create_tables_on_start: bool = True, -) -> Callable[[FastAPI], _AsyncGeneratorContextManager[Any]]: - """Factory to create a lifespan async context manager for a FastAPI app.""" - - @asynccontextmanager - async def lifespan(app: FastAPI) -> AsyncGenerator: - from asyncio import Event - - initialization_complete = Event() - app.state.initialization_complete = initialization_complete - - await set_threadpool_tokens() - - try: - if isinstance(settings, RedisCacheSettings): - await create_redis_cache_pool() - - if isinstance(settings, RedisQueueSettings): - await create_redis_queue_pool() - - if isinstance(settings, RedisRateLimiterSettings): - await create_redis_rate_limit_pool() - - if create_tables_on_start: - await create_tables() - - initialization_complete.set() - - yield - - finally: - if isinstance(settings, RedisCacheSettings): - await close_redis_cache_pool() - - if isinstance(settings, RedisQueueSettings): - await close_redis_queue_pool() - - if isinstance(settings, RedisRateLimiterSettings): - await close_redis_rate_limit_pool() - - return lifespan - - -# -------------- application -------------- -def create_application( - router: APIRouter, - settings: ( - DatabaseSettings - | RedisCacheSettings - | AppSettings - | ClientSideCacheSettings - | CORSSettings - | RedisQueueSettings - | RedisRateLimiterSettings - | EnvironmentSettings - ), - create_tables_on_start: bool = True, - 
lifespan: Callable[[FastAPI], _AsyncGeneratorContextManager[Any]] | None = None, - **kwargs: Any, -) -> FastAPI: - """Creates and configures a FastAPI application based on the provided settings. - - This function initializes a FastAPI application and configures it with various settings - and handlers based on the type of the `settings` object provided. - - Parameters - ---------- - router : APIRouter - The APIRouter object containing the routes to be included in the FastAPI application. - - settings - An instance representing the settings for configuring the FastAPI application. - It determines the configuration applied: - - - AppSettings: Configures basic app metadata like name, description, contact, and license info. - - DatabaseSettings: Adds event handlers for initializing database tables during startup. - - RedisCacheSettings: Sets up event handlers for creating and closing a Redis cache pool. - - ClientSideCacheSettings: Integrates middleware for client-side caching. - - CORSSettings: Integrates CORS middleware with specified origins. - - RedisQueueSettings: Sets up event handlers for creating and closing a Redis queue pool. - - RedisRateLimiterSettings: Sets up event handlers for creating and closing a Redis rate limiter pool. - - EnvironmentSettings: Conditionally sets documentation URLs and integrates custom routes for API documentation - based on the environment type. - - create_tables_on_start : bool - A flag to indicate whether to create database tables on application startup. - Defaults to True. - - **kwargs - Additional keyword arguments passed directly to the FastAPI constructor. - - Returns - ------- - FastAPI - A fully configured FastAPI application instance. - - The function configures the FastAPI application with different features and behaviors - based on the provided settings. 
It includes setting up database connections, Redis pools - for caching, queue, and rate limiting, client-side caching, and customizing the API documentation - based on the environment settings. - """ - # --- before creating application --- - if isinstance(settings, AppSettings): - to_update = { - "title": settings.APP_NAME, - "description": settings.APP_DESCRIPTION, - "contact": {"name": settings.CONTACT_NAME, "email": settings.CONTACT_EMAIL}, - "license_info": {"name": settings.LICENSE_NAME}, - } - kwargs.update(to_update) - - if isinstance(settings, EnvironmentSettings): - kwargs.update({"docs_url": None, "redoc_url": None, "openapi_url": None}) - - # Use custom lifespan if provided, otherwise use default factory - if lifespan is None: - lifespan = lifespan_factory(settings, create_tables_on_start=create_tables_on_start) - - application = FastAPI(lifespan=lifespan, **kwargs) - application.include_router(router) - - if isinstance(settings, ClientSideCacheSettings): - application.add_middleware(ClientCacheMiddleware, max_age=settings.CLIENT_CACHE_MAX_AGE) - - if isinstance(settings, CORSSettings): - application.add_middleware( - CORSMiddleware, - allow_origins=settings.CORS_ORIGINS, - allow_credentials=True, - allow_methods=settings.CORS_METHODS, - allow_headers=settings.CORS_HEADERS, - ) - application.add_middleware(LoggerMiddleware) - if isinstance(settings, EnvironmentSettings): - if settings.ENVIRONMENT != EnvironmentOption.PRODUCTION: - docs_router = APIRouter() - if settings.ENVIRONMENT != EnvironmentOption.LOCAL: - docs_router = APIRouter(dependencies=[Depends(get_current_superuser)]) - - @docs_router.get("/docs", include_in_schema=False) - async def get_swagger_documentation() -> fastapi.responses.HTMLResponse: - return get_swagger_ui_html(openapi_url="/openapi.json", title="docs") - - @docs_router.get("/redoc", include_in_schema=False) - async def get_redoc_documentation() -> fastapi.responses.HTMLResponse: - return 
get_redoc_html(openapi_url="/openapi.json", title="docs") - - @docs_router.get("/openapi.json", include_in_schema=False) - async def openapi() -> dict[str, Any]: - out: dict = get_openapi(title=application.title, version=application.version, routes=application.routes) - return out - - application.include_router(docs_router) - - return application diff --git a/src/app/core/utils/cache.py b/src/app/core/utils/cache.py deleted file mode 100644 index ca082466..00000000 --- a/src/app/core/utils/cache.py +++ /dev/null @@ -1,346 +0,0 @@ -import functools -import json -import re -from collections.abc import AsyncGenerator, Callable -from typing import Any - -from fastapi import Request -from fastapi.encoders import jsonable_encoder -from redis.asyncio import ConnectionPool, Redis - -from ..exceptions.cache_exceptions import CacheIdentificationInferenceError, InvalidRequestError, MissingClientError - -pool: ConnectionPool | None = None -client: Redis | None = None - - -def _infer_resource_id(kwargs: dict[str, Any], resource_id_type: type | tuple[type, ...]) -> int | str: - """Infer the resource ID from a dictionary of keyword arguments. - - Parameters - ---------- - kwargs: Dict[str, Any] - A dictionary of keyword arguments. - resource_id_type: Union[type, Tuple[type, ...]] - The expected type of the resource ID, which can be integer (int) or a string (str). - - Returns - ------- - Union[None, int, str] - The inferred resource ID. If it cannot be inferred or does not match the expected type, it returns None. - - Note - ---- - - When `resource_id_type` is `int`, the function looks for an argument with the key 'id'. - - When `resource_id_type` is `str`, it attempts to infer the resource ID as a string. 
- """ - resource_id: int | str | None = None - for arg_name, arg_value in kwargs.items(): - if isinstance(arg_value, resource_id_type): - if (resource_id_type is int) and ("id" in arg_name): - resource_id = arg_value - - elif (resource_id_type is int) and ("id" not in arg_name): - pass - - elif resource_id_type is str: - resource_id = arg_value - - if resource_id is None: - raise CacheIdentificationInferenceError - - return resource_id - - -def _extract_data_inside_brackets(input_string: str) -> list[str]: - """Extract data inside curly brackets from a given string using regular expressions. - - Parameters - ---------- - input_string: str - The input string in which to find data enclosed within curly brackets. - - Returns - ------- - List[str] - A list of strings containing the data found inside the curly brackets within the input string. - - Example - ------- - >>> _extract_data_inside_brackets("The {quick} brown {fox} jumps over the {lazy} dog.") - ['quick', 'fox', 'lazy'] - """ - data_inside_brackets = re.findall(r"{(.*?)}", input_string) - return data_inside_brackets - - -def _construct_data_dict(data_inside_brackets: list[str], kwargs: dict[str, Any]) -> dict[str, Any]: - """Construct a dictionary based on data inside brackets and keyword arguments. - - Parameters - ---------- - data_inside_brackets: List[str] - A list of keys inside brackets. - kwargs: Dict[str, Any] - A dictionary of keyword arguments. - - Returns - ------- - Dict[str, Any]: A dictionary with keys from data_inside_brackets and corresponding values from kwargs. - """ - data_dict = {} - for key in data_inside_brackets: - data_dict[key] = kwargs[key] - return data_dict - - -def _format_prefix(prefix: str, kwargs: dict[str, Any]) -> str: - """Format a prefix using keyword arguments. - - Parameters - ---------- - prefix: str - The prefix template to be formatted. - kwargs: Dict[str, Any] - A dictionary of keyword arguments. - - Returns - ------- - str: The formatted prefix. 
- """ - data_inside_brackets = _extract_data_inside_brackets(prefix) - data_dict = _construct_data_dict(data_inside_brackets, kwargs) - formatted_prefix = prefix.format(**data_dict) - return formatted_prefix - - -def _format_extra_data(to_invalidate_extra: dict[str, str], kwargs: dict[str, Any]) -> dict[str, Any]: - """Format extra data based on provided templates and keyword arguments. - - This function takes a dictionary of templates and their associated values and a dictionary of keyword arguments. - It formats the templates with the corresponding values from the keyword arguments and returns a dictionary - where keys are the formatted templates and values are the associated keyword argument values. - - Parameters - ---------- - to_invalidate_extra: Dict[str, str] - A dictionary where keys are templates and values are the associated values. - kwargs: Dict[str, Any] - A dictionary of keyword arguments. - - Returns - ------- - Dict[str, Any]: A dictionary where keys are formatted templates and values - are associated keyword argument values. - """ - formatted_extra = {} - for prefix, id_template in to_invalidate_extra.items(): - formatted_prefix = _format_prefix(prefix, kwargs) - id = _extract_data_inside_brackets(id_template)[0] - formatted_extra[formatted_prefix] = kwargs[id] - - return formatted_extra - - -async def _delete_keys_by_pattern(pattern: str) -> None: - """Delete keys from Redis that match a given pattern using the SCAN command. - - This function iteratively scans the Redis key space for keys that match a specific pattern - and deletes them. It uses the SCAN command to efficiently find keys, which is more - performance-friendly compared to the KEYS command, especially for large datasets. - - The function scans the key space in an iterative manner using a cursor-based approach. - It retrieves a batch of keys matching the pattern on each iteration and deletes them - until no matching keys are left. 
- - Parameters - ---------- - pattern: str - The pattern to match keys against. The pattern can include wildcards, - such as '*' for matching any character sequence. Example: 'user:*' - - Notes - ----- - - The SCAN command is used with a count of 100 to retrieve keys in batches. - This count can be adjusted based on the size of your dataset and Redis performance. - - - The function uses the delete command to remove keys in bulk. If the dataset - is extremely large, consider implementing additional logic to handle bulk deletion - more efficiently. - - - Be cautious with patterns that could match a large number of keys, as deleting - many keys simultaneously may impact the performance of the Redis server. - """ - if client is None: - return - - cursor = 0 - while True: - cursor, keys = await client.scan(cursor, match=pattern, count=100) - if keys: - await client.delete(*keys) - if cursor == 0: - break - - -def cache( - key_prefix: str, - resource_id_name: Any = None, - expiration: int = 3600, - resource_id_type: type | tuple[type, ...] = int, - to_invalidate_extra: dict[str, Any] | None = None, - pattern_to_invalidate_extra: list[str] | None = None, -) -> Callable: - """Cache decorator for FastAPI endpoints. - - This decorator enables caching the results of FastAPI endpoint functions to improve response times - and reduce the load on the application by storing and retrieving data in a cache. - - Parameters - ---------- - key_prefix: str - A unique prefix to identify the cache key. - resource_id_name: Any, optional - The name of the resource ID argument in the decorated function. If provided, it is used directly; - otherwise, the resource ID is inferred from the function's arguments. - expiration: int, optional - The expiration time for the cached data in seconds. Defaults to 3600 seconds (1 hour). - resource_id_type: Union[type, Tuple[type, ...]], default int - The expected type of the resource ID. 
- This can be a single type (e.g., int) or a tuple of types (e.g., (int, str)). - Defaults to int. This is used only if resource_id_name is not provided. - to_invalidate_extra: Dict[str, Any] | None, optional - A dictionary where keys are cache key prefixes and values are templates for cache key suffixes. - These keys are invalidated when the decorated function is called with a method other than GET. - pattern_to_invalidate_extra: List[str] | None, optional - A list of string patterns for cache keys that should be invalidated when the decorated function is called. - This allows for bulk invalidation of cache keys based on a matching pattern. - - Returns - ------- - Callable - A decorator function that can be applied to FastAPI endpoint functions. - - Example usage - ------------- - - ```python - from fastapi import FastAPI, Request - from my_module import cache # Replace with your actual module and imports - - app = FastAPI() - - # Define a sample endpoint with caching - @app.get("/sample/{resource_id}") - @cache(key_prefix="sample_data", expiration=3600, resource_id_type=int) - async def sample_endpoint(request: Request, resource_id: int): - # Your endpoint logic here - return {"data": "your_data"} - ``` - - This decorator caches the response data of the endpoint function using a unique cache key. - The cached data is retrieved for GET requests, and the cache is invalidated for other types of requests. 
- - Advanced Example Usage - ------------- - ```python - from fastapi import FastAPI, Request - from my_module import cache - - app = FastAPI() - - - @app.get("/users/{user_id}/items") - @cache(key_prefix="user_items", resource_id_name="user_id", expiration=1200) - async def read_user_items(request: Request, user_id: int): - # Endpoint logic to fetch user's items - return {"items": "user specific items"} - - - @app.put("/items/{item_id}") - @cache( - key_prefix="item_data", - resource_id_name="item_id", - to_invalidate_extra={"user_items": "{user_id}"}, - pattern_to_invalidate_extra=["user_*_items:*"], - ) - async def update_item(request: Request, item_id: int, data: dict, user_id: int): - # Update logic for an item - # Invalidate both the specific item cache and all user-specific item lists - return {"status": "updated"} - ``` - - In this example: - - When reading user items, the response is cached under a key formed with 'user_items' prefix and 'user_id'. - - When updating an item, the cache for this specific item (under 'item_data:item_id') and all caches with keys - starting with 'user_{user_id}_items:' are invalidated. The `to_invalidate_extra` parameter specifically targets - the cache for user-specific item lists, while `pattern_to_invalidate_extra` allows bulk invalidation of all keys - matching the pattern 'user_*_items:*', covering all users. - - Note - ---- - - resource_id_type is used only if resource_id is not passed. - - `to_invalidate_extra` and `pattern_to_invalidate_extra` are used for cache invalidation on methods other than GET. - - Using `pattern_to_invalidate_extra` can be resource-intensive on large datasets. Use it judiciously and - consider the potential impact on Redis performance. 
- """ - - def wrapper(func: Callable) -> Callable: - @functools.wraps(func) - async def inner(request: Request, *args: Any, **kwargs: Any) -> Any: - if client is None: - raise MissingClientError - - if resource_id_name: - resource_id = kwargs[resource_id_name] - else: - resource_id = _infer_resource_id(kwargs=kwargs, resource_id_type=resource_id_type) - - formatted_key_prefix = _format_prefix(key_prefix, kwargs) - cache_key = f"{formatted_key_prefix}:{resource_id}" - if request.method == "GET": - if to_invalidate_extra is not None or pattern_to_invalidate_extra is not None: - raise InvalidRequestError - - cached_data = await client.get(cache_key) - if cached_data: - return json.loads(cached_data.decode()) - - result = await func(request, *args, **kwargs) - - if request.method == "GET": - serializable_data = jsonable_encoder(result) - serialized_data = json.dumps(serializable_data) - - await client.set(cache_key, serialized_data) - await client.expire(cache_key, expiration) - - return json.loads(serialized_data) - - else: - await client.delete(cache_key) - if to_invalidate_extra is not None: - formatted_extra = _format_extra_data(to_invalidate_extra, kwargs) - for prefix, id in formatted_extra.items(): - extra_cache_key = f"{prefix}:{id}" - await client.delete(extra_cache_key) - - if pattern_to_invalidate_extra is not None: - for pattern in pattern_to_invalidate_extra: - formatted_pattern = _format_prefix(pattern, kwargs) - await _delete_keys_by_pattern(formatted_pattern + "*") - - return result - - return inner - - return wrapper - - -async def async_get_redis() -> AsyncGenerator[Redis, None]: - """Get a Redis client from the pool for each request.""" - client = Redis(connection_pool=pool) - try: - yield client - finally: - await client.aclose() # type: ignore diff --git a/src/app/core/utils/queue.py b/src/app/core/utils/queue.py deleted file mode 100644 index 70840373..00000000 --- a/src/app/core/utils/queue.py +++ /dev/null @@ -1,3 +0,0 @@ -from arq.connections 
import ArqRedis - -pool: ArqRedis | None = None diff --git a/src/app/core/utils/rate_limit.py b/src/app/core/utils/rate_limit.py deleted file mode 100644 index 9cbb4eb8..00000000 --- a/src/app/core/utils/rate_limit.py +++ /dev/null @@ -1,61 +0,0 @@ -from datetime import UTC, datetime -from typing import Optional - -from redis.asyncio import ConnectionPool, Redis -from sqlalchemy.ext.asyncio import AsyncSession - -from ...core.logger import logging -from ...schemas.rate_limit import sanitize_path - -logger = logging.getLogger(__name__) - - -class RateLimiter: - _instance: Optional["RateLimiter"] = None - pool: Optional[ConnectionPool] = None - client: Optional[Redis] = None - - def __new__(cls) -> "RateLimiter": - if cls._instance is None: - cls._instance = super().__new__(cls) - return cls._instance - - @classmethod - def initialize(cls, redis_url: str) -> None: - instance = cls() - if instance.pool is None: - instance.pool = ConnectionPool.from_url(redis_url) - instance.client = Redis(connection_pool=instance.pool) - - @classmethod - def get_client(cls) -> Redis: - instance = cls() - if instance.client is None: - logger.error("Redis client is not initialized.") - raise Exception("Redis client is not initialized.") - return instance.client - - async def is_rate_limited(self, db: AsyncSession, user_id: int, path: str, limit: int, period: int) -> bool: - client = self.get_client() - current_timestamp = int(datetime.now(UTC).timestamp()) - window_start = current_timestamp - (current_timestamp % period) - - sanitized_path = sanitize_path(path) - key = f"ratelimit:{user_id}:{sanitized_path}:{window_start}" - - try: - current_count = await client.incr(key) - if current_count == 1: - await client.expire(key, period) - - if current_count > limit: - return True - - except Exception as e: - logger.exception(f"Error checking rate limit for user {user_id} on path {path}: {e}") - raise e - - return False - - -rate_limiter = RateLimiter() diff --git 
a/src/app/core/worker/functions.py b/src/app/core/worker/functions.py deleted file mode 100644 index 74c3b4ad..00000000 --- a/src/app/core/worker/functions.py +++ /dev/null @@ -1,34 +0,0 @@ -import asyncio -import logging -from typing import Any - -import structlog -import uvloop -from arq.worker import Worker - -asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) - - -# -------- background tasks -------- -async def sample_background_task(ctx: Worker, name: str) -> str: - await asyncio.sleep(5) - return f"Task {name} is complete!" - - -# -------- base functions -------- -async def startup(ctx: Worker) -> None: - logging.info("Worker Started") - - -async def shutdown(ctx: Worker) -> None: - logging.info("Worker end") - - -async def on_job_start(ctx: dict[str, Any]) -> None: - structlog.contextvars.bind_contextvars(job_id=ctx["job_id"]) - logging.info("Job Started") - - -async def on_job_end(ctx: dict[str, Any]) -> None: - logging.info("Job Competed") - structlog.contextvars.clear_contextvars() diff --git a/src/app/core/worker/settings.py b/src/app/core/worker/settings.py deleted file mode 100644 index 7f8511ef..00000000 --- a/src/app/core/worker/settings.py +++ /dev/null @@ -1,42 +0,0 @@ -import asyncio -from typing import cast - -from arq.cli import watch_reload -from arq.connections import RedisSettings -from arq.typing import WorkerSettingsType -from arq.worker import check_health, run_worker - -from ...core.config import settings -from ...core.logger import logging # noqa: F401 -from .functions import on_job_end, on_job_start, sample_background_task, shutdown, startup - -REDIS_QUEUE_HOST = settings.REDIS_QUEUE_HOST -REDIS_QUEUE_PORT = settings.REDIS_QUEUE_PORT - - -class WorkerSettings: - functions = [sample_background_task] - redis_settings = RedisSettings(host=REDIS_QUEUE_HOST, port=REDIS_QUEUE_PORT) - on_startup = startup - on_shutdown = shutdown - on_job_start = on_job_start - on_job_end = on_job_end - handle_signals = False - - -def 
start_arq_service(check: bool = False, burst: int | None = None, watch: str | None = None): - worker_settings_ = cast("WorkerSettingsType", WorkerSettings) - - if check: - exit(check_health(worker_settings_)) - else: - kwargs = {} if burst is None else {"burst": burst} - if watch: - asyncio.run(watch_reload(watch, worker_settings_)) - else: - run_worker(worker_settings_, **kwargs) - - -if __name__ == "__main__": - start_arq_service() - # python -m src.app.core.worker.settings diff --git a/src/app/crud/crud_posts.py b/src/app/crud/crud_posts.py deleted file mode 100644 index 011a10bb..00000000 --- a/src/app/crud/crud_posts.py +++ /dev/null @@ -1,7 +0,0 @@ -from fastcrud import FastCRUD - -from ..models.post import Post -from ..schemas.post import PostCreateInternal, PostDelete, PostRead, PostUpdate, PostUpdateInternal - -CRUDPost = FastCRUD[Post, PostCreateInternal, PostUpdate, PostUpdateInternal, PostDelete, PostRead] -crud_posts = CRUDPost(Post) diff --git a/src/app/crud/crud_rate_limit.py b/src/app/crud/crud_rate_limit.py deleted file mode 100644 index 52b13ebd..00000000 --- a/src/app/crud/crud_rate_limit.py +++ /dev/null @@ -1,15 +0,0 @@ -from fastcrud import FastCRUD - -from ..models.rate_limit import RateLimit -from ..schemas.rate_limit import ( - RateLimitCreateInternal, - RateLimitDelete, - RateLimitRead, - RateLimitUpdate, - RateLimitUpdateInternal, -) - -CRUDRateLimit = FastCRUD[ - RateLimit, RateLimitCreateInternal, RateLimitUpdate, RateLimitUpdateInternal, RateLimitDelete, RateLimitRead -] -crud_rate_limits = CRUDRateLimit(RateLimit) diff --git a/src/app/crud/crud_tier.py b/src/app/crud/crud_tier.py deleted file mode 100644 index 98b38a9b..00000000 --- a/src/app/crud/crud_tier.py +++ /dev/null @@ -1,7 +0,0 @@ -from fastcrud import FastCRUD - -from ..models.tier import Tier -from ..schemas.tier import TierCreateInternal, TierDelete, TierRead, TierUpdate, TierUpdateInternal - -CRUDTier = FastCRUD[Tier, TierCreateInternal, TierUpdate, TierUpdateInternal, 
TierDelete, TierRead] -crud_tiers = CRUDTier(Tier) diff --git a/src/app/crud/crud_users.py b/src/app/crud/crud_users.py deleted file mode 100644 index 462b23c6..00000000 --- a/src/app/crud/crud_users.py +++ /dev/null @@ -1,7 +0,0 @@ -from fastcrud import FastCRUD - -from ..models.user import User -from ..schemas.user import UserCreateInternal, UserDelete, UserRead, UserUpdate, UserUpdateInternal - -CRUDUser = FastCRUD[User, UserCreateInternal, UserUpdate, UserUpdateInternal, UserDelete, UserRead] -crud_users = CRUDUser(User) diff --git a/src/app/main.py b/src/app/main.py deleted file mode 100644 index 6e72ab29..00000000 --- a/src/app/main.py +++ /dev/null @@ -1,34 +0,0 @@ -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager - -from fastapi import FastAPI - -from .admin.initialize import create_admin_interface -from .api import router -from .core.config import settings -from .core.setup import create_application, lifespan_factory - -admin = create_admin_interface() - - -@asynccontextmanager -async def lifespan_with_admin(app: FastAPI) -> AsyncGenerator[None, None]: - """Custom lifespan that includes admin initialization.""" - # Get the default lifespan - default_lifespan = lifespan_factory(settings) - - # Run the default lifespan initialization and our admin initialization - async with default_lifespan(app): - # Initialize admin interface if it exists - if admin: - # Initialize admin database and setup - await admin.initialize() - - yield - - -app = create_application(router=router, settings=settings, lifespan=lifespan_with_admin) - -# Mount admin interface if enabled -if admin: - app.mount(settings.CRUD_ADMIN_MOUNT_PATH, admin.app) diff --git a/src/app/middleware/client_cache_middleware.py b/src/app/middleware/client_cache_middleware.py deleted file mode 100644 index 4b2ef63b..00000000 --- a/src/app/middleware/client_cache_middleware.py +++ /dev/null @@ -1,56 +0,0 @@ -from fastapi import FastAPI, Request, Response -from 
starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint - - -class ClientCacheMiddleware(BaseHTTPMiddleware): - """Middleware to set the `Cache-Control` header for client-side caching on all responses. - - Parameters - ---------- - app: FastAPI - The FastAPI application instance. - max_age: int, optional - Duration (in seconds) for which the response should be cached. Defaults to 60 seconds. - - Attributes - ---------- - max_age: int - Duration (in seconds) for which the response should be cached. - - Methods - ------- - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: - Process the request and set the `Cache-Control` header in the response. - - Note - ---- - - The `Cache-Control` header instructs clients (e.g., browsers) - to cache the response for the specified duration. - """ - - def __init__(self, app: FastAPI, max_age: int = 60) -> None: - super().__init__(app) - self.max_age = max_age - - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: - """Process the request and set the `Cache-Control` header in the response. - - Parameters - ---------- - request: Request - The incoming request. - call_next: RequestResponseEndpoint - The next middleware or route handler in the processing chain. - - Returns - ------- - Response - The response object with the `Cache-Control` header set. - - Note - ---- - - This method is automatically called by Starlette for processing the request-response cycle. 
- """ - response: Response = await call_next(request) - response.headers["Cache-Control"] = f"public, max-age={self.max_age}" - return response diff --git a/src/app/middleware/logger_middleware.py b/src/app/middleware/logger_middleware.py deleted file mode 100644 index aac4066d..00000000 --- a/src/app/middleware/logger_middleware.py +++ /dev/null @@ -1,38 +0,0 @@ -# app/middleware/request_id.py -import uuid - -import structlog -from fastapi import FastAPI, Request -from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint -from starlette.responses import Response - - -class LoggerMiddleware(BaseHTTPMiddleware): - """Middleware to add request ID to the context variables. - - Parameters - ---------- - app: FastAPI - The FastAPI application instance. - """ - - def __init__(self, app: FastAPI) -> None: - super().__init__(app) - - async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response: - """ - Add request ID to the context variables. 
- """ - request_id = request.headers.get("X-Request-ID", str(uuid.uuid4())) - structlog.contextvars.clear_contextvars() - structlog.contextvars.bind_contextvars( - request_id=request_id, - client_host=request.client.host if request.client else None, - status_code=None, - path=request.url.path, - method=request.method, - ) - response = await call_next(request) - structlog.contextvars.bind_contextvars(status_code=response.status_code) - response.headers["X-Request-ID"] = request_id - return response diff --git a/src/app/models/__init__.py b/src/app/models/__init__.py deleted file mode 100644 index c44c105e..00000000 --- a/src/app/models/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .post import Post -from .rate_limit import RateLimit -from .tier import Tier -from .user import User diff --git a/src/app/models/post.py b/src/app/models/post.py deleted file mode 100644 index 032e9ca9..00000000 --- a/src/app/models/post.py +++ /dev/null @@ -1,24 +0,0 @@ -import uuid as uuid_pkg -from datetime import UTC, datetime - -from sqlalchemy import UUID, DateTime, ForeignKey, String -from sqlalchemy.orm import Mapped, mapped_column -from uuid6 import uuid7 - -from ..core.db.database import Base - - -class Post(Base): - __tablename__ = "post" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - created_by_user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), index=True) - title: Mapped[str] = mapped_column(String(30)) - text: Mapped[str] = mapped_column(String(63206)) - uuid: Mapped[uuid_pkg.UUID] = mapped_column(UUID(as_uuid=True), default_factory=uuid7, unique=True) - media_url: Mapped[str | None] = mapped_column(String, default=None) - - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - deleted_at: Mapped[datetime | None] = 
mapped_column(DateTime(timezone=True), default=None) - is_deleted: Mapped[bool] = mapped_column(default=False, index=True) diff --git a/src/app/models/rate_limit.py b/src/app/models/rate_limit.py deleted file mode 100644 index 8a88e5fb..00000000 --- a/src/app/models/rate_limit.py +++ /dev/null @@ -1,20 +0,0 @@ -from datetime import UTC, datetime - -from sqlalchemy import DateTime, ForeignKey, Integer, String -from sqlalchemy.orm import Mapped, mapped_column - -from ..core.db.database import Base - - -class RateLimit(Base): - __tablename__ = "rate_limit" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - tier_id: Mapped[int] = mapped_column(ForeignKey("tier.id"), index=True) - name: Mapped[str] = mapped_column(String, nullable=False, unique=True) - path: Mapped[str] = mapped_column(String, nullable=False) - limit: Mapped[int] = mapped_column(Integer, nullable=False) - period: Mapped[int] = mapped_column(Integer, nullable=False) - - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) diff --git a/src/app/models/tier.py b/src/app/models/tier.py deleted file mode 100644 index d45b0993..00000000 --- a/src/app/models/tier.py +++ /dev/null @@ -1,16 +0,0 @@ -from datetime import UTC, datetime - -from sqlalchemy import DateTime, String -from sqlalchemy.orm import Mapped, mapped_column - -from ..core.db.database import Base - - -class Tier(Base): - __tablename__ = "tier" - - id: Mapped[int] = mapped_column("id", autoincrement=True, nullable=False, unique=True, primary_key=True, init=False) - name: Mapped[str] = mapped_column(String, nullable=False, unique=True) - - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), 
default=None) diff --git a/src/app/models/user.py b/src/app/models/user.py deleted file mode 100644 index 07cca2d8..00000000 --- a/src/app/models/user.py +++ /dev/null @@ -1,30 +0,0 @@ -import uuid as uuid_pkg -from datetime import UTC, datetime - -from sqlalchemy import DateTime, ForeignKey, String -from sqlalchemy.dialects.postgresql import UUID -from sqlalchemy.orm import Mapped, mapped_column -from uuid6 import uuid7 - -from ..core.db.database import Base - - -class User(Base): - __tablename__ = "user" - - id: Mapped[int] = mapped_column(autoincrement=True, primary_key=True, init=False) - - name: Mapped[str] = mapped_column(String(30)) - username: Mapped[str] = mapped_column(String(20), unique=True, index=True) - email: Mapped[str] = mapped_column(String(50), unique=True, index=True) - hashed_password: Mapped[str] = mapped_column(String) - - profile_image_url: Mapped[str] = mapped_column(String, default="https://profileimageurl.com") - uuid: Mapped[uuid_pkg.UUID] = mapped_column(UUID(as_uuid=True), default_factory=uuid7, unique=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default_factory=lambda: datetime.now(UTC)) - updated_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - deleted_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), default=None) - is_deleted: Mapped[bool] = mapped_column(default=False, index=True) - is_superuser: Mapped[bool] = mapped_column(default=False) - - tier_id: Mapped[int | None] = mapped_column(ForeignKey("tier.id"), index=True, default=None, init=False) diff --git a/src/app/schemas/job.py b/src/app/schemas/job.py deleted file mode 100644 index 13f75963..00000000 --- a/src/app/schemas/job.py +++ /dev/null @@ -1,5 +0,0 @@ -from pydantic import BaseModel - - -class Job(BaseModel): - id: str diff --git a/src/app/schemas/post.py b/src/app/schemas/post.py deleted file mode 100644 index d9eb5093..00000000 --- a/src/app/schemas/post.py +++ /dev/null @@ -1,69 
+0,0 @@ -from datetime import datetime -from typing import Annotated - -from pydantic import BaseModel, ConfigDict, Field - -from ..core.schemas import PersistentDeletion, TimestampSchema, UUIDSchema - - -class PostBase(BaseModel): - title: Annotated[str, Field(min_length=2, max_length=30, examples=["This is my post"])] - text: Annotated[str, Field(min_length=1, max_length=63206, examples=["This is the content of my post."])] - - -class Post(TimestampSchema, PostBase, UUIDSchema, PersistentDeletion): - media_url: Annotated[ - str | None, - Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], default=None), - ] - created_by_user_id: int - - -class PostRead(BaseModel): - id: int - title: Annotated[str, Field(min_length=2, max_length=30, examples=["This is my post"])] - text: Annotated[str, Field(min_length=1, max_length=63206, examples=["This is the content of my post."])] - media_url: Annotated[ - str | None, - Field(examples=["https://www.postimageurl.com"], default=None), - ] - created_by_user_id: int - created_at: datetime - - -class PostCreate(PostBase): - model_config = ConfigDict(extra="forbid") - - media_url: Annotated[ - str | None, - Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], default=None), - ] - - -class PostCreateInternal(PostCreate): - created_by_user_id: int - - -class PostUpdate(BaseModel): - model_config = ConfigDict(extra="forbid") - - title: Annotated[str | None, Field(min_length=2, max_length=30, examples=["This is my updated post"], default=None)] - text: Annotated[ - str | None, - Field(min_length=1, max_length=63206, examples=["This is the updated content of my post."], default=None), - ] - media_url: Annotated[ - str | None, - Field(pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.postimageurl.com"], default=None), - ] - - -class PostUpdateInternal(PostUpdate): - updated_at: datetime - - -class PostDelete(BaseModel): - 
model_config = ConfigDict(extra="forbid") - - is_deleted: bool - deleted_at: datetime diff --git a/src/app/schemas/rate_limit.py b/src/app/schemas/rate_limit.py deleted file mode 100644 index 5f3e4d17..00000000 --- a/src/app/schemas/rate_limit.py +++ /dev/null @@ -1,60 +0,0 @@ -from datetime import datetime -from typing import Annotated - -from pydantic import BaseModel, ConfigDict, Field, field_validator - -from ..core.schemas import TimestampSchema - - -def sanitize_path(path: str) -> str: - return path.strip("/").replace("/", "_") - - -class RateLimitBase(BaseModel): - path: Annotated[str, Field(examples=["users"])] - limit: Annotated[int, Field(examples=[5])] - period: Annotated[int, Field(examples=[60])] - - @field_validator("path") - def validate_and_sanitize_path(cls, v: str) -> str: - return sanitize_path(v) - - -class RateLimit(TimestampSchema, RateLimitBase): - tier_id: int - name: Annotated[str | None, Field(default=None, examples=["users:5:60"])] - - -class RateLimitRead(RateLimitBase): - id: int - tier_id: int - name: str - - -class RateLimitCreate(RateLimitBase): - model_config = ConfigDict(extra="forbid") - - name: Annotated[str | None, Field(default=None, examples=["api_v1_users:5:60"])] - - -class RateLimitCreateInternal(RateLimitCreate): - tier_id: int - - -class RateLimitUpdate(BaseModel): - path: str | None = Field(default=None) - limit: int | None = None - period: int | None = None - name: str | None = None - - @field_validator("path") - def validate_and_sanitize_path(cls, v: str) -> str: - return sanitize_path(v) if v is not None else None - - -class RateLimitUpdateInternal(RateLimitUpdate): - updated_at: datetime - - -class RateLimitDelete(BaseModel): - pass diff --git a/src/app/schemas/tier.py b/src/app/schemas/tier.py deleted file mode 100644 index 2e6f81f4..00000000 --- a/src/app/schemas/tier.py +++ /dev/null @@ -1,39 +0,0 @@ -from datetime import datetime -from typing import Annotated - -from pydantic import BaseModel, Field - -from 
..core.schemas import TimestampSchema - - -class TierBase(BaseModel): - name: Annotated[str, Field(examples=["free"])] - - -class Tier(TimestampSchema, TierBase): - pass - - -class TierRead(TierBase): - id: int - created_at: datetime - - -class TierCreate(TierBase): - pass - - -class TierCreateInternal(TierCreate): - pass - - -class TierUpdate(BaseModel): - name: str | None = None - - -class TierUpdateInternal(TierUpdate): - updated_at: datetime - - -class TierDelete(BaseModel): - pass diff --git a/src/app/schemas/user.py b/src/app/schemas/user.py deleted file mode 100644 index c33a94e3..00000000 --- a/src/app/schemas/user.py +++ /dev/null @@ -1,74 +0,0 @@ -from datetime import datetime -from typing import Annotated - -from pydantic import BaseModel, ConfigDict, EmailStr, Field - -from ..core.schemas import PersistentDeletion, TimestampSchema, UUIDSchema - - -class UserBase(BaseModel): - name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])] - username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"])] - email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])] - - -class User(TimestampSchema, UserBase, UUIDSchema, PersistentDeletion): - profile_image_url: Annotated[str, Field(default="https://www.profileimageurl.com")] - hashed_password: str - is_superuser: bool = False - tier_id: int | None = None - - -class UserRead(BaseModel): - id: int - - name: Annotated[str, Field(min_length=2, max_length=30, examples=["User Userson"])] - username: Annotated[str, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userson"])] - email: Annotated[EmailStr, Field(examples=["user.userson@example.com"])] - profile_image_url: str - tier_id: int | None - - -class UserCreate(UserBase): - model_config = ConfigDict(extra="forbid") - - password: Annotated[str, Field(pattern=r"^.{8,}|[0-9]+|[A-Z]+|[a-z]+|[^a-zA-Z0-9]+$", examples=["Str1ngst!"])] - - -class 
UserCreateInternal(UserBase): - hashed_password: str - - -class UserUpdate(BaseModel): - model_config = ConfigDict(extra="forbid") - - name: Annotated[str | None, Field(min_length=2, max_length=30, examples=["User Userberg"], default=None)] - username: Annotated[ - str | None, Field(min_length=2, max_length=20, pattern=r"^[a-z0-9]+$", examples=["userberg"], default=None) - ] - email: Annotated[EmailStr | None, Field(examples=["user.userberg@example.com"], default=None)] - profile_image_url: Annotated[ - str | None, - Field( - pattern=r"^(https?|ftp)://[^\s/$.?#].[^\s]*$", examples=["https://www.profileimageurl.com"], default=None - ), - ] - - -class UserUpdateInternal(UserUpdate): - updated_at: datetime - - -class UserTierUpdate(BaseModel): - tier_id: int - - -class UserDelete(BaseModel): - model_config = ConfigDict(extra="forbid") - - is_deleted: bool - deleted_at: datetime - - -class UserRestoreDeleted(BaseModel): - is_deleted: bool diff --git a/src/migrations/README b/src/migrations/README deleted file mode 100644 index 2500aa1b..00000000 --- a/src/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. 
diff --git a/src/scripts/create_first_superuser.py b/src/scripts/create_first_superuser.py deleted file mode 100644 index baf58af6..00000000 --- a/src/scripts/create_first_superuser.py +++ /dev/null @@ -1,78 +0,0 @@ -import asyncio -import logging -from datetime import UTC, datetime - -from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, String, Table, insert, select -from sqlalchemy.dialects.postgresql import UUID -from uuid6 import uuid7 # 126 - -from ..app.core.config import settings -from ..app.core.db.database import AsyncSession, async_engine, local_session -from ..app.core.security import get_password_hash -from ..app.models.user import User - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -async def create_first_user(session: AsyncSession) -> None: - try: - name = settings.ADMIN_NAME - email = settings.ADMIN_EMAIL - username = settings.ADMIN_USERNAME - hashed_password = get_password_hash(settings.ADMIN_PASSWORD) - - query = select(User).filter_by(email=email) - result = await session.execute(query) - user = result.scalar_one_or_none() - - if user is None: - metadata = MetaData() - user_table = Table( - "user", - metadata, - Column("id", Integer, primary_key=True, autoincrement=True, nullable=False), - Column("name", String(30), nullable=False), - Column("username", String(20), nullable=False, unique=True, index=True), - Column("email", String(50), nullable=False, unique=True, index=True), - Column("hashed_password", String, nullable=False), - Column("profile_image_url", String, default="https://profileimageurl.com"), - Column("uuid", UUID(as_uuid=True), default=uuid7, unique=True), - Column("created_at", DateTime(timezone=True), default=lambda: datetime.now(UTC), nullable=False), - Column("updated_at", DateTime), - Column("deleted_at", DateTime), - Column("is_deleted", Boolean, default=False, index=True), - Column("is_superuser", Boolean, default=False), - Column("tier_id", Integer, 
ForeignKey("tier.id"), index=True), - ) - - data = { - "name": name, - "email": email, - "username": username, - "hashed_password": hashed_password, - "is_superuser": True, - } - - stmt = insert(user_table).values(data) - async with async_engine.connect() as conn: - await conn.execute(stmt) - await conn.commit() - - logger.info(f"Admin user {username} created successfully.") - - else: - logger.info(f"Admin user {username} already exists.") - - except Exception as e: - logger.error(f"Error creating admin user: {e}") - - -async def main(): - async with local_session() as session: - await create_first_user(session) - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) diff --git a/src/scripts/create_first_tier.py b/src/scripts/create_first_tier.py deleted file mode 100644 index baceb9f9..00000000 --- a/src/scripts/create_first_tier.py +++ /dev/null @@ -1,41 +0,0 @@ -import asyncio -import logging - -from sqlalchemy import select - -from ..app.core.config import config -from ..app.core.db.database import AsyncSession, local_session -from ..app.models.tier import Tier - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -async def create_first_tier(session: AsyncSession) -> None: - try: - tier_name = config("TIER_NAME", default="free") - - query = select(Tier).where(Tier.name == tier_name) - result = await session.execute(query) - tier = result.scalar_one_or_none() - - if tier is None: - session.add(Tier(name=tier_name)) - await session.commit() - logger.info(f"Tier '{tier_name}' created successfully.") - - else: - logger.info(f"Tier '{tier_name}' already exists.") - - except Exception as e: - logger.error(f"Error creating tier: {e}") - - -async def main(): - async with local_session() as session: - await create_first_tier(session) - - -if __name__ == "__main__": - loop = asyncio.get_event_loop() - loop.run_until_complete(main()) diff --git a/tests/conftest.py b/tests/conftest.py deleted file 
mode 100644 index 22038f1b..00000000 --- a/tests/conftest.py +++ /dev/null @@ -1,102 +0,0 @@ -from collections.abc import Callable, Generator -from typing import Any -from unittest.mock import AsyncMock, Mock - -import pytest -from faker import Faker -from fastapi.testclient import TestClient -from sqlalchemy import create_engine -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import sessionmaker -from sqlalchemy.orm.session import Session - -from src.app.core.config import settings -from src.app.main import app - -DATABASE_URI = settings.POSTGRES_URI -DATABASE_PREFIX = settings.POSTGRES_SYNC_PREFIX - -sync_engine = create_engine(DATABASE_PREFIX + DATABASE_URI) -local_session = sessionmaker(autocommit=False, autoflush=False, bind=sync_engine) - - -fake = Faker() - - -@pytest.fixture(scope="session") -def client() -> Generator[TestClient, Any, None]: - with TestClient(app) as _client: - yield _client - app.dependency_overrides = {} - sync_engine.dispose() - - -@pytest.fixture -def db() -> Generator[Session, Any, None]: - session = local_session() - yield session - session.close() - - -def override_dependency(dependency: Callable[..., Any], mocked_response: Any) -> None: - app.dependency_overrides[dependency] = lambda: mocked_response - - -@pytest.fixture -def mock_db(): - """Mock database session for unit tests.""" - return Mock(spec=AsyncSession) - - -@pytest.fixture -def mock_redis(): - """Mock Redis connection for unit tests.""" - mock_redis = Mock() - mock_redis.get = AsyncMock(return_value=None) - mock_redis.set = AsyncMock(return_value=True) - mock_redis.delete = AsyncMock(return_value=True) - return mock_redis - - -@pytest.fixture -def sample_user_data(): - """Generate sample user data for tests.""" - return { - "name": fake.name(), - "username": fake.user_name(), - "email": fake.email(), - "password": fake.password(), - } - - -@pytest.fixture -def sample_user_read(): - """Generate a sample UserRead object.""" - from uuid6 import uuid7 - 
- from src.app.schemas.user import UserRead - - return UserRead( - id=1, - uuid=uuid7(), - name=fake.name(), - username=fake.user_name(), - email=fake.email(), - profile_image_url=fake.image_url(), - is_superuser=False, - created_at=fake.date_time(), - updated_at=fake.date_time(), - tier_id=None, - ) - - -@pytest.fixture -def current_user_dict(): - """Mock current user from auth dependency.""" - return { - "id": 1, - "username": fake.user_name(), - "email": fake.email(), - "name": fake.name(), - "is_superuser": False, - } diff --git a/tests/helpers/generators.py b/tests/helpers/generators.py deleted file mode 100644 index 5016d68b..00000000 --- a/tests/helpers/generators.py +++ /dev/null @@ -1,24 +0,0 @@ -from sqlalchemy.orm import Session -from uuid6 import uuid7 # 126 - -from src.app import models -from src.app.core.security import get_password_hash -from tests.conftest import fake - - -def create_user(db: Session, is_super_user: bool = False) -> models.User: - _user = models.User( - name=fake.name(), - username=fake.user_name(), - email=fake.email(), - hashed_password=get_password_hash(fake.password()), - profile_image_url=fake.image_url(), - uuid=uuid7, - is_superuser=is_super_user, - ) - - db.add(_user) - db.commit() - db.refresh(_user) - - return _user diff --git a/tests/helpers/mocks.py b/tests/helpers/mocks.py deleted file mode 100644 index 713ae680..00000000 --- a/tests/helpers/mocks.py +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -from fastapi.encoders import jsonable_encoder - -from src.app import models -from tests.conftest import fake - - -def get_current_user(user: models.User) -> dict[str, Any]: - return jsonable_encoder(user) - - -def oauth2_scheme() -> str: - token = fake.sha256() - if isinstance(token, bytes): - token = token.decode("utf-8") - return token # type: ignore diff --git a/tests/test_user.py b/tests/test_user.py deleted file mode 100644 index 0cddbea6..00000000 --- a/tests/test_user.py +++ /dev/null @@ -1,194 +0,0 @@ 
-"""Unit tests for user API endpoints.""" - -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from src.app.api.v1.users import erase_user, patch_user, read_user, read_users, write_user -from src.app.core.exceptions.http_exceptions import DuplicateValueException, ForbiddenException, NotFoundException -from src.app.schemas.user import UserCreate, UserRead, UserUpdate - - -class TestWriteUser: - """Test user creation endpoint.""" - - @pytest.mark.asyncio - async def test_create_user_success(self, mock_db, sample_user_data, sample_user_read): - """Test successful user creation.""" - user_create = UserCreate(**sample_user_data) - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - # Mock that email and username don't exist - mock_crud.exists = AsyncMock(side_effect=[False, False]) # email, then username - mock_crud.create = AsyncMock(return_value=sample_user_read.model_dump()) - - with patch("src.app.api.v1.users.get_password_hash") as mock_hash: - mock_hash.return_value = "hashed_password" - - result = await write_user(Mock(), user_create, mock_db) - - assert result == sample_user_read.model_dump() - mock_crud.exists.assert_any_call(db=mock_db, email=user_create.email) - mock_crud.exists.assert_any_call(db=mock_db, username=user_create.username) - mock_crud.create.assert_called_once() - - @pytest.mark.asyncio - async def test_create_user_duplicate_email(self, mock_db, sample_user_data): - """Test user creation with duplicate email.""" - user_create = UserCreate(**sample_user_data) - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - # Mock that email already exists - mock_crud.exists = AsyncMock(return_value=True) - - with pytest.raises(DuplicateValueException, match="Email is already registered"): - await write_user(Mock(), user_create, mock_db) - - @pytest.mark.asyncio - async def test_create_user_duplicate_username(self, mock_db, sample_user_data): - """Test user creation with duplicate username.""" - user_create = 
UserCreate(**sample_user_data) - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - # Mock email doesn't exist, but username does - mock_crud.exists = AsyncMock(side_effect=[False, True]) - - with pytest.raises(DuplicateValueException, match="Username not available"): - await write_user(Mock(), user_create, mock_db) - - -class TestReadUser: - """Test user retrieval endpoint.""" - - @pytest.mark.asyncio - async def test_read_user_success(self, mock_db, sample_user_read): - """Test successful user retrieval.""" - username = "test_user" - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - user_dict = sample_user_read.model_dump() - mock_crud.get = AsyncMock(return_value=user_dict) - - result = await read_user(Mock(), username, mock_db) - - assert result == user_dict - mock_crud.get.assert_called_once_with( - db=mock_db, username=username, is_deleted=False, schema_to_select=UserRead - ) - - @pytest.mark.asyncio - async def test_read_user_not_found(self, mock_db): - """Test user retrieval when user doesn't exist.""" - username = "nonexistent_user" - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=None) - - with pytest.raises(NotFoundException, match="User not found"): - await read_user(Mock(), username, mock_db) - - -class TestReadUsers: - """Test users list endpoint.""" - - @pytest.mark.asyncio - async def test_read_users_success(self, mock_db): - """Test successful users list retrieval.""" - mock_users_data = {"data": [{"id": 1}, {"id": 2}], "count": 2} - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get_multi = AsyncMock(return_value=mock_users_data) - - with patch("src.app.api.v1.users.paginated_response") as mock_paginated: - expected_response = {"data": [{"id": 1}, {"id": 2}], "pagination": {}} - mock_paginated.return_value = expected_response - - result = await read_users(Mock(), mock_db, page=1, items_per_page=10) - - assert result == expected_response - 
mock_crud.get_multi.assert_called_once() - mock_paginated.assert_called_once() - - -class TestPatchUser: - """Test user update endpoint.""" - - @pytest.mark.asyncio - async def test_patch_user_success(self, mock_db, current_user_dict, sample_user_read): - """Test successful user update.""" - username = current_user_dict["username"] - user_update = UserUpdate(name="New Name") - - user_dict = sample_user_read.model_dump() - user_dict["username"] = username - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=user_dict) - mock_crud.exists = AsyncMock(return_value=False) - mock_crud.update = AsyncMock(return_value=None) - - result = await patch_user(Mock(), user_update, username, current_user_dict, mock_db) - - assert result == {"message": "User updated"} - mock_crud.update.assert_called_once() - - @pytest.mark.asyncio - async def test_patch_user_forbidden(self, mock_db, current_user_dict, sample_user_read): - """Test user update when user tries to update another user.""" - username = "different_user" - user_update = UserUpdate(name="New Name") - user_dict = sample_user_read.model_dump() - user_dict["username"] = username - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=user_dict) - - with pytest.raises(ForbiddenException): - await patch_user(Mock(), user_update, username, current_user_dict, mock_db) - - -class TestEraseUser: - """Test user deletion endpoint.""" - - @pytest.mark.asyncio - async def test_erase_user_success(self, mock_db, current_user_dict, sample_user_read): - """Test successful user deletion.""" - username = current_user_dict["username"] - sample_user_read.username = username - token = "mock_token" - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=sample_user_read) - mock_crud.delete = AsyncMock(return_value=None) - - with patch("src.app.api.v1.users.blacklist_token", new_callable=AsyncMock) 
as mock_blacklist: - result = await erase_user(Mock(), username, current_user_dict, mock_db, token) - - assert result == {"message": "User deleted"} - mock_crud.delete.assert_called_once_with(db=mock_db, username=username) - mock_blacklist.assert_called_once_with(token=token, db=mock_db) - - @pytest.mark.asyncio - async def test_erase_user_not_found(self, mock_db, current_user_dict): - """Test user deletion when user doesn't exist.""" - username = "nonexistent_user" - token = "mock_token" - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=None) - - with pytest.raises(NotFoundException, match="User not found"): - await erase_user(Mock(), username, current_user_dict, mock_db, token) - - @pytest.mark.asyncio - async def test_erase_user_forbidden(self, mock_db, current_user_dict, sample_user_read): - """Test user deletion when user tries to delete another user.""" - username = "different_user" - sample_user_read.username = username - token = "mock_token" - - with patch("src.app.api.v1.users.crud_users") as mock_crud: - mock_crud.get = AsyncMock(return_value=sample_user_read) - - with pytest.raises(ForbiddenException): - await erase_user(Mock(), username, current_user_dict, mock_db, token) diff --git a/uv.lock b/uv.lock index 10c85d61..60039143 100644 --- a/uv.lock +++ b/uv.lock @@ -1,31 +1,221 @@ version = 1 revision = 3 -requires-python = ">=3.11, <4" +requires-python = ">=3.11" +resolution-markers = [ + "python_full_version >= '3.15'", + "python_full_version == '3.14.*'", + "python_full_version == '3.13.*'", + "python_full_version < '3.13'", +] + +[manifest] +members = [ + "fastapi-boilerplate", + "fastapi-boilerplate-cli", + "fastapi-boilerplate-workspace", +] + +[[package]] +name = "aio-pika" +version = "9.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiormq" }, + { name = "yarl" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/96/63/56354526f2e6e915c93bee6e4dedb35888fe82d6bc1a19f35f5a77e795ff/aio_pika-9.6.2.tar.gz", hash = "sha256:c49e9246080dc8ffa1bb0e4aca407bf3d8ad78c3ee3a93df88b68fe65d7a49b9", size = 70851, upload-time = "2026-03-22T19:03:20.878Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/05/256fa313f48bed075056d13593b92ce804be05d75f4f312be24edb82860a/aio_pika-9.6.2-py3-none-any.whl", hash = "sha256:2a5478af920d169795071c9c09c7542cd8cdece60438cf7804533dcbcce93b7f", size = 56269, upload-time = "2026-03-22T19:03:19.558Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d6/f5/a20c4ac64aeaef1679e25c9983573618ff765d7aa829fa2b84ae7573169e/aiohttp-3.13.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ab7229b6f9b5c1ba4910d6c41a9eb11f543eadb3f384df1b4c293f4e73d44d6", size = 757513, upload-time = "2026-03-31T21:57:02.146Z" }, + { url = "https://files.pythonhosted.org/packages/75/0a/39fa6c6b179b53fcb3e4b3d2b6d6cad0180854eda17060c7218540102bef/aiohttp-3.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f14c50708bb156b3a3ca7230b3d820199d56a48e3af76fa21c2d6087190fe3d", size = 506748, upload-time = "2026-03-31T21:57:04.275Z" }, + { url = "https://files.pythonhosted.org/packages/87/ec/e38ce072e724fd7add6243613f8d1810da084f54175353d25ccf9f9c7e5a/aiohttp-3.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d2f8616f0ff60bd332022279011776c3ac0faa0f1b463f7bb12326fbc97a1c", size = 501673, upload-time = "2026-03-31T21:57:06.208Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/3bc7525d7e2beaa11b309a70d48b0d3cfc3c2089ec6a7d0820d59c657053/aiohttp-3.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2567b72e1ffc3ab25510db43f355b29eeada56c0a622e58dcdb19530eb0a3cb", size = 1763757, upload-time = "2026-03-31T21:57:07.882Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ab/e87744cf18f1bd78263aba24924d4953b41086bd3a31d22452378e9028a0/aiohttp-3.13.5-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fb0540c854ac9c0c5ad495908fdfd3e332d553ec731698c0e29b1877ba0d2ec6", size = 1720152, upload-time = "2026-03-31T21:57:09.946Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/ed17a6f2d742af17b50bae2d152315ed1b164b07a5fd5cc1754d99e4dfa5/aiohttp-3.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9883051c6972f58bfc4ebb2116345ee2aa151178e99c3f2b2bbe2af712abd13", size = 1818010, upload-time = 
"2026-03-31T21:57:12.157Z" }, + { url = "https://files.pythonhosted.org/packages/53/06/ecbc63dc937192e2a5cb46df4d3edb21deb8225535818802f210a6ea5816/aiohttp-3.13.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2294172ce08a82fb7c7273485895de1fa1186cc8294cfeb6aef4af42ad261174", size = 1907251, upload-time = "2026-03-31T21:57:14.023Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a5/0521aa32c1ddf3aa1e71dcc466be0b7db2771907a13f18cddaa45967d97b/aiohttp-3.13.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a807cabd5115fb55af198b98178997a5e0e57dead43eb74a93d9c07d6d4a7dc", size = 1759969, upload-time = "2026-03-31T21:57:16.146Z" }, + { url = "https://files.pythonhosted.org/packages/f6/78/a38f8c9105199dd3b9706745865a8a59d0041b6be0ca0cc4b2ccf1bab374/aiohttp-3.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aa6d0d932e0f39c02b80744273cd5c388a2d9bc07760a03164f229c8e02662f6", size = 1616871, upload-time = "2026-03-31T21:57:17.856Z" }, + { url = "https://files.pythonhosted.org/packages/6f/41/27392a61ead8ab38072105c71aa44ff891e71653fe53d576a7067da2b4e8/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60869c7ac4aaabe7110f26499f3e6e5696eae98144735b12a9c3d9eae2b51a49", size = 1739844, upload-time = "2026-03-31T21:57:19.679Z" }, + { url = "https://files.pythonhosted.org/packages/6e/55/5564e7ae26d94f3214250009a0b1c65a0c6af4bf88924ccb6fdab901de28/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:26d2f8546f1dfa75efa50c3488215a903c0168d253b75fba4210f57ab77a0fb8", size = 1731969, upload-time = "2026-03-31T21:57:22.006Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c5/705a3929149865fc941bcbdd1047b238e4a72bcb215a9b16b9d7a2e8d992/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1162a1492032c82f14271e831c8f4b49f2b6078f4f5fc74de2c912fa225d51d", size = 1795193, upload-time = 
"2026-03-31T21:57:24.256Z" }, + { url = "https://files.pythonhosted.org/packages/a6/19/edabed62f718d02cff7231ca0db4ef1c72504235bc467f7b67adb1679f48/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8b14eb3262fad0dc2f89c1a43b13727e709504972186ff6a99a3ecaa77102b6c", size = 1606477, upload-time = "2026-03-31T21:57:26.364Z" }, + { url = "https://files.pythonhosted.org/packages/de/fc/76f80ef008675637d88d0b21584596dc27410a990b0918cb1e5776545b5b/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ca9ac61ac6db4eb6c2a0cd1d0f7e1357647b638ccc92f7e9d8d133e71ed3c6ac", size = 1813198, upload-time = "2026-03-31T21:57:28.316Z" }, + { url = "https://files.pythonhosted.org/packages/e5/67/5b3ac26b80adb20ea541c487f73730dc8fa107d632c998f25bbbab98fcda/aiohttp-3.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7996023b2ed59489ae4762256c8516df9820f751cf2c5da8ed2fb20ee50abab3", size = 1752321, upload-time = "2026-03-31T21:57:30.549Z" }, + { url = "https://files.pythonhosted.org/packages/88/06/e4a2e49255ea23fa4feeb5ab092d90240d927c15e47b5b5c48dff5a9ce29/aiohttp-3.13.5-cp311-cp311-win32.whl", hash = "sha256:77dfa48c9f8013271011e51c00f8ada19851f013cde2c48fca1ba5e0caf5bb06", size = 439069, upload-time = "2026-03-31T21:57:32.388Z" }, + { url = "https://files.pythonhosted.org/packages/c0/43/8c7163a596dab4f8be12c190cf467a1e07e4734cf90eebb39f7f5d53fc6a/aiohttp-3.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:d3a4834f221061624b8887090637db9ad4f61752001eae37d56c52fddade2dc8", size = 462859, upload-time = "2026-03-31T21:57:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 
1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = 
"2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = "2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = "https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time = "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = "2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 
1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = "2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = "https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = "https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = "2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time = "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = "2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = 
"2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = "2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = "https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = "2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time = "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = "2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = "2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = 
"2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, +] + +[[package]] +name = "aiomcache" +version = "0.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/0a/914d8df1002d88ca70679d192f6e16d113e6b5cbcc13c51008db9230025f/aiomcache-0.8.2.tar.gz", hash = "sha256:43b220d7f499a32a71871c4f457116eb23460fa216e69c1d32b81e3209e51359", size = 10640, upload-time = "2024-05-07T15:03:14.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/f8/78455f6377cbe85f335f4dbd40a807dafb72bd5fa05eb946f2ad0cec3d40/aiomcache-0.8.2-py3-none-any.whl", hash = "sha256:9d78d6b6e74e775df18b350b1cddfa96bd2f0a44d49ad27fa87759a3469cef5e", size = 10145, upload-time = "2024-05-07T15:03:12.003Z" }, +] + +[[package]] +name = "aiormq" +version = "6.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pamqp" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/0e/db90154d52d399108903fe603e5110a533c42065180265dd003788264080/aiormq-6.9.4.tar.gz", hash = "sha256:0e7c01b662804e1cc7ace9a17794e8c1192a27fc2afa96162362a6e61ae8e8ef", size = 49232, upload-time = "2026-03-23T09:18:19.493Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/48/1ce3773f392f02ceda37aee168fade9d725483a9592c202d06044cd093ff/aiormq-6.9.4-py3-none-any.whl", hash = "sha256:726a8586695e863fba68cf88842065ab12348c9438dcebdfc9d0bddaf6083277", size = 32166, upload-time = "2026-03-23T09:18:17.523Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = 
"python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] [[package]] name = "aiosqlite" -version = "0.21.0" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, +] + +[[package]] +name = "aiostream" +version = "0.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/65/b9b69695702b76a878c9879f2ee80cefce75bc5cb864fc100460bc1c5380/aiostream-0.7.1.tar.gz", hash = 
"sha256:272aaa0d8f83beb906f5aa9022bb59046bb7a103fa3770f807c31f918595acf6", size = 44059, upload-time = "2025-10-13T20:02:06.961Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/d7c6ca304140f3f49987d710e15bc164248924a35d8cdfac2f6e87fca041/aiostream-0.7.1-py3-none-any.whl", hash = "sha256:ea8739e9158ee6a606b3feedf3762721c3507344e540d09a10984c5e88a13b37", size = 41416, upload-time = "2025-10-13T20:02:05.535Z" }, ] [[package]] name = "alembic" -version = "1.16.1" +version = "1.18.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/89/bfb4fe86e3fc3972d35431af7bedbc60fa606e8b17196704a1747f7aa4c3/alembic-1.16.1.tar.gz", hash = "sha256:43d37ba24b3d17bc1eb1024fe0f51cd1dc95aeb5464594a02c6bb9ca9864bfa4", size = 1955006, upload-time = "2025-05-21T23:11:05.991Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/13/8b084e0f2efb0275a1d534838844926f798bd766566b1375174e2448cd31/alembic-1.18.4.tar.gz", hash = "sha256:cb6e1fd84b6174ab8dbb2329f86d631ba9559dd78df550b57804d607672cedbc", size = 2056725, upload-time = "2026-02-10T16:00:47.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/29/6533c317b74f707ea28f8d633734dbda2119bbadfc61b2f3640ba835d0f7/alembic-1.18.4-py3-none-any.whl", hash = "sha256:a5ed4adcf6d8a4cb575f3d759f071b03cd6e5c7618eb796cb52497be25bfe19a", size = 263893, upload-time = "2026-02-10T16:00:49.997Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/59/565286efff3692c5716c212202af61466480f6357c4ae3089d4453bff1f3/alembic-1.16.1-py3-none-any.whl", hash = "sha256:0cdd48acada30d93aa1035767d67dff25702f8de74d7c3919f2e8492c8db2e67", size = 242488, upload-time = "2025-05-21T23:11:07.783Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, ] [[package]] @@ -39,29 +229,53 @@ wheels = [ [[package]] name = "anyio" -version = "4.9.0" +version = "4.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, - { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, -] - -[[package]] -name = "arq" -version = "0.26.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "redis", extra = ["hiredis"] }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/4f/65/5add7049297a449d1453e26a8d5924f0d5440b3876edc9e80d5dc621f16d/arq-0.26.3.tar.gz", hash = "sha256:362063ea3c726562fb69c723d5b8ee80827fdefda782a8547da5be3d380ac4b1", size = 291111, upload-time = "2025-01-06T22:44:49.771Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/b3/a24a183c628da633b7cafd1759b14aaf47958de82ba6bcae9f1c2898781d/arq-0.26.3-py3-none-any.whl", hash = "sha256:9f4b78149a58c9dc4b88454861a254b7c4e7a159f2c973c89b548288b77e9005", size = 25968, upload-time = "2025-01-06T22:44:45.771Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "ast-serialize" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/9d/912fefab0e30aee6a3af8a62bbea4a81b29afa4ba2c973d31170620a26de/ast_serialize-0.3.0.tar.gz", hash = "sha256:1bc3ca09a63a021376527c4e938deedd11d11d675ce850e6f9c7487f5889992b", size = 60689, upload-time = "2026-04-30T23:24:48.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/57/a54d4de491d6cdd7a4e4b0952cc3ca9f60dcefa7b5fb48d6d492debe1649/ast_serialize-0.3.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:3a867927df59f76a18dc1d874a0b2c079b42c58972dca637905576deb0912e14", size = 1182966, upload-time = "2026-04-30T23:23:57.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/9e/a5db014bb0f91b209236b57c429389e31290c0093532b8436d577699b2fa/ast_serialize-0.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a6fb063bf040abf8321e7b8113a0554eda445ffc508aa51287f8808886a5ae22", size = 1171316, upload-time = "2026-04-30T23:23:59.63Z" }, + { url = "https://files.pythonhosted.org/packages/15/59/fd55133e478c4326f60a11df02573bf7ccb2ac685810b50f1803d0f68053/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5075cd8482573d743586779e5f9b652a015e37d4e95132d7e5a9bc5c8f483d8f", size = 1232234, upload-time = "2026-04-30T23:24:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/cc/79/0ca1d26357ecb4a697d74d00b73ef3137f24c140424125393a0de820eb09/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41560b27794f4553b0f77811e9fb325b77db4a2b39018d437e09932275306e66", size = 1233437, upload-time = "2026-04-30T23:24:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/53/3e/7078ec94dd6e124b8e028ac77016a4f13c83fa1c145790f2e68f3816998b/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b967c01ca74909c5d90e0fe4393401e2cc5da5ebd9a6262a19e45ffd3757dec8", size = 1440188, upload-time = "2026-04-30T23:24:04.717Z" }, + { url = "https://files.pythonhosted.org/packages/21/16/cca7195ef55a012f8013c3442afa91d287a0a36dcf88b480b262475135b3/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:424ebb8f46cd993f7cec4009d119312d8433dd90e6b0df0499cd2c91bdcc5af9", size = 1254211, upload-time = "2026-04-30T23:24:06.18Z" }, + { url = "https://files.pythonhosted.org/packages/a0/0f/f3d4dfae67dee6580534361a6343367d34217e7d25cff858bd1d8f03b8ed/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d14b1d566b56e2ee70b11fec1de7e0b94ec7cd83717ec7d189967841a361190e", size = 1255973, upload-time = 
"2026-04-30T23:24:07.772Z" }, + { url = "https://files.pythonhosted.org/packages/14/41/55fbfe02c42f40fbe3e74eda167d977d555ff720ce1abfa08515236efd88/ast_serialize-0.3.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ba30b18735f047ec11103d1ab92f4789cf1fea1e0dc89b04a2f5a0632fd79de", size = 1298629, upload-time = "2026-04-30T23:24:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/28/36/7d2501cacc7989fb8504aa9da2a2022a174200a59d4e6639de4367a57fdd/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e6ea0754cb7b0f682ebb005ffb0d18f8d17993490d9c289863cd69cacc4ab8df", size = 1408435, upload-time = "2026-04-30T23:24:11.013Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/54e3b469c3fa0bf9cd532fa643d1d33b73303f8d70beac3e366b68dd64b7/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:a0c5aa1073a5ba7b2abaa4b54abe8b8d75c4d1e2d54a2ff70b0ca6222fea5728", size = 1508174, upload-time = "2026-04-30T23:24:12.635Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/9b9621865b02c60539e26d9b114a312b4fa46aa703e33e79317174bfea21/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:4e52650d834c1ea7791969a361de2c54c13b2fb4c519ec79445fa8b9021a147d", size = 1502354, upload-time = "2026-04-30T23:24:14.186Z" }, + { url = "https://files.pythonhosted.org/packages/34/dd/f138bc5c43b0c414fdd12eefe15677839323078b6e75301ad7f96cd26d45/ast_serialize-0.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:15bd6af3f136c61dae27805eb6b8f3269e85a545c4c27ffe9e530ead78d2b36d", size = 1450504, upload-time = "2026-04-30T23:24:16.076Z" }, + { url = "https://files.pythonhosted.org/packages/68/cf/97ef9e1c315601db74365955c8edd3292e3055500d6317602815dbdf08ae/ast_serialize-0.3.0-cp314-cp314t-win32.whl", hash = "sha256:d188bfe37b674b49708497683051d4b571366a668799c9b8e8a94513694969d9", size = 1058662, upload-time = "2026-04-30T23:24:17.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/d6/e2c3483c31580fdb623f92ad38d2f856cde4b9205a3e6bd84760f3de7d82/ast_serialize-0.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:5832c2fdf8f8a6cf682b4cfcf677f5eaf39b4ddbc490f5480cfccdd1e7ce8fa1", size = 1100349, upload-time = "2026-04-30T23:24:18.992Z" }, + { url = "https://files.pythonhosted.org/packages/ab/89/29abcb1fe18a429cda60c6e0bbd1d6e90499339842a2f548d7567542357e/ast_serialize-0.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:670f177188d128fb7f9f15b5ad0e1b553d22c34e3f584dcb83eb8077600437f0", size = 1072895, upload-time = "2026-04-30T23:24:20.706Z" }, + { url = "https://files.pythonhosted.org/packages/bc/93/72abad83966ed6235647c9f956417dc1e17e997696388521910e3d1fa3f4/ast_serialize-0.3.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:2ec2fafa5e4313cc8feed96e436ebe19ac7bc6fa41fbc2827e826c48b9e4c3a9", size = 1190024, upload-time = "2026-04-30T23:24:22.486Z" }, + { url = "https://files.pythonhosted.org/packages/85/4f/eb88584b2f0234e581762011208ca203252bf6c98e59b4769daa571f3576/ast_serialize-0.3.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ef6d3c08b7b4cd29b48410338e134764a00e76d25841eb02c1084e868c888ecc", size = 1178633, upload-time = "2026-04-30T23:24:24.35Z" }, + { url = "https://files.pythonhosted.org/packages/56/51/cf1ec1ff3e616373d0dcbd5fad502e0029dc541f13ab642259762a7d127f/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d841424f41b886e98044abc80769c14a956e6e5ccd5fb5b0d9f5ead72be18a4", size = 1241351, upload-time = "2026-04-30T23:24:25.987Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/68fcf50478cf1093f2d423f034ae06453122c8b415d8e21a44668eca485d/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d21453734ad39367ede5d37efe4f59f830ce1c09f432fc72a90e368f77a4a3e7", size = 1239582, upload-time = "2026-04-30T23:24:27.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/c1/a6c9fa284eceb5fc6f21347e968445a051d7ca2c4d34e6a04314646dbcee/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f5e110cdce2a347e1dd987529c88ef54d26f67848dce3eba1b3b2cc2cf085c94", size = 1448853, upload-time = "2026-04-30T23:24:29.534Z" }, + { url = "https://files.pythonhosted.org/packages/23/5f/8ad3829a09e4e8c5328a53ce7d4711d660944e3e164c5f6abcc2c8f27167/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6e23a98e57560a055f5c4b68700a0fd5ce483d2814c23140b3638c7f5d1e61", size = 1262204, upload-time = "2026-04-30T23:24:31.482Z" }, + { url = "https://files.pythonhosted.org/packages/25/13/44aa28d97f10e25247e8576b5f6b2795d4fa1a80acc88acc942c508d06f7/ast_serialize-0.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c9e763d70293d65ce1e1ea8c943140c68d0953f0268c7ee0998f2e07f77dd0", size = 1266458, upload-time = "2026-04-30T23:24:33.088Z" }, + { url = "https://files.pythonhosted.org/packages/d8/58/b3a8be3777cd3744324fd5cec0d80d37cd96fc7cbb0fb010e03dff1e870f/ast_serialize-0.3.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4388a1796c228f1ce5c391426f7d21a0003ad3b47f677dbeded9bd1a85c7209f", size = 1308700, upload-time = "2026-04-30T23:24:34.657Z" }, + { url = "https://files.pythonhosted.org/packages/13/03/f8312d6b57f5471a9dc7946f22b8798a1fc296d38c25766223aacadec42c/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5283cdcc0c64c3d8b9b688dc6aaa012d9c0cf1380a7f774a6bae6a1c01b3205a", size = 1416724, upload-time = "2026-04-30T23:24:36.562Z" }, + { url = "https://files.pythonhosted.org/packages/50/5d/13fc3789a7abac00559da2e2e9f386db4612aa1f84fc53d09bf714c37545/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f5ef88cc5842a5d7a6ac09dc0d5fc2c98f5d276c1f076f866d55047ce886785b", size = 1515441, upload-time = "2026-04-30T23:24:38.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/b9/7ab43fc7a23b1f970281093228f5f79bed6edeed7a3e672bde6d7a832a58/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:cc14bf402bdc0978594ecce783793de2c7470cd4f5cd7eb286ca97ed8ff7cba9", size = 1510522, upload-time = "2026-04-30T23:24:39.798Z" }, + { url = "https://files.pythonhosted.org/packages/56/ec/d75fc2b788d319f1fad77c14156896f31afdfc68af85b505e5bdebcb9592/ast_serialize-0.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:11eae0cf1b7b3e0678133cc2daa974ea972caf02eb4b3aa062af6fa9acd52c57", size = 1460917, upload-time = "2026-04-30T23:24:41.305Z" }, + { url = "https://files.pythonhosted.org/packages/95/74/f99c81193a2725911e1911ae567ed27c2f2419332c7f3537366f9d238cac/ast_serialize-0.3.0-cp39-abi3-win32.whl", hash = "sha256:2db3dd99de5e6a5a11d7dda73de8750eb6e5baaf25245adf7bdcfe64b6108ae2", size = 1067804, upload-time = "2026-04-30T23:24:43.091Z" }, + { url = "https://files.pythonhosted.org/packages/16/81/76af00c47daa151e89f98ae21fbbcb2840aaa9f5766579c4da76a3c57188/ast_serialize-0.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:a2cd125adccf7969470621905d302750cd25951f22ea430d9a25b7be031e5549", size = 1105561, upload-time = "2026-04-30T23:24:44.578Z" }, + { url = "https://files.pythonhosted.org/packages/bd/46/d3ec57ad500f598d1554bd14ce4df615960549ab2844961bc4e1f5fbd174/ast_serialize-0.3.0-cp39-abi3-win_arm64.whl", hash = "sha256:0dd00da29985f15f50dc35728b7e1e7c84507bccfea1d9914738530f1c72238a", size = 1077165, upload-time = "2026-04-30T23:24:46.377Z" }, ] [[package]] @@ -75,163 +289,309 @@ wheels = [ [[package]] name = "asyncpg" -version = "0.30.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, - { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, - { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, - { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, - { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162, upload-time = "2024-10-20T00:29:41.88Z" }, - { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025, upload-time = "2024-10-20T00:29:43.352Z" }, - { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243, upload-time = "2024-10-20T00:29:44.922Z" }, - { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059, upload-time = "2024-10-20T00:29:46.891Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596, upload-time = "2024-10-20T00:29:49.201Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632, upload-time = "2024-10-20T00:29:50.768Z" }, - { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186, upload-time = "2024-10-20T00:29:52.394Z" }, - { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064, upload-time = "2024-10-20T00:29:53.757Z" }, - { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = "2024-10-20T00:29:57.14Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, - { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, - { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, - { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + +[[package]] +name = "attrs" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] name = "bcrypt" -version = "4.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/2c/3d44e853d1fe969d229bd58d39ae6902b3d924af0e2b5a60d17d4b809ded/bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281", size = 483719, upload-time = "2025-02-28T01:22:34.539Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e2/58ff6e2a22eca2e2cff5370ae56dba29d70b1ea6fc08ee9115c3ae367795/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb", size = 272001, upload-time = "2025-02-28T01:22:38.078Z" }, - { url = "https://files.pythonhosted.org/packages/37/1f/c55ed8dbe994b1d088309e366749633c9eb90d139af3c0a50c102ba68a1a/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180", size = 277451, upload-time = "2025-02-28T01:22:40.787Z" }, - { url = "https://files.pythonhosted.org/packages/d7/1c/794feb2ecf22fe73dcfb697ea7057f632061faceb7dcf0f155f3443b4d79/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f", size = 272792, upload-time = "2025-02-28T01:22:43.144Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/b7/0b289506a3f3598c2ae2bdfa0ea66969812ed200264e3f61df77753eee6d/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09", size = 289752, upload-time = "2025-02-28T01:22:45.56Z" }, - { url = "https://files.pythonhosted.org/packages/dc/24/d0fb023788afe9e83cc118895a9f6c57e1044e7e1672f045e46733421fe6/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d", size = 277762, upload-time = "2025-02-28T01:22:47.023Z" }, - { url = "https://files.pythonhosted.org/packages/e4/38/cde58089492e55ac4ef6c49fea7027600c84fd23f7520c62118c03b4625e/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd", size = 272384, upload-time = "2025-02-28T01:22:49.221Z" }, - { url = "https://files.pythonhosted.org/packages/de/6a/d5026520843490cfc8135d03012a413e4532a400e471e6188b01b2de853f/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af", size = 277329, upload-time = "2025-02-28T01:22:51.603Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a3/4fc5255e60486466c389e28c12579d2829b28a527360e9430b4041df4cf9/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231", size = 305241, upload-time = "2025-02-28T01:22:53.283Z" }, - { url = "https://files.pythonhosted.org/packages/c7/15/2b37bc07d6ce27cc94e5b10fd5058900eb8fb11642300e932c8c82e25c4a/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c", size = 309617, upload-time = "2025-02-28T01:22:55.461Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/1f/99f65edb09e6c935232ba0430c8c13bb98cb3194b6d636e61d93fe60ac59/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f", size = 335751, upload-time = "2025-02-28T01:22:57.81Z" }, - { url = "https://files.pythonhosted.org/packages/00/1b/b324030c706711c99769988fcb694b3cb23f247ad39a7823a78e361bdbb8/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d", size = 355965, upload-time = "2025-02-28T01:22:59.181Z" }, - { url = "https://files.pythonhosted.org/packages/aa/dd/20372a0579dd915dfc3b1cd4943b3bca431866fcb1dfdfd7518c3caddea6/bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4", size = 155316, upload-time = "2025-02-28T01:23:00.763Z" }, - { url = "https://files.pythonhosted.org/packages/6d/52/45d969fcff6b5577c2bf17098dc36269b4c02197d551371c023130c0f890/bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669", size = 147752, upload-time = "2025-02-28T01:23:02.908Z" }, - { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, - { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, - { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" }, + { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" }, + { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" }, + { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" }, + { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" }, + { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" }, + { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" }, + { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" }, + { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" }, + { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" }, + { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" }, + { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" }, + { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" }, + { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" }, + { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" }, + { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" }, + { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" }, + { url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" }, + { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" }, + { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" }, + { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" }, + { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" }, + { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = 
"sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" }, + { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" }, + { url 
= "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" }, + { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", size = 271180, upload-time = "2025-09-25T19:50:38.575Z" }, + { url = "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", size = 273791, upload-time = "2025-09-25T19:50:39.913Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", size = 270746, upload-time = "2025-09-25T19:50:43.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", size = 273375, upload-time = "2025-09-25T19:50:45.43Z" }, ] [[package]] name = "certifi" -version = "2025.4.26" +version = "2026.4.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, ] [[package]] name = "cffi" -version = "1.17.1" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = 
"sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, - { 
url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, -] - -[[package]] -name = "cfgv" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/d7/b5b7020a0565c2e9fa8c09f4b5fa6232feb326b8c20081ccded47ea368fd/charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7", size = 309705, upload-time = "2026-04-02T09:26:02.191Z" }, + { url = "https://files.pythonhosted.org/packages/5a/53/58c29116c340e5456724ecd2fff4196d236b98f3da97b404bc5e51ac3493/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7", size = 206419, upload-time = "2026-04-02T09:26:03.583Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/e8146dc6591a37a00e5144c63f29fb7c97a734ea8a111190783c0e60ab63/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e", size = 227901, upload-time = "2026-04-02T09:26:04.738Z" }, + { url = "https://files.pythonhosted.org/packages/fb/73/77486c4cd58f1267bf17db420e930c9afa1b3be3fe8c8b8ebbebc9624359/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c", size = 222742, upload-time = "2026-04-02T09:26:06.36Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/f74eb381a7d94ded44739e9d94de18dc5edc9c17fb8c11f0a6890696c0a9/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df", size = 214061, upload-time = "2026-04-02T09:26:08.347Z" }, + { url = "https://files.pythonhosted.org/packages/dc/92/42bd3cefcf7687253fb86694b45f37b733c97f59af3724f356fa92b8c344/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265", size = 199239, upload-time = "2026-04-02T09:26:09.823Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3d/069e7184e2aa3b3cddc700e3dd267413dc259854adc3380421c805c6a17d/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4", size = 210173, upload-time = "2026-04-02T09:26:10.953Z" }, + { url = "https://files.pythonhosted.org/packages/62/51/9d56feb5f2e7074c46f93e0ebdbe61f0848ee246e2f0d89f8e20b89ebb8f/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e", size = 209841, upload-time = "2026-04-02T09:26:12.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/59/893d8f99cc4c837dda1fe2f1139079703deb9f321aabcb032355de13b6c7/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38", size = 200304, upload-time = "2026-04-02T09:26:13.711Z" }, + { url = "https://files.pythonhosted.org/packages/7d/1d/ee6f3be3464247578d1ed5c46de545ccc3d3ff933695395c402c21fa6b77/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c", size = 229455, upload-time = "2026-04-02T09:26:14.941Z" }, + { url = "https://files.pythonhosted.org/packages/54/bb/8fb0a946296ea96a488928bdce8ef99023998c48e4713af533e9bb98ef07/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = 
"sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b", size = 210036, upload-time = "2026-04-02T09:26:16.478Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/015b2387f913749f82afd4fcba07846d05b6d784dd16123cb66860e0237d/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c", size = 224739, upload-time = "2026-04-02T09:26:17.751Z" }, + { url = "https://files.pythonhosted.org/packages/17/ab/63133691f56baae417493cba6b7c641571a2130eb7bceba6773367ab9ec5/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d", size = 216277, upload-time = "2026-04-02T09:26:18.981Z" }, + { url = "https://files.pythonhosted.org/packages/06/6d/3be70e827977f20db77c12a97e6a9f973631a45b8d186c084527e53e77a4/charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad", size = 147819, upload-time = "2026-04-02T09:26:20.295Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/5f67790f06b735d7c7637171bbfd89882ad67201891b7275e51116ed8207/charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00", size = 159281, upload-time = "2026-04-02T09:26:21.74Z" }, + { url = "https://files.pythonhosted.org/packages/ca/83/6413f36c5a34afead88ce6f66684d943d91f233d76dd083798f9602b75ae/charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1", size = 147843, upload-time = "2026-04-02T09:26:22.901Z" }, + { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, 
upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", 
size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" }, + { url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" }, + { url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" }, + { url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" }, + { url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" }, + { url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" }, + { url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" }, + { url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" }, + { url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = 
"2026-04-02T09:28:37.794Z" }, ] [[package]] name = "click" -version = "8.2.1" +version = "8.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" }, ] [[package]] @@ -244,237 +604,401 @@ wheels = [ ] [[package]] -name = "crudadmin" -version = "0.4.2" +name = "cryptography" +version = "48.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "aiosqlite" }, - { name = "bcrypt" }, - { name = "fastapi" }, - { name = "fastcrud" }, - { name = "greenlet" }, - { name = "jinja2" }, - { name = "pydantic", extra = ["email"] }, - { name = "pydantic-settings" }, - { name = "python-jose" }, - { name = "python-multipart" }, - { name = "sqlalchemy" }, - { name = "user-agents" }, + { name = "cffi", marker = "platform_python_implementation 
!= 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/f9/2a202981d7508d327cb969af23e4236adf0988d9873d979be4af8490c028/crudadmin-0.4.2.tar.gz", hash = "sha256:6bcfaedbaddc5bbefb9960b6a0bf7d8b75d6bf0f880b625ad3f6293a085cd31a", size = 189902, upload-time = "2025-06-26T06:58:52.816Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/49/8f1f51346756c0ceb11ef9309cabb29f3d333c097bae4b4cd69f7bf0beab/crudadmin-0.4.2-py3-none-any.whl", hash = "sha256:8bba024031505eb8f7454a23c4a3690144ae4a49e0366e02d320b6374b2a9c5c", size = 217454, upload-time = "2025-06-26T06:58:51.389Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9f/a9/db8f313fdcd85d767d4973515e1db101f9c71f95fced83233de224673757/cryptography-48.0.0.tar.gz", hash = "sha256:5c3932f4436d1cccb036cb0eaef46e6e2db91035166f1ad6505c3c9d5a635920", size = 832984, upload-time = "2026-05-04T22:59:38.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/3d/01f6dd9190170a5a241e0e98c2d04be3664a9e6f5b9b872cde63aff1c3dd/cryptography-48.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:0c558d2cdffd8f4bbb30fc7134c74d2ca9a476f830bb053074498fbc86f41ed6", size = 8001587, upload-time = "2026-05-04T22:57:36.803Z" }, + { url = "https://files.pythonhosted.org/packages/b2/6e/e90527eef33f309beb811cf7c982c3aeffcce8e3edb178baa4ca3ae4a6fa/cryptography-48.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f5333311663ea94f75dd408665686aaf426563556bb5283554a3539177e03b8c", size = 4690433, upload-time = "2026-05-04T22:57:40.373Z" }, + { url = "https://files.pythonhosted.org/packages/90/04/673510ed51ddff56575f306cf1617d80411ee76831ccd3097599140efdfe/cryptography-48.0.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7995ef305d7165c3f11ae07f2517e5a4f1d5c18da1376a0a9ed496336b69e5f3", size = 4710620, upload-time = "2026-05-04T22:57:42.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/d5/e9c4ef932c8d800490c34d8bd589d64a31d5890e27ec9e9ad532be893294/cryptography-48.0.0-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:40ba1f85eaa6959837b1d51c9767e230e14612eea4ef110ee8854ada22da1bf5", size = 4696283, upload-time = "2026-05-04T22:57:45.294Z" }, + { url = "https://files.pythonhosted.org/packages/0c/29/174b9dfb60b12d59ecfc6cfa04bc88c21b42a54f01b8aae09bb6e51e4c7f/cryptography-48.0.0-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:369a6348999f94bbd53435c894377b20ab95f25a9065c283570e70150d8abc3c", size = 5296573, upload-time = "2026-05-04T22:57:47.933Z" }, + { url = "https://files.pythonhosted.org/packages/95/38/0d29a6fd7d0d1373f0c0c88a04ba20e359b257753ac497564cd660fc1d55/cryptography-48.0.0-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a0e692c683f4df67815a2d258b324e66f4738bd7a96a218c826dce4f4bd05d8f", size = 4743677, upload-time = "2026-05-04T22:57:50.067Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/eef653013d5c63b6a490529e0316f9ac14a37602965d4903efed1399f32b/cryptography-48.0.0-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:18349bbc56f4743c8b12dc32e2bccb2cf83ee8b69a3bba74ef8ae857e26b3d25", size = 4330808, upload-time = "2026-05-04T22:57:52.301Z" }, + { url = "https://files.pythonhosted.org/packages/84/9e/500463e87abb7a0a0f9f256ec21123ecde0a7b5541a15e840ea54551fd81/cryptography-48.0.0-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e8eac43dfca5c4cccc6dad9a80504436fca53bb9bc3100a2386d730fbe6b602", size = 4695941, upload-time = "2026-05-04T22:57:54.603Z" }, + { url = "https://files.pythonhosted.org/packages/e3/dc/7303087450c2ec9e7fbb750e17c2abfbc658f23cbd0e54009509b7cc4091/cryptography-48.0.0-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9ccdac7d40688ecb5a3b4a604b8a88c8002e3442d6c60aead1db2a89a041560c", size = 5252579, upload-time = "2026-05-04T22:57:57.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/c0/7101d3b7215edcdc90c45da544961fd8ed2d6448f77577460fa75a8443f7/cryptography-48.0.0-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:bd72e68b06bb1e96913f97dd4901119bc17f39d4586a5adf2d3e47bc2b9d58b5", size = 4743326, upload-time = "2026-05-04T22:57:59.535Z" }, + { url = "https://files.pythonhosted.org/packages/ac/d8/5b833bad13016f562ab9d063d68199a4bd121d18458e439515601d3357ec/cryptography-48.0.0-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:59baa2cb386c4f0b9905bd6eb4c2a79a69a128408fd31d32ca4d7102d4156321", size = 4826672, upload-time = "2026-05-04T22:58:01.996Z" }, + { url = "https://files.pythonhosted.org/packages/98/e1/7074eb8bf3c135558c73fc2bcf0f5633f912e6fb87e868a55c454080ef09/cryptography-48.0.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9249e3cd978541d665967ac2cb2787fd6a62bddf1e75b3e347a594d7dacf4f74", size = 4972574, upload-time = "2026-05-04T22:58:03.968Z" }, + { url = "https://files.pythonhosted.org/packages/04/70/e5a1b41d325f797f39427aa44ef8baf0be500065ab6d8e10369d850d4a4f/cryptography-48.0.0-cp311-abi3-win32.whl", hash = "sha256:9c459db21422be75e2809370b829a87eb37f74cd785fc4aa9ea1e5f43b47cda4", size = 3294868, upload-time = "2026-05-04T22:58:06.467Z" }, + { url = "https://files.pythonhosted.org/packages/f4/ac/8ac51b4a5fc5932eb7ee5c517ba7dc8cd834f0048962b6b352f00f41ebf9/cryptography-48.0.0-cp311-abi3-win_amd64.whl", hash = "sha256:5b012212e08b8dd5edc78ef54da83dd9892fd9105323b3993eff6bea65dc21d7", size = 3817107, upload-time = "2026-05-04T22:58:08.845Z" }, + { url = "https://files.pythonhosted.org/packages/6b/84/70e3feea9feea87fd7cbe77efb2712ae1e3e6edf10749dc6e95f4e60e455/cryptography-48.0.0-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:3cb07a3ed6431663cd321ea8a000a1314c74211f823e4177fefa2255e057d1ec", size = 7986556, upload-time = "2026-05-04T22:58:11.172Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/6e/18e07a618bb5442ba10cf4df16e99c071365528aa570dfcb8c02e25a303b/cryptography-48.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c7378637d7d88016fa6791c159f698b3d3eed28ebf844ac36b9dc04a14dae18", size = 4684776, upload-time = "2026-05-04T22:58:13.712Z" }, + { url = "https://files.pythonhosted.org/packages/be/6a/4ea3b4c6c6759794d5ee2103c304a5076dc4b19ae1f9fe47dba439e159e9/cryptography-48.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc90c0b39b2e3c65ef52c804b72e3c58f8a04ab2a1871272798e5f9572c17d20", size = 4698121, upload-time = "2026-05-04T22:58:16.448Z" }, + { url = "https://files.pythonhosted.org/packages/2f/59/6ff6ad6cae03bb887da2a5860b2c9805f8dac969ef01ce563336c49bd1d1/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:76341972e1eff8b4bea859f09c0d3e64b96ce931b084f9b9b7db8ef364c30eff", size = 4690042, upload-time = "2026-05-04T22:58:18.544Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b4/fc334ed8cfd705aca282fe4d8f5ae64a8e0f74932e9feecb344610cf6e4d/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:55b7718303bf06a5753dcdccf2f3945cf18ad7bffde41b61226e4db31ab89a9c", size = 5282526, upload-time = "2026-05-04T22:58:20.75Z" }, + { url = "https://files.pythonhosted.org/packages/11/08/9f8c5386cc4cd90d8255c7cdd0f5baf459a08502a09de30dc51f553d38dc/cryptography-48.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:a64697c641c7b1b2178e573cbc31c7c6684cd56883a478d75143dbb7118036db", size = 4733116, upload-time = "2026-05-04T22:58:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/b8/77/99307d7574045699f8805aa500fa0fb83422d115b5400a064ddd306d7750/cryptography-48.0.0-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:561215ea3879cb1cbbf272867e2efda62476f240fb58c64de6b393ae19246741", size = 4316030, upload-time = "2026-05-04T22:58:25.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/36/a608b98337af3cb2aff4818e406649d30572b7031918b04c87d979495348/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:ad64688338ed4bc1a6618076ba75fd7194a5f1797ac60b47afe926285adb3166", size = 4689640, upload-time = "2026-05-04T22:58:27.747Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a6/825010a291b4438aecc1f568bc428189fc1175515223632477c07dc0a6df/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:906cbf0670286c6e0044156bc7d4af9cbb0ef6db9f73e52c3ec56ba6bdde5336", size = 5237657, upload-time = "2026-05-04T22:58:29.848Z" }, + { url = "https://files.pythonhosted.org/packages/b9/09/4e76a09b4caa29aad535ddc806f5d4c5d01885bd978bd984fbc6ca032cae/cryptography-48.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:ea8990436d914540a40ab24b6a77c0969695ed52f4a4874c5137ccf7045a7057", size = 4732362, upload-time = "2026-05-04T22:58:32.009Z" }, + { url = "https://files.pythonhosted.org/packages/18/78/444fa04a77d0cb95f417dda20d450e13c56ba8e5220fc892a1658f44f882/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c18684a7f0cc9a3cb60328f496b8e3372def7c5d2df39ac267878b05565aaaae", size = 4819580, upload-time = "2026-05-04T22:58:34.254Z" }, + { url = "https://files.pythonhosted.org/packages/38/85/ea67067c70a1fd4be2c63d35eeed82658023021affccc7b17705f8527dd2/cryptography-48.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9be5aafa5736574f8f15f262adc81b2a9869e2cfe9014d52a44633905b40d52c", size = 4963283, upload-time = "2026-05-04T22:58:36.376Z" }, + { url = "https://files.pythonhosted.org/packages/75/54/cc6d0f3deac3e81c7f847e8a189a12b6cdd65059b43dad25d4316abd849a/cryptography-48.0.0-cp314-cp314t-win32.whl", hash = "sha256:c17dfe85494deaeddc5ce251aebd1d60bbe6afc8b62071bb0b469431a000124f", size = 3270954, upload-time = "2026-05-04T22:58:38.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/67/cc947e288c0758a4e5473d1dcb743037ab7785541265a969240b8885441a/cryptography-48.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27241b1dc9962e056062a8eef1991d02c3a24569c95975bd2322a8a52c6e5e12", size = 3797313, upload-time = "2026-05-04T22:58:40.746Z" }, + { url = "https://files.pythonhosted.org/packages/f2/63/61d4a4e1c6b6bab6ce1e213cd36a24c415d90e76d78c5eb8577c5541d2e8/cryptography-48.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:58d00498e8933e4a194f3076aee1b4a97dfec1a6da444535755822fe5d8b0b86", size = 7983482, upload-time = "2026-05-04T22:58:43.769Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ac/f5b5995b87770c693e2596559ffafe195b4033a57f14a82268a2842953f3/cryptography-48.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:614d0949f4790582d2cc25553abd09dd723025f0c0e7c67376a1d77196743d6e", size = 4683266, upload-time = "2026-05-04T22:58:46.064Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c6/8b14f67e18338fbc4adb76f66c001f5c3610b3e2d1837f268f47a347dbbb/cryptography-48.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ce4bfae76319a532a2dc68f82cc32f5676ee792a983187dac07183690e5c66f", size = 4696228, upload-time = "2026-05-04T22:58:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/ea/73/f808fbae9514bd91b47875b003f13e284c8c6bdfd904b7944e803937eec1/cryptography-48.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2eb992bbd4661238c5a397594c83f5b4dc2bc5b848c365c8f991b6780efcc5c7", size = 4689097, upload-time = "2026-05-04T22:58:50.9Z" }, + { url = "https://files.pythonhosted.org/packages/93/01/d86632d7d28db8ae83221995752eeb6639ffb374c2d22955648cf8d52797/cryptography-48.0.0-cp39-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:22a5cb272895dce158b2cacdfdc3debd299019659f42947dbdac6f32d68fe832", size = 5283582, upload-time = "2026-05-04T22:58:53.017Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/e1/50edc7a50334807cc4791fc4a0ce7468b4a1416d9138eab358bfc9a3d70b/cryptography-48.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2b4d59804e8408e2fea7d1fbaf218e5ec984325221db76e6a241a9abd6cdd95c", size = 4730479, upload-time = "2026-05-04T22:58:55.611Z" }, + { url = "https://files.pythonhosted.org/packages/6f/af/99a582b1b1641ff5911ac559beb45097cf79efd4ead4657f578ef1af2d47/cryptography-48.0.0-cp39-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:984a20b0f62a26f48a3396c72e4bc34c66e356d356bf370053066b3b6d54634a", size = 4326481, upload-time = "2026-05-04T22:58:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/90/ee/89aa26a06ef0a7d7611788ffd571a7c50e368cc6a4d5eef8b4884e866edb/cryptography-48.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5a5ed8fde7a1d09376ca0b40e68cd59c69fe23b1f9768bd5824f54681626032a", size = 4688713, upload-time = "2026-05-04T22:59:00.077Z" }, + { url = "https://files.pythonhosted.org/packages/70/ba/bcb1b0bb7a33d4c7c0c4d4c7874b4a62ae4f56113a5f4baefa362dfb1f0f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:8cd666227ef7af430aa5914a9910e0ddd703e75f039cef0825cd0da71b6b711a", size = 5238165, upload-time = "2026-05-04T22:59:02.317Z" }, + { url = "https://files.pythonhosted.org/packages/c9/70/ca4003b1ce5ca3dc3186ada51908c8a9b9ff7d5cab83cc0d43ee14ec144f/cryptography-48.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9071196d81abc88b3516ac8cdfad32e2b66dd4a5393a8e68a961e9161ddc6239", size = 4729947, upload-time = "2026-05-04T22:59:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/44/a0/4ec7cf774207905aef1a8d11c3750d5a1db805eb380ee4e16df317870128/cryptography-48.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1e2d54c8be6152856a36f0882ab231e70f8ec7f14e93cf87db8a2ed056bf160c", size = 4822059, upload-time = "2026-05-04T22:59:07.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/75/a2e55f99c16fcac7b5d6c1eb19ad8e00799854d6be5ca845f9259eae1681/cryptography-48.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5da777e32ffed6f85a7b2b3f7c5cbc88c146bfcd0a1d7baf5fcc6c52ee35dd4", size = 4960575, upload-time = "2026-05-04T22:59:09.851Z" }, + { url = "https://files.pythonhosted.org/packages/b8/23/6e6f32143ab5d8b36ca848a502c4bcd477ae75b9e1677e3530d669062578/cryptography-48.0.0-cp39-abi3-win32.whl", hash = "sha256:77a2ccbbe917f6710e05ba9adaa25fb5075620bf3ea6fb751997875aff4ae4bd", size = 3279117, upload-time = "2026-05-04T22:59:12.019Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9a/0fea98a70cf1749d41d738836f6349d97945f7c89433a259a6c2642eefeb/cryptography-48.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:16cd65b9330583e4619939b3a3843eec1e6e789744bb01e7c7e2e62e33c239c8", size = 3792100, upload-time = "2026-05-04T22:59:14.884Z" }, + { url = "https://files.pythonhosted.org/packages/be/d2/024b5e06be9d44cb021fb0e1a03d34d63989cf56a0fe62f3dfbab695b9b4/cryptography-48.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:84cf79f0dc8b36ac5da873481716e87aef31fcfa0444f9e1d8b4b2cece142855", size = 3950391, upload-time = "2026-05-04T22:59:17.415Z" }, + { url = "https://files.pythonhosted.org/packages/bc/17/3861e17c56fa0fd37491a14a8673fdb77c57fc5693cafe745ea8b06dba75/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fdfef35d751d510fcef5252703621574364fec16418c4a1e5e1055248401054b", size = 4637126, upload-time = "2026-05-04T22:59:20.197Z" }, + { url = "https://files.pythonhosted.org/packages/f0/0a/7e226dbff530f21480727eb764973a7bff2b912f8e15cd4f129e71b56d1d/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0890f502ddf7d9c6426129c3f49f5c0a39278ed7cd6322c8755ffca6ee675a13", size = 4667270, upload-time = "2026-05-04T22:59:22.647Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/f2/5a72274ca9f1b2a8b44a662ee0bf1b435909deb473d6f97bcd035bcdbc71/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:ecde28a596bead48b0cfd2a1b4416c3d43074c2d785e3a398d7ec1fc4d0f7fbb", size = 4636797, upload-time = "2026-05-04T22:59:24.912Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e1/48cedb2fe63626e91ded1edad159e2a4fb8b6906c4425eb7749673077ce7/cryptography-48.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:4defde8685ae324a9eb9d818717e93b4638ef67070ac9bc15b8ca85f63048355", size = 4666800, upload-time = "2026-05-04T22:59:27.474Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ca/7e8365deec19afb2b2c7be7c1c0aa8f99633b54e90c570999acda93260fc/cryptography-48.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:db63bf618e5dea46c07de12e900fe1cdd2541e6dc9dbae772a70b7d4d4765f6a", size = 3739536, upload-time = "2026-05-04T22:59:29.61Z" }, ] [[package]] -name = "cryptography" -version = "45.0.3" +name = "dnspython" +version = "2.8.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/13/1f/9fa001e74a1993a9cadd2333bb889e50c66327b8594ac538ab8a04f915b7/cryptography-45.0.3.tar.gz", hash = "sha256:ec21313dd335c51d7877baf2972569f40a4291b76a0ce51391523ae358d05899", size = 744738, upload-time = "2025-05-25T14:17:24.777Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/82/b2/2345dc595998caa6f68adf84e8f8b50d18e9fc4638d32b22ea8daedd4b7a/cryptography-45.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:7573d9eebaeceeb55285205dbbb8753ac1e962af3d9640791d12b36864065e71", size = 7056239, upload-time = "2025-05-25T14:16:12.22Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/3d/ac361649a0bfffc105e2298b720d8b862330a767dab27c06adc2ddbef96a/cryptography-45.0.3-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d377dde61c5d67eb4311eace661c3efda46c62113ff56bf05e2d679e02aebb5b", size = 4205541, upload-time = "2025-05-25T14:16:14.333Z" }, - { url = "https://files.pythonhosted.org/packages/70/3e/c02a043750494d5c445f769e9c9f67e550d65060e0bfce52d91c1362693d/cryptography-45.0.3-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae1e637f527750811588e4582988932c222f8251f7b7ea93739acb624e1487f", size = 4433275, upload-time = "2025-05-25T14:16:16.421Z" }, - { url = "https://files.pythonhosted.org/packages/40/7a/9af0bfd48784e80eef3eb6fd6fde96fe706b4fc156751ce1b2b965dada70/cryptography-45.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ca932e11218bcc9ef812aa497cdf669484870ecbcf2d99b765d6c27a86000942", size = 4209173, upload-time = "2025-05-25T14:16:18.163Z" }, - { url = "https://files.pythonhosted.org/packages/31/5f/d6f8753c8708912df52e67969e80ef70b8e8897306cd9eb8b98201f8c184/cryptography-45.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af3f92b1dc25621f5fad065288a44ac790c5798e986a34d393ab27d2b27fcff9", size = 3898150, upload-time = "2025-05-25T14:16:20.34Z" }, - { url = "https://files.pythonhosted.org/packages/8b/50/f256ab79c671fb066e47336706dc398c3b1e125f952e07d54ce82cf4011a/cryptography-45.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f8f8f0b73b885ddd7f3d8c2b2234a7d3ba49002b0223f58cfde1bedd9563c56", size = 4466473, upload-time = "2025-05-25T14:16:22.605Z" }, - { url = "https://files.pythonhosted.org/packages/62/e7/312428336bb2df0848d0768ab5a062e11a32d18139447a76dfc19ada8eed/cryptography-45.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9cc80ce69032ffa528b5e16d217fa4d8d4bb7d6ba8659c1b4d74a1b0f4235fca", size = 4211890, upload-time = "2025-05-25T14:16:24.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/53/8a130e22c1e432b3c14896ec5eb7ac01fb53c6737e1d705df7e0efb647c6/cryptography-45.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c824c9281cb628015bfc3c59335163d4ca0540d49de4582d6c2637312907e4b1", size = 4466300, upload-time = "2025-05-25T14:16:26.768Z" }, - { url = "https://files.pythonhosted.org/packages/ba/75/6bb6579688ef805fd16a053005fce93944cdade465fc92ef32bbc5c40681/cryptography-45.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5833bb4355cb377ebd880457663a972cd044e7f49585aee39245c0d592904578", size = 4332483, upload-time = "2025-05-25T14:16:28.316Z" }, - { url = "https://files.pythonhosted.org/packages/2f/11/2538f4e1ce05c6c4f81f43c1ef2bd6de7ae5e24ee284460ff6c77e42ca77/cryptography-45.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bb5bf55dcb69f7067d80354d0a348368da907345a2c448b0babc4215ccd3497", size = 4573714, upload-time = "2025-05-25T14:16:30.474Z" }, - { url = "https://files.pythonhosted.org/packages/f5/bb/e86e9cf07f73a98d84a4084e8fd420b0e82330a901d9cac8149f994c3417/cryptography-45.0.3-cp311-abi3-win32.whl", hash = "sha256:3ad69eeb92a9de9421e1f6685e85a10fbcfb75c833b42cc9bc2ba9fb00da4710", size = 2934752, upload-time = "2025-05-25T14:16:32.204Z" }, - { url = "https://files.pythonhosted.org/packages/c7/75/063bc9ddc3d1c73e959054f1fc091b79572e716ef74d6caaa56e945b4af9/cryptography-45.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:97787952246a77d77934d41b62fb1b6f3581d83f71b44796a4158d93b8f5c490", size = 3412465, upload-time = "2025-05-25T14:16:33.888Z" }, - { url = "https://files.pythonhosted.org/packages/71/9b/04ead6015229a9396890d7654ee35ef630860fb42dc9ff9ec27f72157952/cryptography-45.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:c92519d242703b675ccefd0f0562eb45e74d438e001f8ab52d628e885751fb06", size = 7031892, upload-time = "2025-05-25T14:16:36.214Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/c7/c7d05d0e133a09fc677b8a87953815c522697bdf025e5cac13ba419e7240/cryptography-45.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5edcb90da1843df85292ef3a313513766a78fbbb83f584a5a58fb001a5a9d57", size = 4196181, upload-time = "2025-05-25T14:16:37.934Z" }, - { url = "https://files.pythonhosted.org/packages/08/7a/6ad3aa796b18a683657cef930a986fac0045417e2dc428fd336cfc45ba52/cryptography-45.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38deed72285c7ed699864f964a3f4cf11ab3fb38e8d39cfcd96710cd2b5bb716", size = 4423370, upload-time = "2025-05-25T14:16:39.502Z" }, - { url = "https://files.pythonhosted.org/packages/4f/58/ec1461bfcb393525f597ac6a10a63938d18775b7803324072974b41a926b/cryptography-45.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5555365a50efe1f486eed6ac7062c33b97ccef409f5970a0b6f205a7cfab59c8", size = 4197839, upload-time = "2025-05-25T14:16:41.322Z" }, - { url = "https://files.pythonhosted.org/packages/d4/3d/5185b117c32ad4f40846f579369a80e710d6146c2baa8ce09d01612750db/cryptography-45.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9e4253ed8f5948a3589b3caee7ad9a5bf218ffd16869c516535325fece163dcc", size = 3886324, upload-time = "2025-05-25T14:16:43.041Z" }, - { url = "https://files.pythonhosted.org/packages/67/85/caba91a57d291a2ad46e74016d1f83ac294f08128b26e2a81e9b4f2d2555/cryptography-45.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cfd84777b4b6684955ce86156cfb5e08d75e80dc2585e10d69e47f014f0a5342", size = 4450447, upload-time = "2025-05-25T14:16:44.759Z" }, - { url = "https://files.pythonhosted.org/packages/ae/d1/164e3c9d559133a38279215c712b8ba38e77735d3412f37711b9f8f6f7e0/cryptography-45.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:a2b56de3417fd5f48773ad8e91abaa700b678dc7fe1e0c757e1ae340779acf7b", size = 4200576, upload-time = "2025-05-25T14:16:46.438Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/7a/e002d5ce624ed46dfc32abe1deff32190f3ac47ede911789ee936f5a4255/cryptography-45.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:57a6500d459e8035e813bd8b51b671977fb149a8c95ed814989da682314d0782", size = 4450308, upload-time = "2025-05-25T14:16:48.228Z" }, - { url = "https://files.pythonhosted.org/packages/87/ad/3fbff9c28cf09b0a71e98af57d74f3662dea4a174b12acc493de00ea3f28/cryptography-45.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f22af3c78abfbc7cbcdf2c55d23c3e022e1a462ee2481011d518c7fb9c9f3d65", size = 4325125, upload-time = "2025-05-25T14:16:49.844Z" }, - { url = "https://files.pythonhosted.org/packages/f5/b4/51417d0cc01802304c1984d76e9592f15e4801abd44ef7ba657060520bf0/cryptography-45.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:232954730c362638544758a8160c4ee1b832dc011d2c41a306ad8f7cccc5bb0b", size = 4560038, upload-time = "2025-05-25T14:16:51.398Z" }, - { url = "https://files.pythonhosted.org/packages/80/38/d572f6482d45789a7202fb87d052deb7a7b136bf17473ebff33536727a2c/cryptography-45.0.3-cp37-abi3-win32.whl", hash = "sha256:cb6ab89421bc90e0422aca911c69044c2912fc3debb19bb3c1bfe28ee3dff6ab", size = 2924070, upload-time = "2025-05-25T14:16:53.472Z" }, - { url = "https://files.pythonhosted.org/packages/91/5a/61f39c0ff4443651cc64e626fa97ad3099249152039952be8f344d6b0c86/cryptography-45.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:d54ae41e6bd70ea23707843021c778f151ca258081586f0cfa31d936ae43d1b2", size = 3395005, upload-time = "2025-05-25T14:16:55.134Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d4/58a246342093a66af8935d6aa59f790cbb4731adae3937b538d054bdc2f9/cryptography-45.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:edd6d51869beb7f0d472e902ef231a9b7689508e83880ea16ca3311a00bf5ce7", size = 3589802, upload-time = "2025-05-25T14:17:07.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/61/751ebea58c87b5be533c429f01996050a72c7283b59eee250275746632ea/cryptography-45.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:555e5e2d3a53b4fabeca32835878b2818b3f23966a4efb0d566689777c5a12c8", size = 4146964, upload-time = "2025-05-25T14:17:09.538Z" }, - { url = "https://files.pythonhosted.org/packages/8d/01/28c90601b199964de383da0b740b5156f5d71a1da25e7194fdf793d373ef/cryptography-45.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:25286aacb947286620a31f78f2ed1a32cded7be5d8b729ba3fb2c988457639e4", size = 4388103, upload-time = "2025-05-25T14:17:11.978Z" }, - { url = "https://files.pythonhosted.org/packages/3d/ec/cd892180b9e42897446ef35c62442f5b8b039c3d63a05f618aa87ec9ebb5/cryptography-45.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:050ce5209d5072472971e6efbfc8ec5a8f9a841de5a4db0ebd9c2e392cb81972", size = 4150031, upload-time = "2025-05-25T14:17:14.131Z" }, - { url = "https://files.pythonhosted.org/packages/db/d4/22628c2dedd99289960a682439c6d3aa248dff5215123ead94ac2d82f3f5/cryptography-45.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dc10ec1e9f21f33420cc05214989544727e776286c1c16697178978327b95c9c", size = 4387389, upload-time = "2025-05-25T14:17:17.303Z" }, - { url = "https://files.pythonhosted.org/packages/39/ec/ba3961abbf8ecb79a3586a4ff0ee08c9d7a9938b4312fb2ae9b63f48a8ba/cryptography-45.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:9eda14f049d7f09c2e8fb411dda17dd6b16a3c76a1de5e249188a32aeb92de19", size = 3337432, upload-time = "2025-05-25T14:17:19.507Z" }, -] - -[[package]] -name = "distlib" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = 
"2025-07-17T16:52:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, ] [[package]] -name = "dnspython" -version = "2.7.0" +name = "docker" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = 
"sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] [[package]] name = "ecdsa" -version = "0.19.1" +version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ca/8de7744cb3bc966c85430ca2d0fcaeea872507c6a4cf6e007f7fe269ed9d/ecdsa-0.19.2.tar.gz", hash = "sha256:62635b0ac1ca2e027f82122b5b81cb706edc38cd91c63dda28e4f3455a2bf930", size = 202432, upload-time = "2026-03-26T09:58:17.675Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, + { url = "https://files.pythonhosted.org/packages/51/79/119091c98e2bf49e24ed9f3ae69f816d715d2904aefa6a2baa039a2ba0b0/ecdsa-0.19.2-py2.py3-none-any.whl", hash = "sha256:840f5dc5e375c68f36c1a7a5b9caad28f95daa65185c9253c0c08dd952bb7399", size = 150818, upload-time = "2026-03-26T09:58:15.808Z" }, ] [[package]] name = "email-validator" -version = "2.2.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, { name = "idna" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, ] [[package]] name = "faker" -version = "37.3.0" +version = "40.15.0" source = { registry = "https://pypi.org/simple" } 
dependencies = [ - { name = "tzdata" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/4b/5354912eaff922876323f2d07e21408b10867f3295d5f917748341cb6f53/faker-37.3.0.tar.gz", hash = "sha256:77b79e7a2228d57175133af0bbcdd26dc623df81db390ee52f5104d46c010f2f", size = 1901376, upload-time = "2025-05-14T15:24:18.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/13/6741787bd91c4109c7bed047d68273965cd52ce8a5f773c471b949334b6d/faker-40.15.0.tar.gz", hash = "sha256:20f3a6ec8c266b74d4c554e34118b21c3c2056c0b4a519d15c8decb3a4e6e795", size = 1967447, upload-time = "2026-04-17T20:05:27.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/99/045b2dae19a01b9fbb23b9971bc04f4ef808e7f3a213d08c81067304a210/faker-37.3.0-py3-none-any.whl", hash = "sha256:48c94daa16a432f2d2bc803c7ff602509699fca228d13e97e379cd860a7e216e", size = 1942203, upload-time = "2025-05-14T15:24:16.159Z" }, + { url = "https://files.pythonhosted.org/packages/a7/a7/a600f8f30d4505e89166de51dd121bd540ab8e560e8cf0901de00a81de8c/faker-40.15.0-py3-none-any.whl", hash = "sha256:71ab3c3370da9d2205ab74ffb0fd51273063ad562b3a3bb69d0026a20923e318", size = 2004447, upload-time = "2026-04-17T20:05:25.437Z" }, ] [[package]] name = "fastapi" -version = "0.115.14" +version = "0.136.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "annotated-doc" }, { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/53/8c38a874844a8b0fa10dd8adf3836ac154082cf88d3f22b544e9ceea0a15/fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739", size = 296263, upload-time = "2025-06-26T15:29:08.21Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5d/45/c130091c2dfa061bbfe3150f2a5091ef1adf149f2a8d2ae769ecaf6e99a2/fastapi-0.136.1.tar.gz", 
hash = "sha256:7af665ad7acfa0a3baf8983d393b6b471b9da10ede59c60045f49fbc89a0fa7f", size = 397448, upload-time = "2026-04-23T16:49:44.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514, upload-time = "2025-06-26T15:29:06.49Z" }, + { url = "https://files.pythonhosted.org/packages/5a/ff/2e4eca3ade2c22fe1dea7043b8ee9dabe47753349eb1b56a202de8af6349/fastapi-0.136.1-py3-none-any.whl", hash = "sha256:a6e9d7eeada96c93a4d69cb03836b44fa34e2854accb7244a1ece36cd4781c3f", size = 117683, upload-time = "2026-04-23T16:49:42.437Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "fastar" }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "pydantic-extra-types" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, ] [[package]] name = "fastapi-boilerplate" -version = "0.1.0" -source = { editable = "." 
} +version = "0.18.0" +source = { editable = "backend" } dependencies = [ + { name = "aiomcache" }, + { name = "aiosqlite" }, { name = "alembic" }, - { name = "arq" }, { name = "asyncpg" }, - { name = "bcrypt" }, - { name = "crudadmin" }, - { name = "fastapi" }, + { name = "faker" }, + { name = "fastapi", extra = ["standard"] }, { name = "fastcrud" }, + { name = "fastsecure" }, { name = "greenlet" }, - { name = "gunicorn" }, - { name = "httptools" }, { name = "httpx" }, - { name = "mypy" }, - { name = "psycopg2-binary" }, - { name = "pydantic", extra = ["email"] }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "pydantic" }, { name = "pydantic-settings" }, - { name = "python-dotenv" }, - { name = "python-jose" }, - { name = "python-multipart" }, { name = "redis" }, - { name = "rich" }, - { name = "ruff" }, + { name = "sqladmin" }, { name = "sqlalchemy" }, - { name = "sqlalchemy-utils" }, - { name = "structlog" }, - { name = "uuid" }, - { name = "uuid6" }, - { name = "uvicorn" }, - { name = "uvloop" }, + { name = "taskiq" }, + { name = "taskiq-aio-pika" }, + { name = "taskiq-redis" }, + { name = "user-agents" }, ] [package.optional-dependencies] dev = [ - { name = "faker" }, { name = "mypy" }, { name = "pytest" }, + { name = "pytest-asyncio" }, { name = "pytest-mock" }, + { name = "pytest-xdist", extra = ["psutil"] }, { name = "ruff" }, - { name = "types-redis" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pre-commit" }, - { name = "pytest-asyncio" }, + { name = "testcontainers" }, + { name = "types-python-jose" }, ] [package.metadata] requires-dist = [ - { name = "alembic", specifier = ">=1.13.1" }, - { name = "arq", specifier = ">=0.25.0" }, - { name = "asyncpg", specifier = ">=0.29.0" }, - { name = "bcrypt", specifier = ">=4.1.1" }, - { name = "crudadmin", specifier = ">=0.4.2" }, - { name = "faker", marker = "extra == 'dev'", specifier = ">=26.0.0" }, - { name = "fastapi", specifier = ">=0.109.1" }, - { name = "fastcrud", specifier = 
">=0.19.2" }, - { name = "greenlet", specifier = ">=2.0.2" }, - { name = "gunicorn", specifier = ">=23.0.0" }, - { name = "httptools", specifier = ">=0.7.1" }, - { name = "httpx", specifier = ">=0.26.0" }, - { name = "mypy", specifier = ">=1.16.0" }, - { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.8.0" }, - { name = "psycopg2-binary", specifier = ">=2.9.9" }, - { name = "pydantic", extras = ["email"], specifier = ">=2.12.5" }, - { name = "pydantic-settings", specifier = ">=2.12.0" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.2" }, + { name = "aiomcache", specifier = ">=0.8.2" }, + { name = "aiosqlite", specifier = ">=0.21.0" }, + { name = "alembic", specifier = ">=1.16.4" }, + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "faker", specifier = ">=37.1.0" }, + { name = "fastapi", extras = ["standard"], specifier = ">=0.115.8" }, + { name = "fastcrud", specifier = ">=0.21.0" }, + { name = "fastsecure", specifier = ">=0.3.0" }, + { name = "greenlet", specifier = ">=3.1.1" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "itsdangerous", specifier = ">=2.2.0" }, + { name = "jinja2", specifier = ">=3.1.6" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.14.1" }, + { name = "pydantic", specifier = ">=2.10.6" }, + { name = "pydantic-settings", specifier = ">=2.7.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.3.5" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.25.3" }, { name = "pytest-mock", marker = "extra == 'dev'", specifier = ">=3.14.0" }, - { name = "python-dotenv", specifier = ">=1.0.0" }, - { name = "python-jose", specifier = ">=3.3.0" }, - { name = "python-multipart", specifier = ">=0.0.9" }, - { name = "redis", specifier = ">=5.0.1" }, - { name = "rich", specifier = ">=14.2.0" }, - { name = "ruff", specifier = ">=0.11.13" }, - { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" }, - { name = "sqlalchemy", specifier = 
">=2.0.25" }, - { name = "sqlalchemy-utils", specifier = ">=0.41.1" }, - { name = "structlog", specifier = ">=25.1.0,<=25.5.0" }, - { name = "types-redis", marker = "extra == 'dev'", specifier = ">=4.6.0" }, - { name = "uuid", specifier = ">=1.30" }, - { name = "uuid6", specifier = ">=2024.1.12" }, - { name = "uvicorn", specifier = ">=0.27.0" }, - { name = "uvloop", specifier = ">=0.19.0" }, + { name = "pytest-xdist", extras = ["psutil"], marker = "extra == 'dev'", specifier = ">=3.8.0" }, + { name = "redis", specifier = ">=6.1.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.4" }, + { name = "sqladmin", specifier = ">=0.22.0" }, + { name = "sqlalchemy", specifier = ">=2.0.37" }, + { name = "taskiq", specifier = ">=0.11.20" }, + { name = "taskiq-aio-pika", specifier = ">=0.4.3" }, + { name = "taskiq-redis", specifier = ">=1.1.2" }, + { name = "testcontainers", extras = ["postgres"], marker = "extra == 'dev'", specifier = ">=4.10.0" }, + { name = "types-python-jose", marker = "extra == 'dev'", specifier = ">=3.4.0.20250224" }, + { name = "user-agents", specifier = ">=2.2.0" }, ] provides-extras = ["dev"] -[package.metadata.requires-dev] -dev = [ - { name = "pre-commit", specifier = ">=4.3.0" }, - { name = "pytest-asyncio", specifier = ">=1.0.0" }, +[[package]] +name = "fastapi-boilerplate-cli" +version = "0.1.0" +source = { editable = "cli" } +dependencies = [ + { name = "fastapi-boilerplate" }, + { name = "jinja2" }, + { name = "typer" }, +] + +[package.metadata] +requires-dist = [ + { name = "fastapi-boilerplate", editable = "backend" }, + { name = "jinja2", specifier = ">=3.1" }, + { name = "typer", specifier = ">=0.12" }, +] + +[[package]] +name = "fastapi-boilerplate-workspace" +version = "0" +source = { virtual = "." 
} + +[[package]] +name = "fastapi-cli" +version = "0.0.24" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6e/58/74797ae9e4610cfa0c6b34c8309096d3b20bb29be3b8b5fbf1004d10fa5f/fastapi_cli-0.0.24.tar.gz", hash = "sha256:1afc9c9e21d7ebc8a3ca5e31790cd8d837742be7e4f8b9236e99cb3451f0de00", size = 19043, upload-time = "2026-02-24T10:45:10.476Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/4b/68f9fe268e535d79c76910519530026a4f994ce07189ac0dded45c6af825/fastapi_cli-0.0.24-py3-none-any.whl", hash = "sha256:4a1f78ed798f106b4fee85ca93b85d8fe33c0a3570f775964d37edb80b8f0edc", size = 12304, upload-time = "2026-02-24T10:45:09.552Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "fastapi-cloud-cli" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastar" }, + { name = "httpx" }, + { name = "pydantic", extra = ["email"] }, + { name = "rich-toolkit" }, + { name = "rignore" }, + { name = "sentry-sdk" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/57/cee8e91b83f39e75ae5562a2237261442a8179dcb3b631c7398113157398/fastapi_cloud_cli-0.17.1.tar.gz", hash = "sha256:0baece208fa88063bec46dccb5fb512f3199162092165e57654b44e64adbc44d", size = 47409, upload-time = "2026-04-27T13:38:07.094Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/a0/e252b68cf155409afabea037ab2971f41509481838847f6503fe890884ea/fastapi_cloud_cli-0.17.1-py3-none-any.whl", hash = "sha256:325e0199bdac7cb86f5df4f4a1d2070054095588088ef7b923a60cec458dcd63", size = 34046, upload-time = "2026-04-27T13:38:08.319Z" }, +] + +[[package]] +name = "fastar" +version = 
"0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/0f/0aeb3fc50046617702acc0078b277b58367fd62eb727b9ec733ae0e8bbcc/fastar-0.11.0.tar.gz", hash = "sha256:aa7f100f7313c03fdb20f1385927ba95671071ba308ad0c1763fef295e1895ce", size = 70238, upload-time = "2026-04-13T17:11:17.143Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/7a/fb367bdaf4efa2c7952a45aeab2e87a564293ecffe150af673ec8edfda46/fastar-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b82fd6f996e65a86f67a6bd64dd22ef3e8ae2dcaed0ae3b550e71f7e1bbb1df5", size = 709869, upload-time = "2026-04-13T17:09:55.62Z" }, + { url = "https://files.pythonhosted.org/packages/80/ff/b87efb0dcfd081c62c7c7601d7681dabe63103cd51fc16f8d57a1ab45961/fastar-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27eed386fd0558e6daa29211111bbd7b740f7c7e881197f8a00ac7c0f3cdb1d7", size = 631668, upload-time = "2026-04-13T17:09:40.537Z" }, + { url = "https://files.pythonhosted.org/packages/24/7c/0ed6dd38b9adc04b3a8ec3b7045908e7c2170ba0ff6e6d2c51bc9fc770f3/fastar-0.11.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a6931bebc1d8e95ddeef55732c195449e6b44ef33aa31b325505097ed3b4d6aa", size = 869663, upload-time = "2026-04-13T17:09:09.78Z" }, + { url = "https://files.pythonhosted.org/packages/58/ce/8b7fb3f23855accebaaf2d2637eac7f261a7a5d936f861a172079f1ef511/fastar-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:891f72ce42a5e28a74fbd4d5fbf1a3ac1a1163d13cbc200cbd005fb0fabc54bd", size = 762938, upload-time = "2026-04-13T17:07:54.51Z" }, + { url = "https://files.pythonhosted.org/packages/07/cc/5491e2b677bb841f768e3aba052d0344338a5c78aa5d4c18b443831a8e8d/fastar-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5b83c1f61f7017d6e1498568038f8745440cfc16ca2f697ec81bac83050108f6", size = 759232, upload-time = "2026-04-13T17:08:08.864Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/b7/643630bdbd179e41e9fae31c03b4cf6061dbf4d6fbbae8425d16eb12545d/fastar-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db73a9b765a516e73983b25341e7b5e0189733878279e278b2295131b0e3a21e", size = 926271, upload-time = "2026-04-13T17:08:23.68Z" }, + { url = "https://files.pythonhosted.org/packages/09/5d/37ade50003b4540e0a53ef100f6692d7ab2ac1122d5acf39920cc09a3e8b/fastar-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:625827d52eb4e8fec942e0233f125ff8010fcf6a67c0a974a8e5f4666b771e3c", size = 818634, upload-time = "2026-04-13T17:08:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ff/135d177de32cc1e837c99019e4643e6e79352bde49544d4ece5b5eebf56b/fastar-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7f5fd8fa21ec0a88296a38dc5d7fc35efd3b26d46a17b8b7c73c5563925ca15", size = 822755, upload-time = "2026-04-13T17:09:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/27/cb/b835dbe76ceac7fa6105851468c259ffd06830eb9c029402e499d0ec153b/fastar-0.11.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:8c15af91b8cd87ddf23ea55355ae513c1de3ab67178f26dad017c9e9c0af6096", size = 887101, upload-time = "2026-04-13T17:08:39.248Z" }, + { url = "https://files.pythonhosted.org/packages/9e/54/aa8289eb57fc550535470397cb051f5a58a7c89ca4de31d5502b916dd894/fastar-0.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03a112395a8b0bff251423bd1564c012f0cc058ad8b6bd8fba96f3d7fc117e44", size = 973606, upload-time = "2026-04-13T17:10:10.98Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/776d50a0897c01dc6bfd0926772ee913436fdae91b9affaf0a0cbd09f0a1/fastar-0.11.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f2994bb8f5f8c11eb12beae1e6e77a907173c9819236b8a4c8f0573652ceccce", size = 1036696, upload-time = "2026-04-13T17:10:28.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/f1/cf0f9b499fb37ac065c8a01ec642f96a3c5eb849c38ae983b59f3b3245e0/fastar-0.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dcf99e4b5973d842c7f19c776c3a83cdc0977d505edce6206438505c0456b517", size = 1078182, upload-time = "2026-04-13T17:10:45.318Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9e/21e4701aec4a1123d4dc4d31578dc18875582b5710e4725f7ceb752a248b/fastar-0.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29c9c386dc0d5dda78845a8e6b1480d26ab861c1e0b68f42ae5735cb70ca07f1", size = 1032336, upload-time = "2026-04-13T17:11:02.364Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e2/5872b28c72c27ec1a00760eace6ff35f714f41ebbd5208cf016b12e29250/fastar-0.11.0-cp311-cp311-win32.whl", hash = "sha256:030b2580fc394f2c9b7890b6735810404e9b9ed5e0344db150b945965b5482b7", size = 457368, upload-time = "2026-04-13T17:11:43.528Z" }, + { url = "https://files.pythonhosted.org/packages/fd/6e/ce6832a16193eb4466f4108be8809c249b51cb1f89dd7894545700d079d5/fastar-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:83ab57ae067969cd0b483ac3b6dccc4b595fc77f5c820760998648d4c42822b5", size = 488605, upload-time = "2026-04-13T17:11:29.161Z" }, + { url = "https://files.pythonhosted.org/packages/15/5a/9cfb80661cf38fd7b0889224beb7d2746784d4ade2a931ed9775a18d8602/fastar-0.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:27b1a4cee2298b704de8151d310462ee7335ed036011ca9aa6e784b30b6c73a9", size = 464580, upload-time = "2026-04-13T17:11:18.583Z" }, + { url = "https://files.pythonhosted.org/packages/0f/06/a5773706afc8bd496769786590bbc56d2d0ee419a299cc12ea3f5717fcf3/fastar-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3c51f1c2cdddbd1420d2897ace7738e36c65e17f6ae84e0bfe763f8d1068bb97", size = 708394, upload-time = "2026-04-13T17:09:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/d5e2a4e48495616440a21eed07558219ca90243ad00b0502586f95bd4833/fastar-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:0d9d6b052baf5380baea866675dab6ccd04ec2460d12b1c46f10ce3f4ee6a820", size = 628417, upload-time = "2026-04-13T17:09:42.145Z" }, + { url = "https://files.pythonhosted.org/packages/ab/69/9816d69ac8265c9e50456637a487ccfb7a9c566efd9dbcd673df9c2558c2/fastar-0.11.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd2f05666d4df7e14885b5c38fefd92a785917387513d33d837ff42ec143a22f", size = 863950, upload-time = "2026-04-13T17:09:11.506Z" }, + { url = "https://files.pythonhosted.org/packages/5b/0d/f88daad53aff2e754b6b5ff2a7113f72447a34f6ef17cc23ca99988117b7/fastar-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e6e74aba1ae77ca4aedcaf1697cd413319f4c88a5ccbe5b42c709517c5097e", size = 760737, upload-time = "2026-04-13T17:07:55.958Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a6/82ef4ecd969d50d92ed3ed9dbd8fe77faa24be5e5736f716edc9f4ce8d62/fastar-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38ef77fe940bbc9b37a98bd838727f844b11731cd39358a2640ff864fb385086", size = 757603, upload-time = "2026-04-13T17:08:10.623Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/50249f0d827251f8ac511495e2eacccebda80a00a0ad73e9615b8113b84f/fastar-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8955e61b32d6aff82c983217abf80933fd823b0e727586fc72f08043d996fd59", size = 923952, upload-time = "2026-04-13T17:08:25.526Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/faee41659e9c379d906d24eaee6d6833ac8cfef0a5df480e5c2a8d3efb33/fastar-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:483532442cdb08fbff0169510224eae0836f2f672cea6aacb52847d90fefdc46", size = 816574, upload-time = "2026-04-13T17:08:56.076Z" }, + { url = "https://files.pythonhosted.org/packages/22/47/0448ea7992b997dad2bf004bfd98eca74b5858630eae080b50c7b17d9ddc/fastar-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:ef5a6071121e05d8287fc75bccb054bcbac8bb0501200a0c0a8feeace5303ea4", size = 819382, upload-time = "2026-04-13T17:09:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/33/ef/0d63eb43586831b7a6f8b22c4d77125a7c594423af1f4f090fa9541b9b40/fastar-0.11.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:e45e598af5afe8412197d4786efd6cf29be02e7d3d4f6a3461149eae5d7e94f1", size = 885254, upload-time = "2026-04-13T17:08:40.9Z" }, + { url = "https://files.pythonhosted.org/packages/01/25/edd584675d69e49a165052c3ee886df1c5d574f3e7d813c990306387c623/fastar-0.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e160919b1c47ddb8538e7e8eb4cd527281b40f0bf75110a75993838ef61f286", size = 971239, upload-time = "2026-04-13T17:10:12.997Z" }, + { url = "https://files.pythonhosted.org/packages/a5/37/e8bb24f506ba2b08fbaf36c5800e843bd4d542954e9331f00418e2d23349/fastar-0.11.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:4bb4dc0fc8f7a6807febcebce8a2f3626ba4955a9263d81ecc630aad83be84c0", size = 1035185, upload-time = "2026-04-13T17:10:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bf/be753736296338149ee4cb3e92e2b5423d6ba17c7b951d15218fd7e99bbf/fastar-0.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4ec95af56aa173f6e320e1183001bf108ba59beaf13edd1fc8200648db203588", size = 1072191, upload-time = "2026-04-13T17:10:47.072Z" }, + { url = "https://files.pythonhosted.org/packages/d2/cd/a81c1aaafb5a22ce57c98ae22f39c89413ed53e4ee6e1b1444b0bd666a6c/fastar-0.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:136cf342735464091c39dc3708168f9fdeb9ebea40b1ead937c61afaf46143d9", size = 1028054, upload-time = "2026-04-13T17:11:04.293Z" }, + { url = "https://files.pythonhosted.org/packages/ec/88/1ce4eed3d70627c95f49ca017f6bbbf2ddcc4b0c601d293259de7689bc20/fastar-0.11.0-cp312-cp312-win32.whl", hash = "sha256:35f23c11b556cc4d3704587faacbc0037f7bdf6c4525cd1d09c70bda4b1c6809", size = 454198, upload-time = "2026-04-13T17:11:45.168Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/1d/26ce92f4331cd61a69840db9ca6115829805eec24f285481a854f578e917/fastar-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:920bc56c3c0b8a8ca492904941d1883c1c947c858cd93343356c29122a38f44c", size = 486697, upload-time = "2026-04-13T17:11:31.084Z" }, + { url = "https://files.pythonhosted.org/packages/ed/96/e6eda4480559c69b05d466e7b5ea9170e81fef3795a73e059959a3258319/fastar-0.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:395248faf89e8a6bd5dc1fd544c8465113b627cb6d7c8b296796b60ebea33593", size = 462591, upload-time = "2026-04-13T17:11:20.577Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d6/3be260037e86fb694e88d47f583bac3a0188c99cee1a6b257ac26cb6b53c/fastar-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:33f544b08b4541b678e53749b4552a44720d96761fb79c172b005b1089c443ed", size = 707975, upload-time = "2026-04-13T17:09:58.866Z" }, + { url = "https://files.pythonhosted.org/packages/e1/cd/7867aefb1784662554a335f2952c75a50f0c70585ed0d2210d6cc15e5627/fastar-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:91c1c792447e4a642745f347ff9847c52af39633071c57ee67ed53c157fc3506", size = 628460, upload-time = "2026-04-13T17:09:43.776Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2b/d11d84bdd5e0e377771b955755771e3460b290da5809cb78c1b735ee2228/fastar-0.11.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:881247e6b6eaea59fc6569f9b61447aa6b9fc2ee864e048b4643d69c52745805", size = 863054, upload-time = "2026-04-13T17:09:13.048Z" }, + { url = "https://files.pythonhosted.org/packages/25/39/d3f428b318fa940b1b6e785b8d54fc895dfb5d5b945ef8d5442ffa904fb2/fastar-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:863b7929845c9fec92ef6c8d59579cf46af5136655e5342f8df5cebe46cab06c", size = 760247, upload-time = "2026-04-13T17:07:57.396Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/04/03949aee82aabb8ede06ac5a4a5579ffaf98a8fe59ce958494508ff15513/fastar-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96b4a57df12bf3211662627a3ea29d62ecb314a2434a0d0843f9fc23e47536e5", size = 756512, upload-time = "2026-04-13T17:08:12.415Z" }, + { url = "https://files.pythonhosted.org/packages/3f/0c/2ca1ae0a3828ca51047962d932b80daca2522db73e8cb9d040cb6ebe28d5/fastar-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceef1c2c4df7b7b8ebd3f5d718bbf457b9bbdf25ce0bd07870211ec4fbd9aff4", size = 922183, upload-time = "2026-04-13T17:08:27.187Z" }, + { url = "https://files.pythonhosted.org/packages/65/68/7fe808b1f73a68e686f25434f538c6dc10ef4dfb3db0ace22cd861744bf8/fastar-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8e545918441910a779659d4759ad0eef349e935fbdb4668a666d3681567eb05", size = 816394, upload-time = "2026-04-13T17:08:57.657Z" }, + { url = "https://files.pythonhosted.org/packages/1f/17/07d086080f8a83b8d7966955e29bcdbd6a060f5bd949dc9d5abd3658cead/fastar-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28095bb8f821e85fc2764e1a55f03e5e2876dee2abe7cd0ee9420d929905d643", size = 818983, upload-time = "2026-04-13T17:09:28.46Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e2/2c4edf0910af2e814ff6d65b77a91196d472ca8a9fb2033bd983f6856caa/fastar-0.11.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0fafb95ecbe70f666a5e9b35dd63974ccdc9bb3d99ccdbd4014a823ec3e659b5", size = 884689, upload-time = "2026-04-13T17:08:42.763Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/04fdcbd6558e60de4ced3b55230fac47675d181252582b2fcec3c74608e5/fastar-0.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af48fed039b94016629dcdad1c95c90c486326dd068de2b0a4df419ee09b6821", size = 970677, upload-time = "2026-04-13T17:10:15.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/b3/2b860a9658550167dbd5824c85e88d0b4b912bf493e42a6322544d6e483d/fastar-0.11.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:74cd96163f39b8638ab4e8d49708ca887959672a22871d8170d01f067319533b", size = 1034026, upload-time = "2026-04-13T17:10:32.318Z" }, + { url = "https://files.pythonhosted.org/packages/b7/9b/fa42ea1188b144bac4b1b60753dfd449974a4d5eda132029ee7711569f94/fastar-0.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e8b993cb5613bab495ed482810bedc0986633fcb9a3b55c37ec88e0d6714f6a", size = 1071147, upload-time = "2026-04-13T17:10:48.833Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/d2e501556dca9f1fbc9246111a31792fb49ad908fa4927f34938a97a3604/fastar-0.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dfe39d91fc28e37e06162d94afe01050220edb7df554acb5b702b5503e564816", size = 1028377, upload-time = "2026-04-13T17:11:06.374Z" }, + { url = "https://files.pythonhosted.org/packages/db/33/5f11f23eca0a569cd052507bc45dda2e5468697f8665728d25be44120f7d/fastar-0.11.0-cp313-cp313-win32.whl", hash = "sha256:c5f63d4d99ff4bfb37c659982ec413358bdee747005348756cc50a04d412d989", size = 454089, upload-time = "2026-04-13T17:11:46.821Z" }, + { url = "https://files.pythonhosted.org/packages/da/2f/35ff03c939cba7a255a9132367873fec6c355fd06a7f84fedcbaf4c8129f/fastar-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:8690ed1928d31ded3ada308e1086525fb3871f5fa81e1b69601a3f7774004583", size = 486312, upload-time = "2026-04-13T17:11:32.86Z" }, + { url = "https://files.pythonhosted.org/packages/ef/71/ee9246cbfcbfd4144558f35e7e9a306ffe0a7564730a5188c45f21d2dab8/fastar-0.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:d977ded9d98a0719a305e0a4d5ee811f1d3e856d853a50acb8ae833c3cd6d5d2", size = 461975, upload-time = "2026-04-13T17:11:22.589Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cd/3644c48ecac456f928c12d47ec3bed36c36555b17c3859856f1ff860265d/fastar-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash 
= "sha256:71375bd6f03c2a43eb47bd949ea38ff45434917f9cdac79675c5b9f60de4fa73", size = 707860, upload-time = "2026-04-13T17:10:00.371Z" }, + { url = "https://files.pythonhosted.org/packages/69/ca/dee04476ae3626b2b040a60ad84628f77e1ffd8444232f2426b0ca1e0d7e/fastar-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:eddfd9cab16e19ae247fe44bf992cb403ccfe27d3931d6de29a4695d95ad386c", size = 628216, upload-time = "2026-04-13T17:09:45.355Z" }, + { url = "https://files.pythonhosted.org/packages/dc/5e/9395c7353d079cb4f5be0f7982ce0dc9f2e7dec5fd175eef466729d6023a/fastar-0.11.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c371f1d4386c699018bb64eb2fa785feacf32785559049d2bb72fe4af023f53", size = 864378, upload-time = "2026-04-13T17:09:14.611Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/1e4f67148223ff219612b6281a6000357abbcc2417964fa5c83f11d68fce/fastar-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cad7fa41e3e66554387481c1a09365e4638becd322904932674159d5f4046728", size = 760921, upload-time = "2026-04-13T17:07:59.138Z" }, + { url = "https://files.pythonhosted.org/packages/0f/82/09d11fb6d12f17993ffaf32ffd30c3c121a11e2966e84f19fb6f66430118/fastar-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf36652fa71b83761717c9899b98732498f8a2cb6327ff16bbf07f6be85c3437", size = 757012, upload-time = "2026-04-13T17:08:14.186Z" }, + { url = "https://files.pythonhosted.org/packages/52/1f/5aeeacc4cb65615e2c9292cd9c5b0cd6fb6d2e6ee472ca6adc6c1b1b22ef/fastar-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f68ff8c17833053da4841720e95edde80ce45bb994b6b7d51418dddaac70ee47", size = 924510, upload-time = "2026-04-13T17:08:28.741Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1a/1e5bdabbeaf2e856928956292609f2ff6a650f94480fb8afaca30229e483/fastar-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4563ed37a12ea1cdc398af8571258d24b988bf342b7b3bf5451bd5891243280c", size = 816602, upload-time = "2026-04-13T17:08:59.461Z" }, + { url = "https://files.pythonhosted.org/packages/87/24/f960147910da3bed41a3adfcb026e17d5f50f4cf467a3324237a7088f61a/fastar-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cee63c9875cba3b70dc44338c560facc5d6e763047dcc4a30501f9a68cf5f890", size = 819452, upload-time = "2026-04-13T17:09:29.926Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f4/3e77d7901d5707fd7f8a352e153c8ae09ea974e6fabad0b7c4eb9944b8d4/fastar-0.11.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:bd76bfffae6d0a91f4ac4a612f721e7aec108db97dccdd120ae063cd66959f27", size = 885254, upload-time = "2026-04-13T17:08:44.285Z" }, + { url = "https://files.pythonhosted.org/packages/47/01/1585edd5ec47782ae93cd94edf05828e0ab02ef00aec00aea4194a600464/fastar-0.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f5b707501ec01c1bc0518f741f01d322e50c9adc19a451aa24f67a2316e9397", size = 971496, upload-time = "2026-04-13T17:10:17.024Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e9/6874c9d1236ded565a0bed54b320ac9f165f287b1d89490fb70f9f323c81/fastar-0.11.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:37c0b5a88a657839aad98b0a6c9e4ac4c2c15d6b49c44ee3935c6b08e9d3e479", size = 1034685, upload-time = "2026-04-13T17:10:34.063Z" }, + { url = "https://files.pythonhosted.org/packages/14/d8/4ab20613ce2983427aee958e39be878dba874aa227c530a845e32429c4f6/fastar-0.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6c55f536c62a6efb180c1af0d5182948bff576bbfe6276e8e1359c9c7d2215d8", size = 1072675, upload-time = "2026-04-13T17:10:50.53Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ae/5ac3b7c20ce4b08f011dd2b979f96caabe64f9b10b157f211ea91bdfadca/fastar-0.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3082eeca59e189b9039335862f4c2780c0c8871d656bfdf559db4414a105b251", size = 1029330, upload-time 
= "2026-04-13T17:11:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e7/37cd6a1d4e288292170b64e19d79ecce2a7de8bb76790323399a2abc4619/fastar-0.11.0-cp314-cp314-win32.whl", hash = "sha256:b201a0a4e29f9fec2a177e13154b8725ec65ab9f83bd6415483efaa2aa18344b", size = 453940, upload-time = "2026-04-13T17:11:48.713Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1c/795c878b1ee29d79021cf8ed81f18f2b25ccde58453b0d34b9bdc7e025ea/fastar-0.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:868fddb26072a43e870a8819134b9f80ee602931be5a76e6fb873e04da343637", size = 486334, upload-time = "2026-04-13T17:11:34.882Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a4/113f104301df8bddcc0b3775b611a30cb7610baa3add933c7ccac9386467/fastar-0.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:3db39c9cc42abb0c780a26b299f24dfbc8be455985e969e15336d70d7b2f833b", size = 461534, upload-time = "2026-04-13T17:11:24.329Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a6/5c5f2c2c8e0c63e56a5636ebc7721589c889e94c0092cec7eb28ae7207e6/fastar-0.11.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:49c3299dec5e125e7ebaa27545714da9c7391777366015427e0ae62d548b442b", size = 707156, upload-time = "2026-04-13T17:10:02.176Z" }, + { url = "https://files.pythonhosted.org/packages/df/f7/982c01b61f0fc135ad2b16d01e6d0ee53cf8791e68827f5f7c5a65b2e5b1/fastar-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3328ed1ed56d31f5198350b17dd60449b8d6b9d47abb4688bab6aef4450a165b", size = 627032, upload-time = "2026-04-13T17:09:46.978Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c3/38f1dac77ae0c71c37b176277c96d830796b8ce2fe69705f917829b53829/fastar-0.11.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd3eca3bbfec84a614bcb4143b4ad4f784d0895babc26cfc88436af88ca23c7a", size = 864403, upload-time = "2026-04-13T17:09:16.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/f0/e69c363bdb3e5a5848e937b662b5469581ee6682c51bc1c0556494773929/fastar-0.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff86a967acb0d621dd24063dda090daa67bf4993b9570e97fe156de88a9006ca", size = 759480, upload-time = "2026-04-13T17:08:00.599Z" }, + { url = "https://files.pythonhosted.org/packages/3b/29/4d8737590c2a6357d614d7cc7288e8f68e7e449680b8922997cc4349e65e/fastar-0.11.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86eaf7c0e985d93a7734168be2fb232b2a8cca53e41431c2782d7c12b12c03b1", size = 756219, upload-time = "2026-04-13T17:08:15.699Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ec/400de7b3b7d48801908f19cf5462177104395799472671b3e8152b2b04ca/fastar-0.11.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91f07b0b8eb67e2f177733a1f884edad7dfb9f8977ffef15927b20cb9604027d", size = 923669, upload-time = "2026-04-13T17:08:30.574Z" }, + { url = "https://files.pythonhosted.org/packages/5d/01/8926c53da923fed7ab4b96e7fbf7f73b663beb4f02095b654d6fab46f9ad/fastar-0.11.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f85c896885eb4abf1a635d54dea22cac6ae48d04fc2ea26ae652fcf1febe1220", size = 815729, upload-time = "2026-04-13T17:09:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/89/f0/5fef4c7946e352651b504b1a4235dac3505e7cfd24020788ab50552e84bf/fastar-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:075c07095c8de4b774ba8f28b9c0a02b1a2cd254da50cbe464dd3bb2432e9158", size = 819812, upload-time = "2026-04-13T17:09:31.907Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c8/0ebc3298b4a45e7bddc50b169ae6a6f5b80c939394d4befe6e60de535ee7/fastar-0.11.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:07f028933820c65750baf3383b807ecce1cd9385cf00ce192b79d263ad6b856c", size = 884074, upload-time = "2026-04-13T17:08:45.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/9f/7baa4cdff8d6fbca41fa5c764b48a941fed8a9ec6c4cc92de65895a28299/fastar-0.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:039f875efa0f01fa43c20bf4e2fc7305489c61d0ac76eda991acfba7820a0e63", size = 969450, upload-time = "2026-04-13T17:10:18.667Z" }, + { url = "https://files.pythonhosted.org/packages/d4/dc/1ebbfb58a47056ba866494f19efbcdd2ba2897096b94f36e796594b4d05b/fastar-0.11.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:fff12452a9a5c6814a012445f26365541cc3d99dcca61f09762e6a389f7a32ea", size = 1033775, upload-time = "2026-04-13T17:10:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/c2/5f/ce4e3914066f08c99eb8c32952cc07c1a013e81b1db1b0f598130bf6b974/fastar-0.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:2bf733e09f942b6fa876efe30a90508d1f4caef5630c00fb2a84fba355873712", size = 1072158, upload-time = "2026-04-13T17:10:52.497Z" }, + { url = "https://files.pythonhosted.org/packages/03/2a/6bca72992c84151c387cc6558f3867f5ebe5fb3684ee6fa9b76280ba4b8e/fastar-0.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d1531fa848fdd3677d2dce0a4b436ea64d9ae38fb8babe2ddbc180dd153cb7a3", size = 1028577, upload-time = "2026-04-13T17:11:09.934Z" }, + { url = "https://files.pythonhosted.org/packages/83/18/7a7c15657a3da5569b26fc51cde6a80f8d84cb54b3b1aea6d74a103db4ad/fastar-0.11.0-cp314-cp314t-win32.whl", hash = "sha256:5744551bc67c6fc6581cbd0e34a0fd6e2cd0bd30b43e94b1c3119cf35064b162", size = 453601, upload-time = "2026-04-13T17:11:53.726Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d8/331b59a6de279f3ad75c10c02c40a12f21d64a437d9c3d6f1af2dcbd7a76/fastar-0.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f4ce44e3b56c47cf38244b98d29f269b259740a580c47a2552efa5b96a5458fb", size = 486436, upload-time = "2026-04-13T17:11:40.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/fd/5390ec4f49100f3ecb9968a392f9e6d039f1e3fe0ecd28443716ff01e589/fastar-0.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:76c1359314355eafbc6989f20fb1ad565a3d10200117923b9da765a17e2f6f11", size = 461049, upload-time = "2026-04-13T17:11:25.918Z" }, + { url = "https://files.pythonhosted.org/packages/cc/5c/9bbeffbf1905391446dd98aa520422ce7affde5c9a7c22d757cc5d7c1397/fastar-0.11.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1266d6a004f427b0d61bd6c7b544d84cc964691b2232c2f4d635a1b75f2f6d5e", size = 711644, upload-time = "2026-04-13T17:10:07.663Z" }, + { url = "https://files.pythonhosted.org/packages/7e/af/ae5cf39d4fb82d0c592705f5ec6db1b065be5265c151b108f86126ee8773/fastar-0.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:298a827ec04ade43733f6ca960d0faec38706aa1494175869ea7ea17f5bad5d3", size = 634371, upload-time = "2026-04-13T17:09:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/7e/36/8d4569e26473c72ccb02d1c5df3ed710073f1c06eca09c26d52ea79fd815/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8800e2387e463a0e5799416a1cbe72dd0fde7270a20e4bde684145e7878f6516", size = 870850, upload-time = "2026-04-13T17:09:21.439Z" }, + { url = "https://files.pythonhosted.org/packages/bf/46/724dc796e1756d3977970f820d30d59bb8cab8e3671b285f1d82ab513aec/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7496def0a2befd82d429cb004ef7ca831585cc887947bd6b9abb68a5ef852b0b", size = 764469, upload-time = "2026-04-13T17:08:05.638Z" }, + { url = "https://files.pythonhosted.org/packages/99/e3/74d6859e632e8fb9339a14f652fb9f800c2bd6aa53071e311c0be3fbab8b/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:878eaf15463eb572e3538af7ca3a8534e5e279cf8196db902d24e5725c4af86e", size = 761375, upload-time = "2026-04-13T17:08:20.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/e7/cc70e2be5ef8731a7525552b1c35c1448cf9eae6a62cb3a56f12c1bf27ea/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0324ed1d1ef0186e1bbd843b17807d6d837d0906899d4c99378b02c5d86bdd9c", size = 928189, upload-time = "2026-04-13T17:08:35.663Z" }, + { url = "https://files.pythonhosted.org/packages/3c/33/c9a969e78dca323547276a6fee5f4f9588f7cd5ab45acec3778c67399589/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bdf9bd863205590beaf8ef6e66f315310196632180dceaf674985d01a876cac3", size = 820864, upload-time = "2026-04-13T17:09:06.366Z" }, + { url = "https://files.pythonhosted.org/packages/84/bd/6b9434b541fe55c125b5f2e017a565596a2d215aa09207e4555e4585064f/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59af8dbb683b24b90fb5b506de080faeab0a17a908e6c2a5d93a97260ed75d7b", size = 824060, upload-time = "2026-04-13T17:09:37.377Z" }, + { url = "https://files.pythonhosted.org/packages/24/8d/871d5f8cf4c6f13987119fb0a9ae8be131e34f2756c2524e9974adf33824/fastar-0.11.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:9f3df73a3c4292cfe15696cdf59cdb6c309ab59d30b34c733be13c6e32d9a264", size = 889217, upload-time = "2026-04-13T17:08:50.884Z" }, + { url = "https://files.pythonhosted.org/packages/d0/26/cca0fd2704f3ed20165e5613ed911549aef3aaf3b0b5b02fee0e8e23e6cc/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:aa3762cbb16e41a76b61f4a6914937a71aab3a7b6c2d82ca233bc686ebaf756b", size = 975418, upload-time = "2026-04-13T17:10:24.307Z" }, + { url = "https://files.pythonhosted.org/packages/99/94/8bbb0b13f5b6cbe2492f0b7cbba5103e6163976a3331466d010e781fa189/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:a8c7bc8ac74cb359bb546b199288c83236372d094b402e557c197e85527495cd", size = 1038492, upload-time = "2026-04-13T17:10:41.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/d3/5b7df222a30eac2822ffd00f82fd4c2ce84fba4b369d1e1a03732fd177fc/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:587cbd060a2699c5f66281081395bb4657b2b1e0eef5c206b1aabf740019d670", size = 1080210, upload-time = "2026-04-13T17:10:58.462Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/56ef943ea524784598c035ccbd42e564e937da0438ae3f55f0e76cb95571/fastar-0.11.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a1c56957ac82408be37a3f63594bc83e0919e8760492a4475e542f9f1828778", size = 1034886, upload-time = "2026-04-13T17:11:15.617Z" }, ] [[package]] name = "fastcrud" -version = "0.19.2" +version = "0.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastapi" }, @@ -482,72 +1006,191 @@ dependencies = [ { name = "sqlalchemy" }, { name = "sqlalchemy-utils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/ae/1fc88455e8e399aff19e591423a948cfcdb712a5974fb8d0cedb31412dae/fastcrud-0.19.2.tar.gz", hash = "sha256:d048177cfd6fc1209a36ee71d9790bc7fc16a2c5d856ff783273699533b43bb3", size = 70870, upload-time = "2025-11-15T19:49:44.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/5c/2ee44d6cc63cc1f039cb218ea2681d6978c3a27b094e2a4bdf02f503f599/fastcrud-0.21.0.tar.gz", hash = "sha256:38990a96d4639e65e6f1c9b8f4cebfe5bf1a84ba731eb5c224fff593a7316e02", size = 79300, upload-time = "2026-01-23T19:30:07.347Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/6e/f038ca92fe680ad31cee176c61a5536ddbf31f1bbf3923975fd221917abc/fastcrud-0.19.2-py3-none-any.whl", hash = "sha256:1562a905f92196e485d2df2f6ea34f0408702bf41c6cad3a89a1fedf08a4f4d9", size = 98032, upload-time = "2025-11-15T19:49:42.887Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1b/85fb4c1cb771845e5ede668ef28c090e021020d117e891fa1e599c9e32d1/fastcrud-0.21.0-py3-none-any.whl", hash = 
"sha256:94daa5ca7815a268b700ee31b36455a8248a1a0547f3fcef5969df0047aab338", size = 107150, upload-time = "2026-01-23T19:30:05.895Z" }, ] [[package]] -name = "filelock" -version = "3.20.0" +name = "fastsecure" +version = "0.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, +dependencies = [ + { name = "bcrypt" }, + { name = "greenlet" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "python-multipart" }, + { name = "redis" }, + { name = "sqlalchemy" }, + { name = "types-python-jose" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/12/c9abbd8e08bbc26898e9db7084cc09619f289f3aefdb3216d6641d59baba/fastsecure-0.3.0.tar.gz", hash = "sha256:bfaf9830965fce198c0ff2c4b0cfea2c5d153b79dd92fa64c3c0555a009b64e9", size = 47028, upload-time = "2025-02-11T22:30:32.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/03/59dd272c39997f1fbfe11fdac7cb2185608aad2166312a86c3437e219d0b/fastsecure-0.3.0-py3-none-any.whl", hash = "sha256:33050a45af64771b49967a63de8d892e54c0c4f5e2ca391b0e7f98221dbbfa89", size = 42045, upload-time = "2025-02-11T22:30:30.567Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = 
"2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 
250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { 
url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = 
"2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 
240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" 
}, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] name = "greenlet" -version = "3.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, - { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, - { 
url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, - { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, - { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, - { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, - { url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, - { url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, - { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, - { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, - { url = 
"https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, - { url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, - { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, - { url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, - { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, - { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, - { url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, - { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, - { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, - { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, - { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, -] - -[[package]] -name = "gunicorn" -version = "23.0.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031, upload-time = "2024-08-10T20:25:27.378Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029, upload-time = "2024-08-10T20:25:24.996Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/3c/3f/dbf99fb14bfeb88c28f16729215478c0e265cacd6dc22270c8f31bb6892f/greenlet-3.5.0.tar.gz", hash = "sha256:d419647372241bc68e957bf38d5c1f98852155e4146bd1e4121adea81f4f01e4", size = 196995, upload-time = "2026-04-27T13:37:15.544Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0f/a91f143f356523ff682309732b175765a9bc2836fd7c081c2c67fedc1ad4/greenlet-3.5.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8f1cc966c126639cd152fdaa52624d2655f492faa79e013fea161de3e6dda082", size = 284726, upload-time = "2026-04-27T12:20:51.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/82/800646c7ffc5dbabd75ddd2f6b519bb898c0c9c969e5d0473bfe5d20bcce/greenlet-3.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:362624e6a8e5bca3b8233e45eef33903a100e9539a2b995c364d595dbc4018b3", size = 604264, upload-time = 
"2026-04-27T12:52:39.494Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ac/354867c0bba812fc33b15bc55aedafedd0aee3c7dd91dfca22444157dc0c/greenlet-3.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5ecd83806b0f4c2f53b1018e0005cd82269ea01d42befc0368730028d850ed1c", size = 616099, upload-time = "2026-04-27T12:59:39.623Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/192090c4a5b30df148c22bf4b8895457d739a7c7c5a7b9c41e5dd7f537f2/greenlet-3.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fa94cb2288681e3a11645958f1871d48ee9211bd2f66628fdace505927d6e564", size = 623976, upload-time = "2026-04-27T13:02:37.363Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/815bece7399e01cadb69014219eebd0042339875c59a59b0820a46ece356/greenlet-3.5.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ff251e9a0279522e62f6176412869395a64ddf2b5c5f782ff609a8216a4e662", size = 615198, upload-time = "2026-04-27T12:25:25.928Z" }, + { url = "https://files.pythonhosted.org/packages/24/11/05eb2b9b188c6df7d68a89c99134d644a7af616a40b9808e8e6ced315d5d/greenlet-3.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:64d6ac45f7271f48e45f67c95b54ef73534c52ec041fcda8edf520c6d811f4bc", size = 418379, upload-time = "2026-04-27T13:05:12.755Z" }, + { url = "https://files.pythonhosted.org/packages/10/80/3b2c0a895d6698f6ddb31b07942ebfa982f3e30888bc5546a5b5990de8b2/greenlet-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d874e79afd41a96e11ff4c5d0bc90a80973e476fda1c2c64985667397df432b", size = 1574927, upload-time = "2026-04-27T12:53:25.81Z" }, + { url = "https://files.pythonhosted.org/packages/44/0e/f354af514a4c61454dbc68e44d47544a5a4d6317e30b77ddfa3a09f4c5f3/greenlet-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0ed006e4b86c59de7467eb2601cd1b77b5a7d657d1ee55e30fe30d76451edba4", size = 1642683, upload-time = "2026-04-27T12:25:23.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/6a/87f38255201e993a1915265ebb80cd7c2c78b04a45744995abbf6b259fd8/greenlet-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:703cb211b820dbffbbc55a16bfc6e4583a6e6e990f33a119d2cc8b83211119c8", size = 238115, upload-time = "2026-04-27T12:21:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f8/450fe3c5938fa737ea4d22699772e6e34e8e24431a47bf4e8a1ceed4a98e/greenlet-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:6c18dfb59c70f5a94acd271c72e90128c3c776e41e5f07767908c8c1b74ad339", size = 235017, upload-time = "2026-04-27T12:22:26.768Z" }, + { url = "https://files.pythonhosted.org/packages/ef/32/f2ce6d4cac3e55bc6173f92dbe627e782e1850f89d986c3606feb63aafa7/greenlet-3.5.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:db2910d3c809444e0a20147361f343fe2798e106af8d9d8506f5305302655a9f", size = 286228, upload-time = "2026-04-27T12:20:34.421Z" }, + { url = "https://files.pythonhosted.org/packages/b7/aa/caed9e5adf742315fc7be2a84196373aab4816e540e38ba0d76cb7584d68/greenlet-3.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ec9ea74e7268ace7f9aab1b1a4e730193fc661b39a993cd91c606c32d4a3628", size = 601775, upload-time = "2026-04-27T12:52:41.045Z" }, + { url = "https://files.pythonhosted.org/packages/c7/af/90ae08497400a941595d12774447f752d3dfe0fbb012e35b76bc5c0ff37e/greenlet-3.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54d243512da35485fc7a6bf3c178fdda6327a9d6506fcdd62b1abd1e41b2927b", size = 614436, upload-time = "2026-04-27T12:59:41.595Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e9/4eeadf8cb3403ac274245ba75f07844abc7fa5f6787583fc9156ba741e0f/greenlet-3.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:41353ec2ecedf7aa8f682753a41919f8718031a6edac46b8d3dc7ed9e1ceb136", size = 620610, upload-time = "2026-04-27T13:02:39.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/e0/2e13df68f367e2f9960616927d60857dd7e56aaadd59a47c644216b2f920/greenlet-3.5.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d280a7f5c331622c69f97eb167f33577ff2d1df282c41cd15907fc0a3ca198c", size = 611388, upload-time = "2026-04-27T12:25:28.008Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ef/f913b3c0eb7d26d86a2401c5e1546c9d46b657efee724b06f6f4ac5d8824/greenlet-3.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:58c1c374fe2b3d852f9b6b11a7dff4c85404e51b9a596fd9e89cf904eb09866d", size = 422775, upload-time = "2026-04-27T13:05:14.261Z" }, + { url = "https://files.pythonhosted.org/packages/82/f7/393c64055132ac0d488ef6be549253b7e6274194863967ddc0bc8f5b87b8/greenlet-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1eb67d5adefb5bd2e182d42678a328979a209e4e82eb93575708185d31d1f588", size = 1570768, upload-time = "2026-04-27T12:53:28.099Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4b/eaf7735253522cf56d1b74d672a58f54fc114702ceaf05def59aae72f6e1/greenlet-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2628d6c86f6cb0cb45e0c3c54058bbec559f57eaae699447748cb3928150577e", size = 1635983, upload-time = "2026-04-27T12:25:26.903Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/4fb3a0805bd5165da5ebf858da7cc01cce8061674106d2cf5bdab32cbfde/greenlet-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4d9f0624c775f2dfc56ba54d515a8c771044346852a918b405914f6b19d7fd8", size = 238840, upload-time = "2026-04-27T12:23:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cb/baa584cb00532126ffe12d9787db0a60c5a4f55c27bfe2666df5d4c30a32/greenlet-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:83ed9f27f1680b50e89f40f6df348a290ea234b249a4003d366663a12eab94f2", size = 235615, upload-time = "2026-04-27T12:21:38.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/58/fc576f99037ce19c5aa16628e4c3226b6d1419f72a62c79f5f40576e6eb3/greenlet-3.5.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5a5ed18de6a0f6cc7087f1563f6bd93fc7df1c19165ca01e9bde5a5dc281d106", size = 285066, upload-time = "2026-04-27T12:23:05.033Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ba/b28ddbe6bfad6a8ac196ef0e8cff37bc65b79735995b9e410923fffeeb70/greenlet-3.5.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a717fbc46d8a354fa675f7c1e813485b6ba3885f9bef0cd56e5ba27d758ff5b", size = 604414, upload-time = "2026-04-27T12:52:42.358Z" }, + { url = "https://files.pythonhosted.org/packages/09/06/4b69f8f0b67603a8be2790e55107a190b376f2627fe0eaf5695d85ffb3cd/greenlet-3.5.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ddc090c5c1792b10246a78e8c2163ebbe04cf877f9d785c230a7b27b39ad038e", size = 617349, upload-time = "2026-04-27T12:59:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/15/a643b4ecd09969e30b8a150d5919960caae0abe4f5af75ab040b1ab85e78/greenlet-3.5.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4964101b8585c144cbda5532b1aa644255126c08a265dae90c16e7a0e63aaa9d", size = 623234, upload-time = "2026-04-27T13:02:40.611Z" }, + { url = "https://files.pythonhosted.org/packages/8a/17/a3918541fd0ddefe024a69de6d16aa7b46d36ac19562adaa63c7fa180eff/greenlet-3.5.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2094acd54b272cb6eae8c03dd87b3fa1820a4cef18d6889c378d503500a1dc13", size = 613927, upload-time = "2026-04-27T12:25:30.28Z" }, + { url = "https://files.pythonhosted.org/packages/77/18/3b13d5ef1275b0ffaf933b05efa21408ac4ca95823c7411d79682e4fdcff/greenlet-3.5.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:7022615368890680e67b9965d33f5773aade330d5343bbe25560135aaa849eae", size = 425243, upload-time = "2026-04-27T13:05:15.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/e1/bd0af6213c7dd33175d8a462d4c1fe1175124ebed4855bc1475a5b5242c2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5e05ba267789ea87b5a155cf0e810b1ab88bf18e9e8740813945ceb8ee4350ba", size = 1570893, upload-time = "2026-04-27T12:53:29.483Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2a/0789702f864f5382cb476b93d7a9c823c10472658102ccd65f415747d2e2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0ecec963079cd58cbd14723582384f11f166fd58883c15dcbfb342e0bc9b5846", size = 1636060, upload-time = "2026-04-27T12:25:28.845Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8f/22bf9df92bbff0eb07842b60f7e63bf7675a9742df628437a9f02d09137f/greenlet-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:728d9667d8f2f586644b748dbd9bb67e50d6a9381767d1357714ea6825bb3bf5", size = 238740, upload-time = "2026-04-27T12:24:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b7/9c5c3d653bd4ff614277c049ac676422e2c557db47b4fe43e6313fc005dc/greenlet-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:47422135b1d308c14b2c6e758beedb1acd33bb91679f5670edf77bf46244722b", size = 235525, upload-time = "2026-04-27T12:23:12.308Z" }, + { url = "https://files.pythonhosted.org/packages/94/5e/a70f31e3e8d961c4ce589c15b28e4225d63704e431a23932a3808cbcc867/greenlet-3.5.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:f35807464c4c58c55f0d31dfa83c541a5615d825c2fe3d2b95360cf7c4e3c0a8", size = 285564, upload-time = "2026-04-27T12:23:08.555Z" }, + { url = "https://files.pythonhosted.org/packages/af/a6/046c0a28e21833e4086918218cfb3d8bed51c075a1b700f20b9d7861c0f4/greenlet-3.5.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55fa7ea52771be44af0de27d8b80c02cd18c2c3cddde6c847ecebdf72418b6a1", size = 651166, upload-time = "2026-04-27T12:52:43.644Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/f8/4af27f71c5ff32a7fbc516adb46370d9c4ae2bc7bd3dc7d066ac542b4b15/greenlet-3.5.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a97e4821aa710603f94de0da25f25096454d78ffdace5dc77f3a006bc01abba3", size = 663792, upload-time = "2026-04-27T12:59:44.93Z" }, + { url = "https://files.pythonhosted.org/packages/fb/89/2dadb89793c37ee8b4c237857188293e9060dc085f19845c292e00f8e091/greenlet-3.5.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bf2d8a80bec89ab46221ae45c5373d5ba0bd36c19aa8508e85c6cd7e5106cd37", size = 668086, upload-time = "2026-04-27T13:02:42.314Z" }, + { url = "https://files.pythonhosted.org/packages/a3/59/1bd6d7428d6ed9106efbb8c52310c60fd04f6672490f452aeaa3829aa436/greenlet-3.5.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f52a464e4ed91780bdfbbdd2b97197f3accaa629b98c200f4dffada759f3ae7", size = 660933, upload-time = "2026-04-27T12:25:33.276Z" }, + { url = "https://files.pythonhosted.org/packages/82/35/75722be7e26a2af4cbd2dc35b0ed382dacf9394b7e75551f76ed1abe87f2/greenlet-3.5.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:1bae92a1dd94c5f9d9493c3a212dd874c202442047cf96446412c862feca83a2", size = 470799, upload-time = "2026-04-27T13:05:17.094Z" }, + { url = "https://files.pythonhosted.org/packages/83/e4/b903e5a5fae1e8a28cdd32a0cfbfd560b668c25b692f67768822ddc5f40f/greenlet-3.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:762612baf1161ccb8437c0161c668a688223cba28e1bf038f4eb47b13e39ccdf", size = 1618401, upload-time = "2026-04-27T12:53:31.062Z" }, + { url = "https://files.pythonhosted.org/packages/0e/e3/5ec408a329acb854fb607a122e1ee5fb3ff649f9a97952948a90803c0d8e/greenlet-3.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:57a43c6079a89713522bc4bcb9f75070ecf5d3dbad7792bfe42239362cbf2a16", size = 1682038, upload-time = "2026-04-27T12:25:31.838Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/20/6b165108058767ee643c55c5c4904d591a830ee2b3c7dbd359828fbc829f/greenlet-3.5.0-cp314-cp314-win_amd64.whl", hash = "sha256:3bc59be3945ae9750b9e7d45067d01ae3fe90ea5f9ade99239dabdd6e28a5033", size = 239835, upload-time = "2026-04-27T12:24:54.136Z" }, + { url = "https://files.pythonhosted.org/packages/4e/62/1c498375cee177b55d980c1db319f26470e5309e54698c8f8fc06c0fd539/greenlet-3.5.0-cp314-cp314-win_arm64.whl", hash = "sha256:a96fcee45e03fe30a62669fd16ab5c9d3c172660d3085605cb1e2d1280d3c988", size = 236862, upload-time = "2026-04-27T12:23:24.957Z" }, + { url = "https://files.pythonhosted.org/packages/78/a8/4522939255bb5409af4e87132f915446bf3622c2c292d14d3c38d128ae82/greenlet-3.5.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:a10a732421ab4fec934783ce3e54763470d0181db6e3468f9103a275c3ed1853", size = 293614, upload-time = "2026-04-27T12:24:12.874Z" }, + { url = "https://files.pythonhosted.org/packages/15/5e/8744c52e2c027b5a8772a01561934c8835f869733e101f62075c60430340/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fc391b1566f2907d17aaebe78f8855dc45675159a775fcf9e61f8ee0078e87f", size = 650723, upload-time = "2026-04-27T12:52:45.412Z" }, + { url = "https://files.pythonhosted.org/packages/00/ef/7b4c39c03cf46ceca512c5d3f914afd85aa30b2cc9a93015b0dd73e4be6c/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:680bd0e7ad5e8daa8a4aa89f68fd6adc834b8a8036dc256533f7e08f4a4b01f7", size = 656529, upload-time = "2026-04-27T12:59:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5c/0602239503b124b70e39355cbdb39361ecfe65b87a5f2f63752c32f5286f/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1aa4ce8debcd4ea7fb2e150f3036588c41493d1d52c43538924ae1819003f4ce", size = 657015, upload-time = "2026-04-27T13:02:43.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/b5/c7768f352f5c010f92064d0063f987e7dc0cd290a6d92a34109015ce4aa1/greenlet-3.5.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddb36c7d6c9c0a65f18c7258634e0c416c6ab59caac8c987b96f80c2ebda0112", size = 654364, upload-time = "2026-04-27T12:25:35.64Z" }, + { url = "https://files.pythonhosted.org/packages/38/51/8699f865f125dc952384cb432b0f7138aa4d8f2969a7d12d0df5b94d054d/greenlet-3.5.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:728a73687e39ae9ca34e4694cbf2f049d3fbc7174639468d0f67200a97d8f9e2", size = 488275, upload-time = "2026-04-27T13:05:18.28Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d0/079ebe12e4b1fc758857ce5be1a5e73f06870f2101e52611d1e71925ce54/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e5ddf316ced87539144621453c3aef229575825fe60c604e62bedc4003f372b2", size = 1614204, upload-time = "2026-04-27T12:53:32.618Z" }, + { url = "https://files.pythonhosted.org/packages/6d/89/6c2fb63df3596552d20e58fb4d96669243388cf680cff222758812c7bfaa/greenlet-3.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4a448128607be0de65342dc9b31be7f948ef4cc0bc8832069350abefd310a8f2", size = 1675480, upload-time = "2026-04-27T12:25:34.168Z" }, + { url = "https://files.pythonhosted.org/packages/15/32/77ee8a6c1564fc345a491a4e85b3bf360e4cf26eac98c4532d2fdb96e01f/greenlet-3.5.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d60097128cb0a1cab9ea541186ea13cd7b847b8449a7787c2e2350da0cb82d86", size = 245324, upload-time = "2026-04-27T12:24:40.295Z" }, ] [[package]] @@ -559,59 +1202,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] -[[package]] -name = "hiredis" -version = "3.2.1" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/08/24b72f425b75e1de7442fb1740f69ca66d5820b9f9c0e2511ff9aadab3b7/hiredis-3.2.1.tar.gz", hash = "sha256:5a5f64479bf04dd829fe7029fad0ea043eac4023abc6e946668cbbec3493a78d", size = 89096, upload-time = "2025-05-23T11:41:57.227Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/84/2ea9636f2ba0811d9eb3bebbbfa84f488238180ddab70c9cb7fa13419d78/hiredis-3.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4ae0be44cab5e74e6e4c4a93d04784629a45e781ff483b136cc9e1b9c23975c", size = 82425, upload-time = "2025-05-23T11:39:54.135Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b9ebf766a99998fda3975937afa4912e98de9d7f8d0b83f48096bdd961c1/hiredis-3.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:24647e84c9f552934eb60b7f3d2116f8b64a7020361da9369e558935ca45914d", size = 45231, upload-time = "2025-05-23T11:39:55.455Z" }, - { url = "https://files.pythonhosted.org/packages/68/4c/c009b4d9abeb964d607f0987561892d1589907f770b9e5617552b34a4a4d/hiredis-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fb3e92d1172da8decc5f836bf8b528c0fc9b6d449f1353e79ceeb9dc1801132", size = 43240, upload-time = "2025-05-23T11:39:57.8Z" }, - { url = "https://files.pythonhosted.org/packages/e9/83/d53f3ae9e4ac51b8a35afb7ccd68db871396ed1d7c8ba02ce2c30de0cf17/hiredis-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38ba7a32e51e518b6b3e470142e52ed2674558e04d7d73d86eb19ebcb37d7d40", size = 169624, upload-time = "2025-05-23T11:40:00.055Z" }, - { url = "https://files.pythonhosted.org/packages/91/2f/f9f091526e22a45385d45f3870204dc78aee365b6fe32e679e65674da6a7/hiredis-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fc632be73174891d6bb71480247e57b2fd8f572059f0a1153e4d0339e919779", size = 165799, upload-time = "2025-05-23T11:40:01.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/cc/e561274438cdb19794f0638136a5a99a9ca19affcb42679b12a78016b8ad/hiredis-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03e6839ff21379ad3c195e0700fc9c209e7f344946dea0f8a6d7b5137a2a141", size = 180612, upload-time = "2025-05-23T11:40:02.385Z" }, - { url = "https://files.pythonhosted.org/packages/83/ba/a8a989f465191d55672e57aea2a331bfa3a74b5cbc6f590031c9e11f7491/hiredis-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99983873e37c71bb71deb544670ff4f9d6920dab272aaf52365606d87a4d6c73", size = 169934, upload-time = "2025-05-23T11:40:03.524Z" }, - { url = "https://files.pythonhosted.org/packages/52/5f/1148e965df1c67b17bdcaef199f54aec3def0955d19660a39c6ee10a6f55/hiredis-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd982c419f48e3a57f592678c72474429465bb4bfc96472ec805f5d836523f0", size = 170074, upload-time = "2025-05-23T11:40:04.618Z" }, - { url = "https://files.pythonhosted.org/packages/43/5e/e6846ad159a938b539fb8d472e2e68cb6758d7c9454ea0520211f335ea72/hiredis-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc993f4aa4abc029347f309e722f122e05a3b8a0c279ae612849b5cc9dc69f2d", size = 164158, upload-time = "2025-05-23T11:40:05.653Z" }, - { url = "https://files.pythonhosted.org/packages/0a/a1/5891e0615f0993f194c1b51a65aaac063b0db318a70df001b28e49f0579d/hiredis-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dde790d420081f18b5949227649ccb3ed991459df33279419a25fcae7f97cd92", size = 162591, upload-time = "2025-05-23T11:40:07.041Z" }, - { url = "https://files.pythonhosted.org/packages/d4/da/8bce52ca81716f53c1014f689aea4c170ba6411e6848f81a1bed1fc375eb/hiredis-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b0c8cae7edbef860afcf3177b705aef43e10b5628f14d5baf0ec69668247d08d", size = 174808, upload-time = "2025-05-23T11:40:09.146Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/91/fc1ef444ed4dc432b5da9b48e9bd23266c703528db7be19e2b608d67ba06/hiredis-3.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e8a90eaca7e1ce7f175584f07a2cdbbcab13f4863f9f355d7895c4d28805f65b", size = 167060, upload-time = "2025-05-23T11:40:10.757Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/beebf73a5455f232b97e00564d1e8ad095d4c6e18858c60c6cfdd893ac1e/hiredis-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:476031958fa44e245e803827e0787d49740daa4de708fe514370293ce519893a", size = 164833, upload-time = "2025-05-23T11:40:12.001Z" }, - { url = "https://files.pythonhosted.org/packages/75/79/a9591bdc0148c0fbdf54cf6f3d449932d3b3b8779e87f33fa100a5a8088f/hiredis-3.2.1-cp311-cp311-win32.whl", hash = "sha256:eb3f5df2a9593b4b4b676dce3cea53b9c6969fc372875188589ddf2bafc7f624", size = 20402, upload-time = "2025-05-23T11:40:13.216Z" }, - { url = "https://files.pythonhosted.org/packages/9f/05/c93cc6fab31e3c01b671126c82f44372fb211facb8bd4571fd372f50898d/hiredis-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1402e763d8a9fdfcc103bbf8b2913971c0a3f7b8a73deacbda3dfe5f3a9d1e0b", size = 22085, upload-time = "2025-05-23T11:40:14.19Z" }, - { url = "https://files.pythonhosted.org/packages/60/a1/6da1578a22df1926497f7a3f6a3d2408fe1d1559f762c1640af5762a8eb6/hiredis-3.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3742d8b17e73c198cabeab11da35f2e2a81999d406f52c6275234592256bf8e8", size = 82627, upload-time = "2025-05-23T11:40:15.362Z" }, - { url = "https://files.pythonhosted.org/packages/6c/b1/1056558ca8dc330be5bb25162fe5f268fee71571c9a535153df9f871a073/hiredis-3.2.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c2f3176fb617a79f6cccf22cb7d2715e590acb534af6a82b41f8196ad59375d", size = 45404, upload-time = "2025-05-23T11:40:16.72Z" }, - { url = "https://files.pythonhosted.org/packages/58/4f/13d1fa1a6b02a99e9fed8f546396f2d598c3613c98e6c399a3284fa65361/hiredis-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:a8bd46189c7fa46174e02670dc44dfecb60f5bd4b67ed88cb050d8f1fd842f09", size = 43299, upload-time = "2025-05-23T11:40:17.697Z" }, - { url = "https://files.pythonhosted.org/packages/c0/25/ddfac123ba5a32eb1f0b40ba1b2ec98a599287f7439def8856c3c7e5dd0d/hiredis-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f86ee4488c8575b58139cdfdddeae17f91e9a893ffee20260822add443592e2f", size = 172194, upload-time = "2025-05-23T11:40:19.143Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1e/443a3703ce570b631ca43494094fbaeb051578a0ebe4bfcefde351e1ba25/hiredis-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3717832f4a557b2fe7060b9d4a7900e5de287a15595e398c3f04df69019ca69d", size = 168429, upload-time = "2025-05-23T11:40:20.329Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d6/0d8c6c706ed79b2298c001b5458c055615e3166533dcee3900e821a18a3e/hiredis-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5cb12c21fb9e2403d28c4e6a38120164973342d34d08120f2d7009b66785644", size = 182967, upload-time = "2025-05-23T11:40:21.921Z" }, - { url = "https://files.pythonhosted.org/packages/da/68/da8dd231fbce858b5a20ab7d7bf558912cd125f08bac4c778865ef5fe2c2/hiredis-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:080fda1510bbd389af91f919c11a4f2aa4d92f0684afa4709236faa084a42cac", size = 172495, upload-time = "2025-05-23T11:40:23.105Z" }, - { url = "https://files.pythonhosted.org/packages/65/25/83a31420535e2778662caa95533d5c997011fa6a88331f0cdb22afea9ec3/hiredis-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1252e10a1f3273d1c6bf2021e461652c2e11b05b83e0915d6eb540ec7539afe2", size = 173142, upload-time = "2025-05-23T11:40:24.24Z" }, - { url = "https://files.pythonhosted.org/packages/41/d7/cb907348889eb75e2aa2e6b63e065b611459e0f21fe1e371a968e13f0d55/hiredis-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9e320e99ab7d2a30dc91ff6f745ba38d39b23f43d345cdee9881329d7b511d6", size = 166433, upload-time = "2025-05-23T11:40:25.287Z" }, - { url = "https://files.pythonhosted.org/packages/01/5d/7cbc69d82af7b29a95723d50f5261555ba3d024bfbdc414bdc3d23c0defb/hiredis-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:641668f385f16550fdd6fdc109b0af6988b94ba2acc06770a5e06a16e88f320c", size = 164883, upload-time = "2025-05-23T11:40:26.454Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/f995b1296b1d7e0247651347aa230f3225a9800e504fdf553cf7cd001cf7/hiredis-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1e1f44208c39d6c345ff451f82f21e9eeda6fe9af4ac65972cc3eeb58d41f7cb", size = 177262, upload-time = "2025-05-23T11:40:27.576Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f3/723a67d729e94764ce9e0d73fa5f72a0f87d3ce3c98c9a0b27cbf001cc79/hiredis-3.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f882a0d6415fffe1ffcb09e6281d0ba8b1ece470e866612bbb24425bf76cf397", size = 169619, upload-time = "2025-05-23T11:40:29.671Z" }, - { url = "https://files.pythonhosted.org/packages/45/58/f69028df00fb1b223e221403f3be2059ae86031e7885f955d26236bdfc17/hiredis-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4e78719a0730ebffe335528531d154bc8867a246418f74ecd88adbc4d938c49", size = 167303, upload-time = "2025-05-23T11:40:30.902Z" }, - { url = "https://files.pythonhosted.org/packages/2b/7d/567411e65cce76cf265a9a4f837fd2ebc564bef6368dd42ac03f7a517c0a/hiredis-3.2.1-cp312-cp312-win32.whl", hash = "sha256:33c4604d9f79a13b84da79950a8255433fca7edaf292bbd3364fd620864ed7b2", size = 20551, upload-time = "2025-05-23T11:40:32.69Z" }, - { url = "https://files.pythonhosted.org/packages/90/74/b4c291eb4a4a874b3690ff9fc311a65d5292072556421b11b1d786e3e1d0/hiredis-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7b9749375bf9d171aab8813694f379f2cff0330d7424000f5e92890ad4932dc9", size = 22128, upload-time = "2025-05-23T11:40:33.686Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/91/c07e737288e891c974277b9fa090f0a43c72ab6ccb5182117588f1c01269/hiredis-3.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:7cabf7f1f06be221e1cbed1f34f00891a7bdfad05b23e4d315007dd42148f3d4", size = 82636, upload-time = "2025-05-23T11:40:35.035Z" }, - { url = "https://files.pythonhosted.org/packages/92/20/02cb1820360eda419bc17eb835eca976079e2b3e48aecc5de0666b79a54c/hiredis-3.2.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:db85cb86f8114c314d0ec6d8de25b060a2590b4713135240d568da4f7dea97ac", size = 45404, upload-time = "2025-05-23T11:40:36.113Z" }, - { url = "https://files.pythonhosted.org/packages/87/51/d30a4aadab8670ed9d40df4982bc06c891ee1da5cdd88d16a74e1ecbd520/hiredis-3.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9a592a49b7b8497e4e62c3ff40700d0c7f1a42d145b71e3e23c385df573c964", size = 43301, upload-time = "2025-05-23T11:40:37.557Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7b/2c613e1bb5c2e2bac36e8befeefdd58b42816befb17e26ab600adfe337fb/hiredis-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0079ef1e03930b364556b78548e67236ab3def4e07e674f6adfc52944aa972dd", size = 172486, upload-time = "2025-05-23T11:40:38.659Z" }, - { url = "https://files.pythonhosted.org/packages/1e/df/8f2c4fcc28d6f5178b25ee1ba2157cc473f9908c16ce4b8e0bdd79e38b05/hiredis-3.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d6a290ed45d9c14f4c50b6bda07afb60f270c69b5cb626fd23a4c2fde9e3da1", size = 168532, upload-time = "2025-05-23T11:40:39.843Z" }, - { url = "https://files.pythonhosted.org/packages/88/ae/d0864ffaa0461e29a6940a11c858daf78c99476c06ed531b41ad2255ec25/hiredis-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79dd5fe8c0892769f82949adeb021342ca46871af26e26945eb55d044fcdf0d0", size = 183216, upload-time = "2025-05-23T11:40:41.005Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/17/558e831b77692d73f5bcf8b493ab3eace9f11b0aa08839cdbb87995152c7/hiredis-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998a82281a159f4aebbfd4fb45cfe24eb111145206df2951d95bc75327983b58", size = 172689, upload-time = "2025-05-23T11:40:42.153Z" }, - { url = "https://files.pythonhosted.org/packages/35/b9/4fccda21f930f08c5072ad51e825d85d457748138443d7b510afe77b8264/hiredis-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41fc3cd52368ffe7c8e489fb83af5e99f86008ed7f9d9ba33b35fec54f215c0a", size = 173319, upload-time = "2025-05-23T11:40:43.328Z" }, - { url = "https://files.pythonhosted.org/packages/3d/8b/596d613588b0a3c58dfcf9a17edc6a886c4de6a3096e27c7142a94e2304d/hiredis-3.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8d10df3575ce09b0fa54b8582f57039dcbdafde5de698923a33f601d2e2a246c", size = 166695, upload-time = "2025-05-23T11:40:44.453Z" }, - { url = "https://files.pythonhosted.org/packages/e7/5b/6a1c266e9f6627a8be1fa0d8622e35e35c76ae40cce6d1c78a7e6021184a/hiredis-3.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ab010d04be33735ad8e643a40af0d68a21d70a57b1d0bff9b6a66b28cca9dbf", size = 165181, upload-time = "2025-05-23T11:40:45.697Z" }, - { url = "https://files.pythonhosted.org/packages/6c/70/a9b91fa70d21763d9dfd1c27ddd378f130749a0ae4a0645552f754b3d1fc/hiredis-3.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ec3b5f9ea34f70aaba3e061cbe1fa3556fea401d41f5af321b13e326792f3017", size = 177589, upload-time = "2025-05-23T11:40:46.903Z" }, - { url = "https://files.pythonhosted.org/packages/1a/c7/31bbb015156dc4441f6e19daa9598266a61445bf3f6e14c44292764638f6/hiredis-3.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:158dfb505fff6bffd17f823a56effc0c2a7a8bc4fb659d79a52782f22eefc697", size = 169883, upload-time = "2025-05-23T11:40:48.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/44/cddc23379e0ce20ad7514b2adb2aa2c9b470ffb1ca0a2d8c020748962a22/hiredis-3.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d632cd0ddd7895081be76748e6fb9286f81d2a51c371b516541c6324f2fdac9", size = 167585, upload-time = "2025-05-23T11:40:49.208Z" }, - { url = "https://files.pythonhosted.org/packages/48/92/8fc9b981ed01fc2bbac463a203455cd493482b749801bb555ebac72923f1/hiredis-3.2.1-cp313-cp313-win32.whl", hash = "sha256:e9726d03e7df068bf755f6d1ecc61f7fc35c6b20363c7b1b96f39a14083df940", size = 20554, upload-time = "2025-05-23T11:40:50.314Z" }, - { url = "https://files.pythonhosted.org/packages/e1/6e/e76341d68aa717a705a2ee3be6da9f4122a0d1e3f3ad93a7104ed7a81bea/hiredis-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:b5b1653ad7263a001f2e907e81a957d6087625f9700fa404f1a2268c0a4f9059", size = 22136, upload-time = "2025-05-23T11:40:51.497Z" }, -] - [[package]] name = "httpcore" version = "1.0.9" @@ -677,30 +1267,30 @@ wheels = [ ] [[package]] -name = "identify" -version = "2.6.15" +name = "idna" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" 
}, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, ] [[package]] -name = "idna" -version = "3.10" +name = "iniconfig" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] -name = "iniconfig" -version = "2.1.0" +name = "itsdangerous" +version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = 
"2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, ] [[package]] @@ -715,76 +1305,175 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "librt" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/39/cb/c1945e506893b5b8577fb45a60c80e3ffe4a82092a04a6f29b0b951d9a24/librt-0.10.0.tar.gz", hash = "sha256:1aba1e8aa4e3307a7be68a74149545fde7451964dc0235a8bec5704a17bdda42", size = 191799, upload-time = "2026-05-05T16:31:23.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/a3/1472717d2325adacc8d335ba2e4078015c09d75b599f3cf48e967b3d306e/librt-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01b4500ca3a625450c032a9142a8e843923ce263fa8a92ad1b38927cabe2fe72", size = 76045, upload-time = "2026-05-05T16:29:18.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/31/bfe32355d4b369aef3d7aa442df663bb5558c2ffa2de286cb2956346bc24/librt-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b7e42d1b3e300d20bfc87e72ffd62f0a92a2cb3c35f7bf90df90c9d2a49f74c", size = 79466, upload-time = "2026-05-05T16:29:20.052Z" }, + { url = "https://files.pythonhosted.org/packages/e9/f1/83f8a2c715ba2cac9b7387a5a5cea25f717f7184320cfe48b36bed9c58e9/librt-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8ef7b8c61ce3a1b597cd3e15348ff1574325165c2e7ce09a718154cde2a7950", size = 242283, upload-time = "2026-05-05T16:29:21.596Z" }, + { url = "https://files.pythonhosted.org/packages/cc/94/c3a4ce94857f0004a542f86662806383611858f522722db58efaec0a1472/librt-0.10.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:e73c84f72d1fa0d6eaa7a1930b436ba8d2c90c58d77bfabb09995a69ad35f6c0", size = 230735, upload-time = "2026-05-05T16:29:23.335Z" }, + { url = "https://files.pythonhosted.org/packages/d1/41/e962bb26c7728eb7b3a69e490d0c800fd9968a6970e390c1f18ddb56093d/librt-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9728cb98713bd862fb8f4fd6a642d1896c86058a41d77c70f3d5cee75e725275", size = 256606, upload-time = "2026-05-05T16:29:24.91Z" }, + { url = "https://files.pythonhosted.org/packages/66/3a/4e46a707b1ecc993fd691071623b9beab89703a63bd21cc7807e06c28209/librt-0.10.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:648b7e941d20acd72f9652115e0e53facd98156d61f9ebf7a812bdef8bdccea9", size = 249739, upload-time = "2026-05-05T16:29:26.648Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/dc5b7eb294656ad23d4ff4cf8514208d54fe1026b909d726a0dc026689c9/librt-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c3e33747c068e86a9007c20fdb777eb5ba8d3d19136d7812f88e69a713041b6f", size = 261414, upload-time = 
"2026-05-05T16:29:28.702Z" }, + { url = "https://files.pythonhosted.org/packages/58/e4/990ed8d12c7f114ac8f8ccd47f7d9bd9704ef61acfcb1df4a05047da7710/librt-0.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d509c745bf7e77d1107cf05e6abb249dc03fad13eb39f2286a49deedaeb2bcd7", size = 256614, upload-time = "2026-05-05T16:29:30.357Z" }, + { url = "https://files.pythonhosted.org/packages/60/eb/52d2726c7fb22818507dc3cc166c8f36dd4a4b68a7be67f12006ac8777c1/librt-0.10.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:786ad5a15e99d0e0e74f3adbeecc198a5ac58f340be07e984723d1e0074838de", size = 255144, upload-time = "2026-05-05T16:29:32.106Z" }, + { url = "https://files.pythonhosted.org/packages/bc/df/bd5591a78f7531fce4b6eb9962aadc6adc9560a01570442a884b6e554abe/librt-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:075582d877a97ee3d8e77bda3689dbe617b14f6469224a2d80b4b6c38e3951aa", size = 279121, upload-time = "2026-05-05T16:29:33.688Z" }, + { url = "https://files.pythonhosted.org/packages/fd/df/7c2b838dfc89a1762dd156d8b0c39848a7a2845d725a50be5a6e021fb8ba/librt-0.10.0-cp311-cp311-win32.whl", hash = "sha256:75ecdc3f5a90065aa2af2e574706c5495adc392520762dcf10b1aa716f0b8090", size = 62593, upload-time = "2026-05-05T16:29:35.152Z" }, + { url = "https://files.pythonhosted.org/packages/91/19/22ff572981049a9d436a083dbea1572d0f5dc068b7353637d2dd9977c8f1/librt-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:b6f6084884131d8a52cb9d7095ff2aa52c1e786d9fdaefab1fb4515415e9e083", size = 70914, upload-time = "2026-05-05T16:29:36.407Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/1697cc64f4a5c7e9bce55e99c6d234a346beaedaefcd1e2ca90dd285f98c/librt-0.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:0140bd62151160047e89b2730cb6f8506cdac5127baa1afb9231e4dd3fe7f681", size = 61176, upload-time = "2026-05-05T16:29:37.62Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/8e/cbb5b6f6e45e65c10a42449a69eaccc44d73e6a081ea752fbc5221c6dc1c/librt-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b4b58a44b407e91f633dafee008de9ddea6aa2a555ed94929c099260910bd0ba", size = 77327, upload-time = "2026-05-05T16:29:38.919Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3d/8233cbee8e99e6a8992f02bfc2dec8d787509566a511d1fde2574ee7473f/librt-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:950b79b11762531bdf45a9df909d2f9a2a8445c70c88665c01d14c8511a27dc5", size = 79971, upload-time = "2026-05-05T16:29:40.96Z" }, + { url = "https://files.pythonhosted.org/packages/87/6f/5264b298cef2b72fc97d2dde56c66181eda35204bf5dcd1ed0c3d0a0a782/librt-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4538453f51be197633b425912c150e25b0667252d3741c53e8368176d98d9d37", size = 246559, upload-time = "2026-05-05T16:29:42.701Z" }, + { url = "https://files.pythonhosted.org/packages/07/7b/19b1b859cc60d5f99276cc2b3144d91556c6d1b1e4ebb50359696bebf7a8/librt-0.10.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:70b955f091beac93e994a0b7ec616934f63b3ea5c3d6d7af847562f935aceca7", size = 235216, upload-time = "2026-05-05T16:29:44.193Z" }, + { url = "https://files.pythonhosted.org/packages/6e/56/a2f40717142a8af46289f57874ef914353d8faccd5e4f8e594ab1e16e8c7/librt-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:483e685e06b6163728ba6c85d74315176be7190f432ec2a41226e5e14355d5f0", size = 263108, upload-time = "2026-05-05T16:29:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/67/ca/15c625c3bdc0167c01e04ef8878317e9713f3bfa788438342f7a94c7b22c/librt-0.10.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ac53d946a009d1a38c44a60812708c9458fb2a239a5f630d8e625571386650f", size = 255280, upload-time = "2026-05-05T16:29:48.087Z" 
}, + { url = "https://files.pythonhosted.org/packages/ed/c5/ba301d571d9e05844e2435b73aba30bee77bb75ce155c9affcfd2173dd03/librt-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bc8771c9fcf0ea894ca41fdc2abd83572c2fbda221f232d86e718614e57ff513", size = 268829, upload-time = "2026-05-05T16:29:49.628Z" }, + { url = "https://files.pythonhosted.org/packages/8b/60/af70e135bc1f1fe15dd3894b1e4bbefc7ecdf911749a925a39eb86ceb2a1/librt-0.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:70805dbc5257892ac572f86290a61e3c8d90224ecce1a8b2d1f7ed51965417f4", size = 262051, upload-time = "2026-05-05T16:29:51.244Z" }, + { url = "https://files.pythonhosted.org/packages/83/c2/c8236eb8b421bac5a172ba208f965abaa89805da2a3fa112bdf1764caf8f/librt-0.10.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d3b4f300f7bcba6e2ff73fb8bef1898479e9772bfa2682998c636391633ec826", size = 264347, upload-time = "2026-05-05T16:29:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/15b6d32bc25dacd4a60886a683d8128d6219910c122202b995a40dd4f8d2/librt-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:943bc943f92f4fb3408fae62485c6a3ad68ce4f2ee205643a39641525c19a276", size = 286482, upload-time = "2026-05-05T16:29:54.675Z" }, + { url = "https://files.pythonhosted.org/packages/fb/8e/b1b959bacd323eb4360579db992513e1406d1c6ef7edb57b5511fd0666fd/librt-0.10.0-cp312-cp312-win32.whl", hash = "sha256:6065c1a758fba1010b41401013903d3d5d2750eab425ddedd584abac31d0630e", size = 62955, upload-time = "2026-05-05T16:29:56.39Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4c/d4cd6e4b9fc24098e63cc85537d1b6689682aee96809c38f08072067cc2b/librt-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:d788ecbe208ab352dab0e105cc06057bf9a2fc7e58cabb0d751ad9e30062b9e2", size = 71191, upload-time = "2026-05-05T16:29:57.682Z" }, + { url = "https://files.pythonhosted.org/packages/2b/19/8641da1f63d24b92354a492f893c022d6b3a0df44e70c8eff49364613983/librt-0.10.0-cp312-cp312-win_arm64.whl", 
hash = "sha256:6003d1f295bdba02656dc81308208fc060d0a51d8c0d0a6db70f7f3c57b9ba0a", size = 61432, upload-time = "2026-05-05T16:29:58.971Z" }, + { url = "https://files.pythonhosted.org/packages/e5/29/681a75c82f4cc90d29e4b257a3299b79fe13fe927a04c57b8109d70b6957/librt-0.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f0ede79d682e73f91c1b599a76d78b7464b9b5d213754cedb13372d9df36e596", size = 77299, upload-time = "2026-05-05T16:30:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/62/24/0c7ca445a55d04be79cac19819437fd094782347fa116f6681844fa6143e/librt-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e0ba0b131fdb336c8b9c948e397f4a7e649d0f783b529f07b647bf4961df392e", size = 79930, upload-time = "2026-05-05T16:30:01.555Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1f/1e2b8f6443ef9e9a81e89486ca70e22f3684f93db003ce6eaefc3d0839b9/librt-0.10.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2728117da2afb96fb957768725ee43dc9a2d73b031e02da424b818a3cdd3a275", size = 246195, upload-time = "2026-05-05T16:30:03.261Z" }, + { url = "https://files.pythonhosted.org/packages/74/61/9dc9e03de0439ad84c1c240aac8b747f12c90cb797ea6042f7bdb8d3410f/librt-0.10.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:723ba80594c49cdf0584196fc430752262605dc9449902fc9bd3d9b79976cb77", size = 234951, upload-time = "2026-05-05T16:30:04.881Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/635223117d7590875bca441275065a3bf491203ad4208bd1cc3ffd90c5a1/librt-0.10.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7292edaaca294a61a978c53a3c7d6130d099b0dfbc8f0a65916cdc6b891b9852", size = 262768, upload-time = "2026-05-05T16:30:06.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/66/b04152d0cd8b6ca2b428a8bd3230343230c35ed304a932f35b5375f2f828/librt-0.10.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:89fe9d539f2c10a1666633eeeac507ce95dd06d9ecc58de3c6390dba156a3d3a", size = 255075, upload-time = "2026-05-05T16:30:08.216Z" }, + { url = "https://files.pythonhosted.org/packages/35/1e/25bac4c7f2ca36f0e612cade186970683cf79153d96beccc3a11a9e19b97/librt-0.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4efa7b9587503fa5b67f40593302b9c8836d211d222ff9f7cafe67be5f8f0b10", size = 268559, upload-time = "2026-05-05T16:30:10.1Z" }, + { url = "https://files.pythonhosted.org/packages/18/54/4601faab35b6632a13200faa146ca62bfd111ffbe2568be430d65c89493a/librt-0.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:22dc982ef59df0136df36092ccbdbb570ced8aafb33e49585739b2f1de1c13b6", size = 261753, upload-time = "2026-05-05T16:30:11.912Z" }, + { url = "https://files.pythonhosted.org/packages/1b/cf/39f4023509e94fade8b074666fa3292db9cb6b34ea5dcbe7af53df9fca1d/librt-0.10.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6f2e5f3606253a84cea719c94a3bb1c54487b5d617d0254d46e0920d8a06be3f", size = 264055, upload-time = "2026-05-05T16:30:13.465Z" }, + { url = "https://files.pythonhosted.org/packages/8e/00/40247209fc46a8e308a91412d5206aedf8efb667ee89eb625820106a5c2f/librt-0.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:40884bfaa1e29f6b6a9be255007d8f359bfc9e61d68bdef8ed3158bfcbc95df9", size = 286190, upload-time = "2026-05-05T16:30:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/d8/6e/5566beb94431a985abe1787af5ef86e087750172ff9d0bbf20f93e88132d/librt-0.10.0-cp313-cp313-win32.whl", hash = "sha256:3cd34cd8254eba756660bff6c2da91278248184301054fe3e4feb073bdd49b14", size = 62949, upload-time = "2026-05-05T16:30:16.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/c2/3ea3301d6c8dff51d39dbe8ed75db3dc92896947d4afb5eeadf821c1e67f/librt-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:7baac5313e2d8dce1386f97777a8d03ab28f5fe1e780b3b9ac2ee7544551fedc", size = 71152, upload-time = "2026-05-05T16:30:17.766Z" }, + { url = "https://files.pythonhosted.org/packages/3c/de/5d49cb92cadcbc77d3abc27b93fd6030ed8437487dde2eae38cab5e6704d/librt-0.10.0-cp313-cp313-win_arm64.whl", hash = "sha256:afc5b4406c8e2515698d922a5c7823a009312835ea58196671fff40e35cb8166", size = 61336, upload-time = "2026-05-05T16:30:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/6a/64/7165e08108cc185a13a9c069f0685e6ef92e70e07fddf7edf5e7348c6316/librt-0.10.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f09588a30e6a22ec624090d72a3ab1a6d4d5485c3ed739603e76aa3c16efa688", size = 76794, upload-time = "2026-05-05T16:30:20.392Z" }, + { url = "https://files.pythonhosted.org/packages/ae/ef/bf8613febf651b90c5222ee79dea5ae58d4cc2b544df69d3033424448934/librt-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:131ade118d12bd7a0adc4e655474a553f1b76cf78385868885944d21d51e45e0", size = 79662, upload-time = "2026-05-05T16:30:22.025Z" }, + { url = "https://files.pythonhosted.org/packages/b6/67/9eddd165c1d8397bdf99b38bf12b5a55b3def5035b49eedb49f2775d1430/librt-0.10.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8b9ab28e40d011c373a189eae900c916e66d6fbecf7983e9e4883089ee085ef", size = 242390, upload-time = "2026-05-05T16:30:23.51Z" }, + { url = "https://files.pythonhosted.org/packages/10/d1/d95da80334501866cd37004ab5d7483220d05862fab4b5405394f0264f0d/librt-0.10.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:67c39bb30da73bae1f293d1ed8bc2f8f6642649dd0928d3600aeff3041ac23d6", size = 232603, upload-time = "2026-05-05T16:30:25.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/fa/e6d64d28718bc1be4e1736fcb037ca1c4dfca927e7167df75a7d5215665e/librt-0.10.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c3273c6b774614f093c8927c2bf1b077d0fefde988fe98f46a333734e5597ab", size = 259187, upload-time = "2026-05-05T16:30:26.772Z" }, + { url = "https://files.pythonhosted.org/packages/72/3f/3fdb77e7f937dad59cfd76b720be7e7643400ec76b2da35befab8d66ba30/librt-0.10.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9dd7c1b86a4baa583ab5db977484b93a2c474e69e96ef3e9538387ea54229cb9", size = 251846, upload-time = "2026-05-05T16:30:28.56Z" }, + { url = "https://files.pythonhosted.org/packages/18/ca/f4d49133dd86a6f55d79eca30bf412fa722f511a9abe67f62f57aa64e66a/librt-0.10.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a77385c5a202e831149f7ad03be9e67cf80e957e52c614e83dcb822c95222eb8", size = 264936, upload-time = "2026-05-05T16:30:30.491Z" }, + { url = "https://files.pythonhosted.org/packages/de/66/a8df2fbadc1f6c1827a096d11c40175bd526133480bd3bc88ec64a03d257/librt-0.10.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c6a5eafa74b5655bad59886138ed68426f098a6beb8cb95a71f2cc3cd8bb33fe", size = 258699, upload-time = "2026-05-05T16:30:32.002Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/1e3c83613fe05451bb969e27b68a573d177f08d5f63533cc29fec0989658/librt-0.10.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:1fc93d0439204c50ab4d1512611ce2c206f1b369b419f69c7c27c761561e3291", size = 259825, upload-time = "2026-05-05T16:30:35.077Z" }, + { url = "https://files.pythonhosted.org/packages/09/24/5e2f926ee9d3ef348d9339526d7062abb5c44d8419e3179528c01d78c102/librt-0.10.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:79e713c178bc7a744adfbee6b4619a288eecc0c914da2a9313a20255abe2f0cf", size = 282548, upload-time = "2026-05-05T16:30:36.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/7d/3e89ed6ad0162561fa8bef9df3195e24263104c955713cd0237d3711fad2/librt-0.10.0-cp314-cp314-win32.whl", hash = "sha256:2eba9d955a68c41d9f326be3da42f163ec3518b7ab20f1c826224e7bed71e0bf", size = 58970, upload-time = "2026-05-05T16:30:38.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/25/579e731c94a7086a268bfa3e7a4945cd47836bebd3cbf3faeafd2e7eaef9/librt-0.10.0-cp314-cp314-win_amd64.whl", hash = "sha256:cbfaf7f5145e9917f5d18bffa298eff6a19d74e7b8b11dabdca95785befe8dbf", size = 67260, upload-time = "2026-05-05T16:30:39.804Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f8/235822b7ae0b2334f12ee18bcf2476d07924077a5efeea57dbe927704be2/librt-0.10.0-cp314-cp314-win_arm64.whl", hash = "sha256:8d6d385d1969849a6b1397114df22714b6ded917bada98668e3e974dc663477e", size = 57156, upload-time = "2026-05-05T16:30:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e3/9b919cbf1e8eb770bf91bb7df28125e0f1daf4587169afefd95402636e9a/librt-0.10.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:6c3a82d3bd32631ef5c79922dfc028520c9ad840255979ab4d908271818039ee", size = 79150, upload-time = "2026-05-05T16:30:42.761Z" }, + { url = "https://files.pythonhosted.org/packages/6a/f5/72a944aa3bc3498169a168087eff58ca48b58bf1b704e59d091fd30739f3/librt-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d64cc66005dc324c9bb1fa3fc2841f529002f6eb15966d55e46d430f56955a6a", size = 82304, upload-time = "2026-05-05T16:30:44.082Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e3/fcc290a33e295019759472dfa794d204e43504b276ac65eab7fd9da20ea3/librt-0.10.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bb562cd28c88cd2c6a9a6c78f99dc39348d6b16c94adc25de0e574acf1176e9", size = 272556, upload-time = "2026-05-05T16:30:45.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/54/546975e4c997573885e7f040a05012f8838e06fb12b0c3c1fbb76254e9d7/librt-0.10.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.manylinux_2_28_i686.whl", hash = "sha256:b809aa2854d019c28773b03605df22adc675ee4f3f4402d673581313e8906119", size = 256941, upload-time = "2026-05-05T16:30:47.059Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f1d03401571b331653acddbd4e8cd955c06d945241dd08b25192fac0d04b/librt-0.10.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc15acabdd519bd4176fdadc2119e5e3093485d86f89138daf47e5b4cedb983a", size = 285855, upload-time = "2026-05-05T16:30:48.86Z" }, + { url = "https://files.pythonhosted.org/packages/0c/08/62cf80ff046c339faf56718b3a940244d4beb70f1c6407289b5830ec11e9/librt-0.10.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b1b2d835307d08ddadd94568e2369648ec9173bd3eea6d7f52a1abe717c81f98", size = 275321, upload-time = "2026-05-05T16:30:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/d9/ea/da5918d4070362e9a4d2ee9cd34f9dc84902daad8fd4275f8504a727ff4e/librt-0.10.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d261c6a2f93335a5167887fb0223e8b98ffce20ee3fde242e8e58a37ece6d0e5", size = 293993, upload-time = "2026-05-05T16:30:52.577Z" }, + { url = "https://files.pythonhosted.org/packages/c9/8d/68b6086bed1fcdc314c640ea04e31e52d18052e08059fa595409d66a51a9/librt-0.10.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e2ffd44963f8e7f68995504d90f9881d64e94dc1d8e310039b9526108fc0c0f7", size = 284254, upload-time = "2026-05-05T16:30:55.086Z" }, + { url = "https://files.pythonhosted.org/packages/06/c8/b810f1d84ec34a5a7ed93d7b510ab04164d75fbdf23088d5c3fbe6b08357/librt-0.10.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:5f285f6455ed495791c4d8630e5af732960adea93cac4c893d15619f2eae53e8", size = 284925, upload-time = "2026-05-05T16:30:56.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/00/3c82d4158c5a2c62528b8fccce65a8c9ad700e480e86f9389387435089a5/librt-0.10.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f6034ff52e663d34c7b82ef2aa2f94ad7c1d939e2368e63b06844bc4d127d2e1", size = 307830, upload-time = "2026-05-05T16:30:58.377Z" }, + { url = "https://files.pythonhosted.org/packages/99/3a/9c635ac3e8a00383ff689161d3eac8a30b3b2ddc711b40471e6b8983ea29/librt-0.10.0-cp314-cp314t-win32.whl", hash = "sha256:657860fd877fba6a241ea088ef99f63ca819945d3c715265da670bad56c37ebe", size = 60147, upload-time = "2026-05-05T16:31:00.293Z" }, + { url = "https://files.pythonhosted.org/packages/dc/e8/6f65f3e565d4ac212cddddd552eacc8035ffdf941ca0ad6fe945a211d41f/librt-0.10.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56ded2d66010203a0cb5af063b609e3f079531a0e5e576d618dece859fd2e1af", size = 68649, upload-time = "2026-05-05T16:31:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/51/78/a0705a67cacd81e5fa01a5035b3adbdfbb43a7b8d4bd27e2b282ae61baf2/librt-0.10.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1ee63f30abf18ed4830fdbaf87b2b6f4bba1e198d46085c314edde4045e56715", size = 58247, upload-time = "2026-05-05T16:31:03.191Z" }, +] + [[package]] name = "mako" -version = "1.3.10" +version = "1.3.12" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/62/791b31e69ae182791ec67f04850f2f062716bbd205483d63a215f3e062d3/mako-1.3.12.tar.gz", hash = "sha256:9f778e93289bd410bb35daadeb4fc66d95a746f0b75777b942088b7fd7af550a", size = 400219, upload-time = "2026-04-28T19:01:08.512Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = "https://files.pythonhosted.org/packages/bc/b1/a0ec7a5a9db730a08daef1fdfb8090435b82465abbf758a596f0ea88727e/mako-1.3.12-py3-none-any.whl", hash = "sha256:8f61569480282dbf557145ce441e4ba888be453c30989f879f0d652e39f53ea9", size = 78521, upload-time = "2026-04-28T19:01:10.393Z" }, ] [[package]] name = "markdown-it-py" -version = "4.0.0" +version = "4.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/ff/7841249c247aa650a76b9ee4bbaeae59370dc8bfd2f6c01f3630c35eb134/markdown_it_py-4.2.0.tar.gz", hash = "sha256:04a21681d6fbb623de53f6f364d352309d4094dd4194040a10fd51833e418d49", size = 82454, upload-time = "2026-05-07T12:08:28.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, + { url = "https://files.pythonhosted.org/packages/b3/81/4da04ced5a082363ecfa159c010d200ecbd959ae410c10c0264a38cac0f5/markdown_it_py-4.2.0-py3-none-any.whl", hash = "sha256:9f7ebbcd14fe59494226453aed97c1070d83f8d24b6fc3a3bcf9a38092641c4a", size = 91687, upload-time = "2026-05-07T12:08:27.182Z" }, ] [[package]] name = "markupsafe" -version = "3.0.2" -source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = 
"2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url 
= "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] [[package]] @@ -796,36 +1485,172 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = "2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, 
+ { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, 
+ { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = 
"2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = 
"2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + [[package]] name = "mypy" -version = "1.16.0" +version = "2.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "ast-serialize" }, + { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, { name = "mypy-extensions" }, { name = "pathspec" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d4/38/13c2f1abae94d5ea0354e146b95a1be9b2137a0d506728e0da037c4276f6/mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab", size = 3323139, upload-time = "2025-05-29T13:46:12.532Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/24/c4/ff2f79db7075c274fe85b5fff8797d29c6b61b8854c39e3b7feb556aa377/mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab", size = 10884498, upload-time = "2025-05-29T13:18:54.066Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/07/12198e83006235f10f6a7808917376b5d6240a2fd5dce740fe5d2ebf3247/mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2", size = 10011755, upload-time = "2025-05-29T13:34:00.851Z" }, - { url = "https://files.pythonhosted.org/packages/f1/9b/5fd5801a72b5d6fb6ec0105ea1d0e01ab2d4971893076e558d4b6d6b5f80/mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff", size = 11800138, upload-time = "2025-05-29T13:32:55.082Z" }, - { url = "https://files.pythonhosted.org/packages/2e/81/a117441ea5dfc3746431e51d78a4aca569c677aa225bca2cc05a7c239b61/mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666", size = 12533156, upload-time = "2025-05-29T13:19:12.963Z" }, - { url = "https://files.pythonhosted.org/packages/3f/38/88ec57c6c86014d3f06251e00f397b5a7daa6888884d0abf187e4f5f587f/mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c", size = 12742426, upload-time = "2025-05-29T13:20:22.72Z" }, - { url = "https://files.pythonhosted.org/packages/bd/53/7e9d528433d56e6f6f77ccf24af6ce570986c2d98a5839e4c2009ef47283/mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b", size = 9478319, upload-time = "2025-05-29T13:21:17.582Z" }, - { url = "https://files.pythonhosted.org/packages/70/cf/158e5055e60ca2be23aec54a3010f89dcffd788732634b344fc9cb1e85a0/mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13", size = 11062927, upload-time = "2025-05-29T13:35:52.328Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/34/cfff7a56be1609f5d10ef386342ce3494158e4d506516890142007e6472c/mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090", size = 10083082, upload-time = "2025-05-29T13:35:33.378Z" }, - { url = "https://files.pythonhosted.org/packages/b3/7f/7242062ec6288c33d8ad89574df87c3903d394870e5e6ba1699317a65075/mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1", size = 11828306, upload-time = "2025-05-29T13:21:02.164Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5f/b392f7b4f659f5b619ce5994c5c43caab3d80df2296ae54fa888b3d17f5a/mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8", size = 12702764, upload-time = "2025-05-29T13:20:42.826Z" }, - { url = "https://files.pythonhosted.org/packages/9b/c0/7646ef3a00fa39ac9bc0938626d9ff29d19d733011be929cfea59d82d136/mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730", size = 12896233, upload-time = "2025-05-29T13:18:37.446Z" }, - { url = "https://files.pythonhosted.org/packages/6d/38/52f4b808b3fef7f0ef840ee8ff6ce5b5d77381e65425758d515cdd4f5bb5/mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec", size = 9565547, upload-time = "2025-05-29T13:20:02.836Z" }, - { url = "https://files.pythonhosted.org/packages/97/9c/ca03bdbefbaa03b264b9318a98950a9c683e06472226b55472f96ebbc53d/mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b", size = 11059753, upload-time = "2025-05-29T13:18:18.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/92/79a969b8302cfe316027c88f7dc6fee70129490a370b3f6eb11d777749d0/mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0", size = 10073338, upload-time = "2025-05-29T13:19:48.079Z" }, - { url = "https://files.pythonhosted.org/packages/14/9b/a943f09319167da0552d5cd722104096a9c99270719b1afeea60d11610aa/mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b", size = 11827764, upload-time = "2025-05-29T13:46:04.47Z" }, - { url = "https://files.pythonhosted.org/packages/ec/64/ff75e71c65a0cb6ee737287c7913ea155845a556c64144c65b811afdb9c7/mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d", size = 12701356, upload-time = "2025-05-29T13:35:13.553Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ad/0e93c18987a1182c350f7a5fab70550852f9fabe30ecb63bfbe51b602074/mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52", size = 12900745, upload-time = "2025-05-29T13:17:24.409Z" }, - { url = "https://files.pythonhosted.org/packages/28/5d/036c278d7a013e97e33f08c047fe5583ab4f1fc47c9a49f985f1cdd2a2d7/mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb", size = 9572200, upload-time = "2025-05-29T13:33:44.92Z" }, - { url = "https://files.pythonhosted.org/packages/99/a3/6ed10530dec8e0fdc890d81361260c9ef1f5e5c217ad8c9b21ecb2b8366b/mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031", size = 2265773, upload-time = "2025-05-29T13:35:18.762Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/cf/dc/7e6d49f04fca40b9dd5c752a51a432ffe67fb45200702bc9eee0cb4bbb26/mypy-2.0.0.tar.gz", hash = "sha256:1a9e3900ac5c40f1fe813506c7739da6e6f0eab2729067ebd94bfb0bbba53532", size = 3869036, upload-time = "2026-05-06T19:26:43.22Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/1e/268b81393b81d64683f670680215553e70ae92c55805915b3440080e05e4/mypy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17b7222e9fdfd352e61fb3131da117e55cc465f701ff232f1bd97a02bbad91f", size = 14580849, upload-time = "2026-05-06T19:23:06.567Z" }, + { url = "https://files.pythonhosted.org/packages/6e/32/d159a8002d9e5c44e59ece9d641a26956c89be5b6827f819d9a9dc678c65/mypy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc0a61adea1a5ffc2d47a4dc4bb180d8103f477fc2a90a1cdcbb168c2cc6caff", size = 13444955, upload-time = "2026-05-06T19:25:11.982Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5d/3b28d5a2799591da0ee5490418e94497eaf5d701e42d8b001b5e17a9b3d6/mypy-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8578f857b519993d065e5805290b71467ebfae772407a5f57e823755e4fdb850", size = 13873124, upload-time = "2026-05-06T19:20:39.684Z" }, + { url = "https://files.pythonhosted.org/packages/60/23/f40f723955617b814d5ddc1154d8938b77aaf6926c2dbf72846e8943a0b7/mypy-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33f668a37a650df60f7b825c1ac61e6baadd4ac3c89519e929badde58d28edf5", size = 14748822, upload-time = "2026-05-06T19:25:30.972Z" }, + { url = "https://files.pythonhosted.org/packages/d6/16/eded971224a483e422a141ffd580c00e1b919df8e529f06d03a4a987878c/mypy-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29ea6da86c8c5e9addd48fa6e624f467341b3814f54ded871b28980468686dea", size = 14992675, upload-time = "2026-05-06T19:23:34.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/6a/1cbd7290f00b4dbaa4c4502e53ac05645ea635e4d1e3dcd42687c2fc39cd/mypy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:904baa0124ebbccf0c7ba94f722cf9186ee30478f5e5b11432ffc8929248ee55", size = 10983628, upload-time = "2026-05-06T19:26:39.48Z" }, + { url = "https://files.pythonhosted.org/packages/83/3f/8caa9bcc2636cd512642050747466b695fa2540d7040544fd7ddb721d671/mypy-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:440165501295e523bf1e5d3e411b62b367b901c65610938e75f0e56ba0462461", size = 9906041, upload-time = "2026-05-06T19:24:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/f6/4b/f6cd12ef1eb63be1c342da3e8ca811d2280276177f6de4ef20cb2366d79b/mypy-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:660790551c988e69d8bf7a35c8b4149edeb22f4a339165702be843532e9dcdb5", size = 14756610, upload-time = "2026-05-06T19:26:19.221Z" }, + { url = "https://files.pythonhosted.org/packages/32/73/67d09ca28bee21feaca264b2a680cf2d300bcc2071136ad064928324c843/mypy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7a15bf92cd8781f8e72f69ffa7e30d1f434402d065ee1ecd5223ef2ef100f914", size = 13554270, upload-time = "2026-05-06T19:26:08.977Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/44718b5c6b1b5a27440ff2effe6a1be0fa2a190c0f4e2e21a83728416f95/mypy-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ff370b43d7def05bbcd2f5267f0bcda72dd6a552ef2ea9375b02d6fe06da270", size = 13924663, upload-time = "2026-05-06T19:21:24.932Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2b/bbb9cc5773f946846a7c340097e59bcf84095437dda0d56bb4f6cf1f6541/mypy-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37bd246590a018e5a11703b7b09c39d47ede3df5ba3fa863c5b8590b465beb01", size = 14946862, upload-time = "2026-05-06T19:24:23.023Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/25/e9318566f443a5130b4ff0ad3367ee6c4c4c49ff083fe5214a7318c18282/mypy-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cce87e92214fac8bf8feb8a680d0c1b6fb748d50e9b57fbb13e4b1d83a3ed19b", size = 15175090, upload-time = "2026-05-06T19:26:28.794Z" }, + { url = "https://files.pythonhosted.org/packages/67/65/2ec28c834f21e164c33bc296a7db538ad50c74f83e517c7a0be95ff6de86/mypy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e19e9cb69b66a4141009d24898259914fa2b71d026de0b46edf9fafdbf4fd46e", size = 11052899, upload-time = "2026-05-06T19:25:39.084Z" }, + { url = "https://files.pythonhosted.org/packages/9e/72/d1ec625cfc9bd101c07a6834ef1f94e820296f8fdbad2eb03f50e0983f8c/mypy-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:b021614cb08d44785b025982163ec3c39c94bff766ead071fa9e82b4ef6f62cd", size = 9972935, upload-time = "2026-05-06T19:23:24.204Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c6/996a1e535e5d0d597c3b1460fc962733091f885f312e749350eb2ac10965/mypy-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ef5f581b61240d1cc629b12f8df6565ed6ffac0d82ed745eef7833222ab50b9", size = 14737259, upload-time = "2026-05-06T19:20:23.081Z" }, + { url = "https://files.pythonhosted.org/packages/94/c5/0f9460e26b77f434bd53f47d1ce32a3cd4580c92a5331fa5dfc059f9421a/mypy-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20e3470a165dbc249bdfbe8d1c5172727ef22688cffc279f8c3aa264ab9d4d9a", size = 13538377, upload-time = "2026-05-06T19:21:08.804Z" }, + { url = "https://files.pythonhosted.org/packages/b2/3e/8ea2f8dd1e5c9c279fb3c28193bdb850adf4d3d8172880abad829eced609/mypy-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:224ba142eee8b4d65d4db657cb1fc22abec30b135ded6ab297302ba1f62e505d", size = 13914264, upload-time = "2026-05-06T19:24:12.875Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/ce/78bd3b8520f676acee9dab48ea71473e68f6d5cf14b59fbd800bea50a92b/mypy-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e879ad8a03908ff74d15e8a9b42bf049918e6798d52c011011f1873d0b5877e", size = 14926761, upload-time = "2026-05-06T19:20:12.846Z" }, + { url = "https://files.pythonhosted.org/packages/61/ef/b52fa340522da3d22e669117c3b83155c2660f7cdc035856958fbfffb224/mypy-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:65c5c15bcbd18d6fe927cc55c459597a3517d69cc3123f067be3b020010e115e", size = 15157014, upload-time = "2026-05-06T19:25:49.78Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0c/dde7614250c6d017936c7aa3bb63b9b52c7cfd298d3f1be9be45f307870b/mypy-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:d1a068acd7c9fb77e9f8923f1556f2f49d6d7895821121b8d97fa5642b9c52f5", size = 11067049, upload-time = "2026-05-06T19:21:16.116Z" }, + { url = "https://files.pythonhosted.org/packages/27/ec/1d6af4830a94a285442db19caa02f160cc1a255e4f324eec5458e6c2bafb/mypy-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:ef9d96da1ddffbc21f27d3939319b6846d12393baa17c4d2f3e81e040e73ce2c", size = 9967903, upload-time = "2026-05-06T19:22:15.52Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/6fefe954207860aed6eeb91776795e64a257d3ce0360862288984ce121f5/mypy-2.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c918c64e8ce36557851b0347f84eb12f1965d3a06813c36df253eb0c0afd1d82", size = 14729633, upload-time = "2026-05-06T19:24:53.383Z" }, + { url = "https://files.pythonhosted.org/packages/23/d6/d336f5b820af189eb0390cce21de62d264c0a4e64713dfbe81bfc4fc7739/mypy-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:301f1a8ccc7d79b542ee218b28bb49443a83e194eb3d10da63ff1649e5aa5d34", size = 13559524, upload-time = "2026-05-06T19:22:24.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/a6/d7bb54fde1770f0484e5fbdbdce37a41e95ed0a1cd493ec60ead111e356c/mypy-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdf4ef489d44ce350bac3fd699907834e551d4c934e9cc862ef201215ab1558d", size = 13936018, upload-time = "2026-05-06T19:25:02.992Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ba/5be51316b91e6a6bf6e3a8adb3de500e7e1fb5bf9491743b8cbc81a34a2c/mypy-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cde2d0989f912fc850890f727d0d76495e7a6c5bdd9912a1efdb64952b4398d", size = 14910712, upload-time = "2026-05-06T19:25:21.83Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/e2c8c3b373e20ebfb66e6c83a99027fd67df4ec43b08879f74e822d2dc4c/mypy-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdf05693c231a14fe37dbfce192a3a1372c26a833af4a80f550547742952e719", size = 15141499, upload-time = "2026-05-06T19:20:50.924Z" }, + { url = "https://files.pythonhosted.org/packages/12/36/07756f933e00416d912e35878cfcf89a593a3350a885691c0bb85ae0226a/mypy-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:73aee2da33a2237e66cbe84a94780e53599847e86bb3aa7b93e405e8cd9905f2", size = 11240511, upload-time = "2026-05-06T19:21:32.39Z" }, + { url = "https://files.pythonhosted.org/packages/70/05/79ac1f20f2397353f3845f7b8bb5d8006cda7c8ef9092f04f9de3c6135f2/mypy-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:1f6dcd8f39971f41edab2728c877c4ac8b50ad3c387ff2770423b79a05d23910", size = 10149336, upload-time = "2026-05-06T19:22:08.383Z" }, + { url = "https://files.pythonhosted.org/packages/53/e0/0db84e0ebbad6e99e566c68e4b465784f2a2294f7719e8db9d509ef23087/mypy-2.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a04e980b9275c76159da66c6e1723c7798306f9802b31bdaf9358d0c84030ce8", size = 15797362, upload-time = "2026-05-06T19:22:00.835Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/a4/14cc0768164dd53bec48aa41a20270b18df9bf72aa5054278bf133608315/mypy-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:33f9cf4825469b2bc73c53ba55f6d9a9b4cdb60f9e6e228745581520f29b8771", size = 14635914, upload-time = "2026-05-06T19:23:43.675Z" }, + { url = "https://files.pythonhosted.org/packages/08/48/d866a3e23b4dc5974c77d9cf65a435bf22de01a84dd4620917950e233960/mypy-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191675c3c7dc2a5c7722a035a6909c277f14046c5e4e02aa5fbf65f8524f08ad", size = 15270866, upload-time = "2026-05-06T19:22:34.756Z" }, + { url = "https://files.pythonhosted.org/packages/71/eb/de9ef94958eb2078a6b908ceb247757dc384d3a238d3bd6ed7d81de5eaf8/mypy-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c3d26c4321a3b06fc9f04c741e0733af693f82d823f8e64e47b2e63b7f19fa84", size = 16093131, upload-time = "2026-05-06T19:23:56.541Z" }, + { url = "https://files.pythonhosted.org/packages/ad/07/0ab2c1a9d26e90942612724cbd5788f16b7810c5dd39bfcf79286c6c4524/mypy-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:bbcbc4d5917ca6ce12de70e051de7f533e3bf92d548b41a38a2232a6fe356525", size = 16330685, upload-time = "2026-05-06T19:21:42.037Z" }, + { url = "https://files.pythonhosted.org/packages/a6/8f/46f85d1371a5be642dad263828118ae1efd536d91d8bd2000c68acff3920/mypy-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:dbc6ba6d40572ae49268531565793a8f07eac7fc65ad76d482c9b4c8765b6043", size = 12752017, upload-time = "2026-05-06T19:22:44.002Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e6/94ca48800cac19eb28a58188a768aaec0d16cac0f373915f073058ab0855/mypy-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:77926029dfcb7e1a3ecb0acb2ddbb24ca36be03f7d623e1759ad5376be8f6c01", size = 10527097, upload-time = "2026-05-06T19:20:58.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/14/fd0694aa594d6e9f9fd16ce821be2eff295197a273262ef56ddcc1388d68/mypy-2.0.0-py3-none-any.whl", hash = "sha256:8a92b2be3146b4fa1f062af7eb05574cbf3e6eb8e1f14704af1075423144e4e5", size = 2673434, upload-time = "2026-05-06T19:26:32.856Z" }, ] [[package]] @@ -837,40 +1662,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, ] -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, -] - [[package]] name = "packaging" -version = "25.0" +version = "26.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = 
"2026-04-24T20:15:23.917Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, ] [[package]] -name = "pathspec" -version = "0.12.1" +name = "pamqp" +version = "3.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/62/35bbd3d3021e008606cd0a9532db7850c65741bbf69ac8a3a0d8cfeb7934/pamqp-3.3.0.tar.gz", hash = "sha256:40b8795bd4efcf2b0f8821c1de83d12ca16d5760f4507836267fd7a02b06763b", size = 30993, upload-time = "2024-01-12T20:37:25.085Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, + { url = "https://files.pythonhosted.org/packages/ac/8d/c1e93296e109a320e508e38118cf7d1fc2a4d1c2ec64de78565b3c445eb5/pamqp-3.3.0-py2.py3-none-any.whl", hash = "sha256:c901a684794157ae39b52cbf700db8c9aae7a470f13528b9d7b4e5f7202f8eb0", size = 33848, upload-time = "2024-01-12T20:37:21.359Z" }, ] [[package]] -name = "platformdirs" -version = "4.5.0" +name = 
"pathspec" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = "sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = "sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" }, ] [[package]] @@ -883,85 +1699,174 @@ wheels = [ ] [[package]] -name = "pre-commit" -version = "4.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv" }, - { name = "identify" }, - { name = "nodeenv" }, - { name = "pyyaml" }, - { name = "virtualenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, -] - -[[package]] -name = "psycopg2-binary" -version = "2.9.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, - { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, 
upload-time = "2024-10-16T11:19:50.242Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, - { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = 
"2024-10-16T11:20:24.711Z" }, - { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, - { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, - { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, - { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, - { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, - { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, - { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" }, - { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" }, - { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" }, - { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" }, - { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" }, - { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" }, +name = "propcache" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/44/c87281c333769159c50594f22610f77398a47ccbfbbf23074e744e86f87c/propcache-0.5.2.tar.gz", hash = "sha256:01c4fc7480cd0598bb4b57022df55b9ca296da7fc5a8760bd8451a7e63a7d427", size = 50208, upload-time = "2026-05-08T21:02:12.199Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/f1/8a8cc1c2c7e7934ab77e0163414f736fadbc0f5e8dd9673b952355ac175b/propcache-0.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74b70780220e2dd89175ca24b81b68b67c83db499ae611e7f2313cb329801c78", size = 90744, upload-time = "2026-05-08T20:59:45.799Z" }, + { url = "https://files.pythonhosted.org/packages/c2/f4/651b1225e976bd1a2ba5cfba0c29d096581c2636b437e3a9a7ab6276270a/propcache-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4840ab0ae0216d952f4b53dc6d0b992bfc2bedbfe360bdd9b548bc184c08959", size = 52033, upload-time = "2026-05-08T20:59:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/15/a8/8ede85d6aa1f79fc7dc2f8fd2c8d65920b8272c3892903c8a1affde48cfb/propcache-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6844ba6364fb12f403928a82cfd295ab103a2b315c77c747b2dbe4a41894ea7", size = 52754, upload-time = "2026-05-08T20:59:49.202Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fe/b3551b41bbc2f5b5bb088fc6920567cd43101253e68fbaa261339eb96fe1/propcache-0.5.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2293949b855ce597f2826452d17c2d545fb5622379c4ea6fdf525e9b8e8a2511", size = 57573, upload-time = "2026-05-08T20:59:50.778Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/27/ab851ebd1b7172e3e161f5f8d39e315d54a91bea246f01f4d872d3376aef/propcache-0.5.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0fd59b5af35f74da48d905dcbad55449ba13be91823cb05a9bd590bbf5b61660", size = 60645, upload-time = "2026-05-08T20:59:52.227Z" }, + { url = "https://files.pythonhosted.org/packages/95/7d/466b3d18022e9897cbda9c735c493c5bd747d7a4c6f5ea1480b4cec434b6/propcache-0.5.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29f9309a2e42b0d273be006fdb4be2d6c39a47f6f57d8fb1cf9f81481df81b66", size = 61563, upload-time = "2026-05-08T20:59:53.866Z" }, + { url = "https://files.pythonhosted.org/packages/27/1b/16ab7f2cf2041da2f60d156ba64c2484eadf9168075b4ff43c3ef60045af/propcache-0.5.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5aaa2b923c1944ac8febd6609cb373540a5563e7cbcb0fd770f75dace2eb817b", size = 58888, upload-time = "2026-05-08T20:59:55.457Z" }, + { url = "https://files.pythonhosted.org/packages/0a/67/bb777ffd907633563bf35fd859c4ce97b0512c32f4633cf5d1eb7c33512b/propcache-0.5.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66ea454f095ddf5b6b14f56c064c0941c4788be11e18d2464cf643bf7203ff67", size = 59253, upload-time = "2026-05-08T20:59:57.075Z" }, + { url = "https://files.pythonhosted.org/packages/b9/42/64f8d90b73fd9cdc1499b48057ff6d9cd2a98a25734c9bb62ecf07e87061/propcache-0.5.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:95f1e3f4760d404b13c9976c0229b2b49a3c8e2c62a9ce92efdd2b11ada75e3f", size = 57558, upload-time = "2026-05-08T20:59:58.602Z" }, + { url = "https://files.pythonhosted.org/packages/eb/02/dba5bc03c9041f2092ea55a449caf5dfe68352c6654511b29ba0654ddb69/propcache-0.5.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:85341b12b9d55bad0bded24cac341bb34289469e03a11f3f583ea1cc1db0326c", size = 55007, upload-time = 
"2026-05-08T20:59:59.837Z" }, + { url = "https://files.pythonhosted.org/packages/14/c0/43f649c7aa2a77a3b100d84e9dea3a483120ecb608bfe36ce49eaff517fe/propcache-0.5.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:26a4dca084132874e639895c3135dfad5eb20bae209f62d1aeb31b03e601c3c0", size = 60355, upload-time = "2026-05-08T21:00:01.144Z" }, + { url = "https://files.pythonhosted.org/packages/83/c0/435dafd27f1cb4a495381dae60e25883ccfe4020bb72818e8184c1678092/propcache-0.5.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3b199b9b2b3d6a7edf3183ba8a9a137a22b97f7df525feb5ae1eccf026d2a9c6", size = 59057, upload-time = "2026-05-08T21:00:02.401Z" }, + { url = "https://files.pythonhosted.org/packages/53/ae/6e292df9135d659944e96cb3389258e4a663e5b2b5f6c217ef0ddc8d2f73/propcache-0.5.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e59bc9e66329185b93dab73f210f1a37f81cb40f321501db8017c9aea15dba27", size = 61938, upload-time = "2026-05-08T21:00:03.638Z" }, + { url = "https://files.pythonhosted.org/packages/0b/42/314ebc50d8159055411fd6b0bda322ff510e4b1f7d2e4927940ad0f6af20/propcache-0.5.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:552ffadf6ad409844bc5919c42a0a83d88314cedddaea0e41e80a8b8fffe881f", size = 59731, upload-time = "2026-05-08T21:00:04.881Z" }, + { url = "https://files.pythonhosted.org/packages/b8/9b/2da6dee38871c3c8772fabc2758325a5c9077d6d18c597737dc04dd884cd/propcache-0.5.2-cp311-cp311-win32.whl", hash = "sha256:cd416c1de191973c52ff1a12a57446bfc7642797b282d7caf2162d7d1b8aa9a0", size = 38966, upload-time = "2026-05-08T21:00:06.511Z" }, + { url = "https://files.pythonhosted.org/packages/42/4e/f17363fb58c0afe05b067361cb6d86ed2d29de6506779a27547c4d183075/propcache-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:44e488ef40dbb452700b2b1f8188934121f6648f52c295055662d2191959ff82", size = 42135, upload-time = "2026-05-08T21:00:08.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/eb/6af6685077d22e8b33358d3c548e3282706a0b3cd85044ffba4e5dd08e3b/propcache-0.5.2-cp311-cp311-win_arm64.whl", hash = "sha256:54adaa85a22078d1e306304a40984dc5be99d599bf3dc0a24dc98f7daeab89ab", size = 38381, upload-time = "2026-05-08T21:00:09.692Z" }, + { url = "https://files.pythonhosted.org/packages/4a/cb/e27bc2b2737a0bb49962b275efa051e8f1c35a936df7d5139b6b658b7dc9/propcache-0.5.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:806719138ecd720339a12410fb9614ac9b2b2d3a5fdf8235d56981c36f4039ba", size = 95887, upload-time = "2026-05-08T21:00:11.277Z" }, + { url = "https://files.pythonhosted.org/packages/e6/13/b8ae04c59392f8d11c6cd9fb4011d1dc7c86b81225c770280300e259ffe1/propcache-0.5.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:db2b80ea58eab4f86b2beec3cc8b39e8ff9276ac20e96b7cce43c8ae84cd6b5a", size = 54654, upload-time = "2026-05-08T21:00:12.604Z" }, + { url = "https://files.pythonhosted.org/packages/2c/7d/49777a3e20b55863d4794384a38acd460c04157b0a00f8602b0d508b8431/propcache-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e5cbfac9f61484f7e9f3597775500cd3ebe8274e9b050c38f9525c77c97520bf", size = 55190, upload-time = "2026-05-08T21:00:13.935Z" }, + { url = "https://files.pythonhosted.org/packages/44/c7/085d0cd63062e84044e3f05797749c3f8e3938ff3aeb0eb2f69d43fafc91/propcache-0.5.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5dbc581d2814337da56222fab8dc5f161cd798a434e49bac27930aaef798e144", size = 59995, upload-time = "2026-05-08T21:00:15.526Z" }, + { url = "https://files.pythonhosted.org/packages/9c/42/32cf8e3009e92b2645cf1e944f701e8ea4e924dffde1ee26db860bcbf7e4/propcache-0.5.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:857187f381f88c8e2fa2fe56ab94879d011b883d5a2ee5a1b60a8cd2a06846d9", size = 63422, upload-time = "2026-05-08T21:00:16.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/1b/f112433f99fc979431b87a39ef169e3f8df070d99a72792c56d6937ac48b/propcache-0.5.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:178b4a2cdaac1818e2bf1c5a99b94383fa73ea5382e032a48dec07dc5668dc42", size = 64342, upload-time = "2026-05-08T21:00:18.362Z" }, + { url = "https://files.pythonhosted.org/packages/14/15/5574111ae50dd6e879456888c0eadd4c5a869959775854e18e18a6b345f3/propcache-0.5.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f328175a2cde1f0ff2c4ed8ce968b9dcfb55f3a7153f39e2957ed994da13476", size = 61639, upload-time = "2026-05-08T21:00:19.692Z" }, + { url = "https://files.pythonhosted.org/packages/cc/da/4d775080b1490c0ae604acda868bd71aabe3a89ed16f2aa4339eb8a283e7/propcache-0.5.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5671d09a36b06d0fd4a3da0fccbcae360e9b1570924171a15e9e0997f0249fba", size = 61588, upload-time = "2026-05-08T21:00:21.155Z" }, + { url = "https://files.pythonhosted.org/packages/04/ac/f076982cbe2195ee9cf32de5a1e46951d9fb399fc207f390562dd0fd8fb2/propcache-0.5.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80168e2ebe4d3ec6599d10ad8f520304ae1cad9b6c5a95372aef1b66b7bfb53a", size = 60029, upload-time = "2026-05-08T21:00:22.713Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/189be62e0dd898dce3b331e1b8c7a543cd3a405ac0c81fe8ee8a9d5d77e1/propcache-0.5.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:45f11346f884bc47444f6e6647131055844134c3175b629f84952e2b5cd62b64", size = 56774, upload-time = "2026-05-08T21:00:24.001Z" }, + { url = "https://files.pythonhosted.org/packages/ea/9e/93377b9c7939c1ffae98f878dee955efadfd638078bc86dbc21f9d52f651/propcache-0.5.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e778ebd44ef4f66ed60a0416b06b489687db264a9c0b3620362f26489492913", size = 63532, upload-time = "2026-05-08T21:00:25.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/f9/590ef6cfb9b8028d516d287812ece32bb0bc5f11fbb9c8bf6b2e6313fec8/propcache-0.5.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:c0cb9ed24c8964e172768d455a38254c2dd8a552905729ce006cad3d3dda59b1", size = 61592, upload-time = "2026-05-08T21:00:27.186Z" }, + { url = "https://files.pythonhosted.org/packages/b4/5e/70958b3034c297a630bba2f17ca7abc2d5f39a803ad7e370ab79d1ecd022/propcache-0.5.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1d1ad32d9d4355e2be65574fd0bfd3677e7066b009cd5b9b2dee8aa6a6393b33", size = 64788, upload-time = "2026-05-08T21:00:28.8Z" }, + { url = "https://files.pythonhosted.org/packages/12/fd/77fe5936d8c3086ca9048f7f415f122ed82e53884a9ec193646b42deef06/propcache-0.5.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c80f4ba3e8f00189165999a742ee526ebeccedf6c3f7beb0c7df821e9772435a", size = 62514, upload-time = "2026-05-08T21:00:30.098Z" }, + { url = "https://files.pythonhosted.org/packages/cf/74/66bd798b5b3be70aa1b391f5cc9d6a0a5532d7fd3b19ec0b213e72e6ad9d/propcache-0.5.2-cp312-cp312-win32.whl", hash = "sha256:8c7972d8f193740d9175f0998ab38717e6cd322d5935c5b0fef8c0d323fd9031", size = 39018, upload-time = "2026-05-08T21:00:31.622Z" }, + { url = "https://files.pythonhosted.org/packages/61/7c/5c0d34aa3024694d6dcb9271cdbdd08c4e47c1c0ad95ec7e7bc74cdea145/propcache-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:d9ee8826a7d47863a08ac44e1a5f611a462eefc3a194b492da242128bec75b42", size = 42322, upload-time = "2026-05-08T21:00:32.918Z" }, + { url = "https://files.pythonhosted.org/packages/4d/91/875812f1a3feb20ceba818ef39fbe4d92f1081e04ac815c822496d0d038b/propcache-0.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:2800a4a8ead6b28cccd1ec54b59346f0def7922ee1c7598e8499c733cfbb7c84", size = 38172, upload-time = "2026-05-08T21:00:35.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/09/f049e45385503fe67db75a6b6186a7b9f0c3930366dc960522c312a825b1/propcache-0.5.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:099aaf4b4d1a02265b92a977edf00b5c4f63b3b17ac6de39b0d637c9cac0188a", size = 94457, upload-time = "2026-05-08T21:00:36.355Z" }, + { url = "https://files.pythonhosted.org/packages/6b/65/83d1d05655baf63113731bd5a1008435e14f8d1e5a06cbe4ec5b23ad7a31/propcache-0.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:68ce1c44c7a813a7f71ea04315a8c7b330b63db99d059a797a4651bb6f69f117", size = 53835, upload-time = "2026-05-08T21:00:38.072Z" }, + { url = "https://files.pythonhosted.org/packages/a9/12/a6ba6482bb5ea3260c000c9b20881c95fa11c6b30173715668259f844ed7/propcache-0.5.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fc299c129490f55f254cd90be0deca4764e36e9a7c08b4aa588479a3bbed3098", size = 54545, upload-time = "2026-05-08T21:00:39.319Z" }, + { url = "https://files.pythonhosted.org/packages/a9/19/7fa086f5764c59ec8a8e157cd93aa8497acc00aba9dcdec56bfffb32602d/propcache-0.5.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a6ae2198be502c10f09b2516e7b5d019816924bc3183a43ce792a7bd6625e6f4", size = 59886, upload-time = "2026-05-08T21:00:40.621Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e4/5d7663dc8235956c8f5281698a3af1d351d8820341ddd890f59d9a9127f2/propcache-0.5.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6041d31504dc1779d700e1edcfb08eea334b357620b06681a4eabb57a74e574e", size = 63261, upload-time = "2026-05-08T21:00:41.775Z" }, + { url = "https://files.pythonhosted.org/packages/4a/4a/15a03adee24d6350da4292caeac44c34c033d2afe5e87eb370f38854560f/propcache-0.5.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7eabc04151c78a9f4d5bbb5f1faf571e4defeb4b585e0fe95b60ff2dbe4d3d7", size = 64184, upload-time = "2026-05-08T21:00:43.018Z" 
}, + { url = "https://files.pythonhosted.org/packages/8b/c6/979176efdaa3d239e36d503d5af63a0a773b36662ed8f52e5b6a6d9fd40e/propcache-0.5.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4db0ba63d693afd40d249bd93f842b5f144f8fcbb83de05660373bcf30517b1d", size = 61534, upload-time = "2026-05-08T21:00:44.507Z" }, + { url = "https://files.pythonhosted.org/packages/c8/22/63e8cd1bae4c2d2be6493b6b7d10566ddafad88137cfbc99964a1119853c/propcache-0.5.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1dbcf7675229b35d31abb6547d8ebc8c27a830ac3f9a794edff6254873ec7c0a", size = 61500, upload-time = "2026-05-08T21:00:45.796Z" }, + { url = "https://files.pythonhosted.org/packages/60/5a/28e5d9acbac1cc9ccb67045e8c1b943aa8d79fdf39c93bd73cacd68008ea/propcache-0.5.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d310c013aad2c72f1c3f2f8dd3279d460a858c551f97aeb8c63e4693cca7b4d2", size = 59994, upload-time = "2026-05-08T21:00:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/f3/40/db650677f554a95b9c01a7c9d93d629e93a15562f5deb4573c9ee136fed2/propcache-0.5.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:06187263ddad280d05b4d8a8b3bb7d164cbebd469236544a42e6d9b28ac6a4fa", size = 56884, upload-time = "2026-05-08T21:00:48.376Z" }, + { url = "https://files.pythonhosted.org/packages/80/45/70b39b89516ff8b96bf732fa6fded8cef20f293cb1508690101c3c07ec51/propcache-0.5.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3115559b8effafd63b142ea5ed53d63a16ea6469cbc63dce4ee194b42db5d853", size = 63464, upload-time = "2026-05-08T21:00:49.954Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e2/fa59d3a89eac5534293124af4f1d0d0ada091ce4a0ab4610ce03fd2bdd8d/propcache-0.5.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c60462af8e6dc30c35407c7237ea908d777b22862bbee27bc4699c0d8bcdc45a", size = 61588, upload-time = "2026-05-08T21:00:51.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/97/efb547a55c4bc7381cfb202d6a2239ac621045277bc1ea5dfd3a7f0516c0/propcache-0.5.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40314bca9ac559716fe374094fc81c11dcc34b64fd6c585360f5775690505704", size = 64667, upload-time = "2026-05-08T21:00:52.602Z" }, + { url = "https://files.pythonhosted.org/packages/92/56/f5c7d9b4b7595d5127da38974d791b2153f3d1eae6c674af3583ace92ad3/propcache-0.5.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cfa21e036ce1e1db2be04ba3b85d2df1bb1702fa01932d984c5464c665228ff4", size = 62463, upload-time = "2026-05-08T21:00:54.303Z" }, + { url = "https://files.pythonhosted.org/packages/bd/3b/484a3a65fc9f9f60c41dcd17b428bace5389544e2c680994534a20755066/propcache-0.5.2-cp313-cp313-win32.whl", hash = "sha256:f156a3529f38063b6dbaf356e15602a7f95f8055b1295a438433a6386f10463d", size = 38621, upload-time = "2026-05-08T21:00:55.808Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fd/3f0f10dba4dabad3bf53102be007abf55481067952bde0fdddff439e7c61/propcache-0.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:dfed59d0a5aeb01e242e66ff0300bc4a265a7c05f612d30016f0b60b1017d757", size = 41649, upload-time = "2026-05-08T21:00:57.061Z" }, + { url = "https://files.pythonhosted.org/packages/90/ec/6ce619cc32bb500a482f811f9cd509368b4e58e638d13f2c68f370d6b475/propcache-0.5.2-cp313-cp313-win_arm64.whl", hash = "sha256:ba338430e87ceb9c8f0cf754de38a9860560261e56c00376debd628698a7364f", size = 37636, upload-time = "2026-05-08T21:00:58.646Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/c1d268bbbf2ef981c5bf0fbbe746db617c66e3bcefe431a1aa8943fbe23a/propcache-0.5.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a592f5f3da71c8691c788c13cb6734b6d17663d2e1cb8caddf0673d01ef8847d", size = 98872, upload-time = "2026-05-08T21:00:59.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/d4/52c871e73e864e6b34c0e2d58ac1ec5ccd149497ddc7ad2137ae98323a35/propcache-0.5.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6a997d0489e9668a384fcfd5061b857aa5361de73191cac204d04b889cfbbafa", size = 56257, upload-time = "2026-05-08T21:01:01.195Z" }, + { url = "https://files.pythonhosted.org/packages/67/f0/9b90ca2a210b3d09bcfcd96ecd0f55545c091535abce2a45de2775cfd357/propcache-0.5.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:10734b5484ea113152ee25a91dccedf81631791805d2c9ccb054958e51842c94", size = 56696, upload-time = "2026-05-08T21:01:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/9d/0e/6e9d4ba07c8e56e21ddec1e75f12148142b21ca83a51871babce095334f4/propcache-0.5.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cafca7e56c12bb02ae16d283742bef25a61122e9dab2b5b3f2ccbe589ce32164", size = 62378, upload-time = "2026-05-08T21:01:04.475Z" }, + { url = "https://files.pythonhosted.org/packages/65/19/c10badaa463dde8a27ce884f8ee2ec37e6035b7c9f5ff0c8f74f06f08dac/propcache-0.5.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f064f8d2b59177878b7615df1735cd8fe3462ed6be8c7b217d17a276489c2b7f", size = 65283, upload-time = "2026-05-08T21:01:05.959Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b6/93bea99ca80e19cef6512a8580e5b7857bbe09422d9daa7fd4ef5723306c/propcache-0.5.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f78abfa8dfc32376fd1aacf597b2f2fbbe0ea751419aee718af5d4f82537ef8c", size = 66616, upload-time = "2026-05-08T21:01:07.228Z" }, + { url = "https://files.pythonhosted.org/packages/83/e4/5c7462e50625f051f37fb38b8224f7639f667184bbd34424ec83819bb1b7/propcache-0.5.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7467da8a9822bf1a55336f877340c5bcbd3c482afc43a99771169f74a26dedc", size = 
63773, upload-time = "2026-05-08T21:01:08.514Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b6/99238894047b13c823be25027e736626cd414a52a5e30d2c3347c2733529/propcache-0.5.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a6ddc6ac9e25de626c1f129c1b467d7ecd33ce2237d3fd0c4e429feef0a7ee1f", size = 63664, upload-time = "2026-05-08T21:01:09.874Z" }, + { url = "https://files.pythonhosted.org/packages/85/1e/a3a1a63116a2b8edb415a8bb9a6f0c34bd03830b1e18e8ce2904e1dc1cf4/propcache-0.5.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2f22cbbac9e26a8e864c0985ff1268d5d939d53d9d9411a9824279097e03a2cb", size = 62643, upload-time = "2026-05-08T21:01:11.132Z" }, + { url = "https://files.pythonhosted.org/packages/e4/03/893cf147de2fc6543c5eaa07ad833170e7e2a2385725bbebe8c0503723bb/propcache-0.5.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:fc76378c62a0f04d0cd82fbb1a2cd2d7e28fcb40d5873f28a6c44e388aaa2751", size = 59595, upload-time = "2026-05-08T21:01:12.387Z" }, + { url = "https://files.pythonhosted.org/packages/86/3b/04c1a2e12c57766568ba75ba72b3bf2042818d4c1425fab6fc07155c7cff/propcache-0.5.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:acd2c8edba48e31e58a363b8cf4e5c7db3b04b3f9e371f601df30d9b0d244836", size = 65711, upload-time = "2026-05-08T21:01:13.676Z" }, + { url = "https://files.pythonhosted.org/packages/1c/34/80f8d0099f8d6bacc4de1624c85672681c8cd1149ca2da0e38fd120b817f/propcache-0.5.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:452b5065457eb9991ec5eb38ff41d6cd4c991c9ac7c531c4d5849ae473a9a13f", size = 64247, upload-time = "2026-05-08T21:01:14.936Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1a/8b08f3a5f1037e9e370c55883ceeeee0f6dd0416fb2d2d67b8bfc91f2a79/propcache-0.5.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3430bb2bfe1331885c427745a751e774ee679fd4344f80b97bf879815fe8fa55", size = 67102, upload-time = "2026-05-08T21:01:16.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/68/8bdb7bb7756d76e005490649d10e4a8369e610c74d619f71e1aedf889e9c/propcache-0.5.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cef6cea3922890dd6c9654971001fa797b526c16ab5e1e46c05fd6f877be7568", size = 64964, upload-time = "2026-05-08T21:01:17.57Z" }, + { url = "https://files.pythonhosted.org/packages/0a/aa/50fb0b5d3968b61a510926ff8b8465f1d6e976b3ab74496d7a4b9fc42515/propcache-0.5.2-cp313-cp313t-win32.whl", hash = "sha256:72d61e16dd78228b58c5d47be830ff3da7e5f139abdf0aef9d86cde1c5cf2191", size = 42546, upload-time = "2026-05-08T21:01:18.946Z" }, + { url = "https://files.pythonhosted.org/packages/ae/4c/0ddbae64321bd4a95bcbfc19307238016b5b1fee645c84626c8d539e5b74/propcache-0.5.2-cp313-cp313t-win_amd64.whl", hash = "sha256:0958834041a0166d343b8d2cedcd8bcbaeb4fdbe0cf08320c5379f143c3be6e7", size = 46330, upload-time = "2026-05-08T21:01:20.162Z" }, + { url = "https://files.pythonhosted.org/packages/00/d9/9cddc8efb78d8af264c5ec9f6d10b62f57c515feda8d321595f56010fb23/propcache-0.5.2-cp313-cp313t-win_arm64.whl", hash = "sha256:6de8bd93ddde9b992cf2b2e0d796d501a19026b5b9fd87356d7d0779531a8d96", size = 40521, upload-time = "2026-05-08T21:01:21.399Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ea/23ee535d90ce8bcc465a3028eb3cc0ce3bd1005f4bb27710b30587de798d/propcache-0.5.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:46088abff4cba581dea21ae0467a480526cb25aa5f3c269e909f800328bc3999", size = 94662, upload-time = "2026-05-08T21:01:22.683Z" }, + { url = "https://files.pythonhosted.org/packages/b5/06/c5a52f419b5d8972f8d46a7577476090d8e3263ff589ce40b5ca4968d5be/propcache-0.5.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fc88b26f08d634f7bc819a7852e5214f5802641ab8d9fd5326892292eee1993e", size = 53928, upload-time = "2026-05-08T21:01:23.986Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/b1/4260d67d6bd85e58a66b72d54ce15d5de789b6f3870cc6bedf8ff9667401/propcache-0.5.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:97797ebb098e670a2f92dd66f32897e30d7615b14e7f59711de23e30a9072539", size = 54650, upload-time = "2026-05-08T21:01:25.305Z" }, + { url = "https://files.pythonhosted.org/packages/70/06/2f46c318e3307cd7a6a7481def374ce838c0fe20084b39dd54b0879d0e99/propcache-0.5.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba57fffe4ac99c5d30076161b5866336d97600769bad35cc68f7774b15298a4e", size = 59912, upload-time = "2026-05-08T21:01:26.545Z" }, + { url = "https://files.pythonhosted.org/packages/4c/29/fe1aebec2ce57ab985a9c382bded1124431f85078113aa222c5d278430d4/propcache-0.5.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:583c19759d9eec1e5b69e2fbef36a7d9c326041be9746cb822d335c8cedc2979", size = 63300, upload-time = "2026-05-08T21:01:27.937Z" }, + { url = "https://files.pythonhosted.org/packages/b4/18/2334b26768b6c82be8c69e83671b767d5ef426aa09b0cba6c2ea47816774/propcache-0.5.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d0326e2e5e1f3163fa306c834e48e8d490e5fae607a097a40c0648109b47ba80", size = 64208, upload-time = "2026-05-08T21:01:29.484Z" }, + { url = "https://files.pythonhosted.org/packages/2b/76/7f1bfd6afff4c5e38e36a3c6d68eb5f4b7311ea80baf693db78d95b603c4/propcache-0.5.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e00820e192c8dbebcafb383ebbf99030895f09905e7a0eb2e0340a0bcc2bc825", size = 61633, upload-time = "2026-05-08T21:01:31.068Z" }, + { url = "https://files.pythonhosted.org/packages/c4/46/b3ff8aba2b4953a3e50de2cf72f1b5748b8eca93b15f3dc2c84339084c09/propcache-0.5.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c66afea89b1e43725731d2004732a046fe6fe955d51f952c3e95a7314a284a39", size = 61724, upload-time = "2026-05-08T21:01:32.374Z" }, + { url = "https://files.pythonhosted.org/packages/c5/01/814cfcafbcff954f94c01cf30e097ddc88a076b5440fbcf4570753437d40/propcache-0.5.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc37dec6c6cdad0b57881a5658fd14fbf53e333b1a86cf86559f190e1d9ec4", size = 60069, upload-time = "2026-05-08T21:01:33.67Z" }, + { url = "https://files.pythonhosted.org/packages/da/68/5c6f7622d510cc666a300687e06fd060c1a43361c0c9b20d284f06d8096a/propcache-0.5.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:5570dbcc97571c15f68068e529c92715a12f8d54030e272d264b377e22bd17a5", size = 57099, upload-time = "2026-05-08T21:01:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/55/27/9cb0b4c679124085327957d42521c99dba04c88c90c3e55a6f0b633ebccc/propcache-0.5.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f814362777a9f841adddb200ecdf8f5cb1e5a3c4b7a86378edbd6ccb26edd702", size = 63391, upload-time = "2026-05-08T21:01:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/f0/9d/7258aaa5bdf60fc6f27591eef6fe52768cb0beda7140be477c8b12c9794a/propcache-0.5.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:196913dea116aeb5a2ba95af4ddcb7ea85559ae07d8eee8751688310d09168c3", size = 61626, upload-time = "2026-05-08T21:01:37.545Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0d/41c602003e8a9b16fe1e7eadf62c7bfba9d5474370b24200bf48b315f45f/propcache-0.5.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:6e7b8719005dd1175be4ab1cd25e9b98659a5e0347331506ec6760d2773a7fb5", size = 64781, upload-time = "2026-05-08T21:01:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f3/38e66b1856e9bd079deea015bc4a55f7767c0e4db2f7dcf69e7e680ba4ce/propcache-0.5.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:51f96d685ab16e88cab128cd37a52c5da540809c8b879fa047731bfcb4ad35a4", size = 62570, upload-time = 
"2026-05-08T21:01:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/95/ca/bbfe9b910ce57dde8bb4876b4520fc02a4e89497c10de26be936758a3aaa/propcache-0.5.2-cp314-cp314-win32.whl", hash = "sha256:cc6fc3cc62e8501d3ed62894425040d2728ecddb1ed072737a5c70bd537aa9f0", size = 39436, upload-time = "2026-05-08T21:01:41.654Z" }, + { url = "https://files.pythonhosted.org/packages/61/d2/45c9defbaa1ea297035d9d4cce9e8f80daafbf19319c6007f157c6256ea9/propcache-0.5.2-cp314-cp314-win_amd64.whl", hash = "sha256:81e3a30b0bb60caa22033dd0f8a3618d1d67356212514f62c57db75cb0ef410c", size = 42373, upload-time = "2026-05-08T21:01:43.041Z" }, + { url = "https://files.pythonhosted.org/packages/44/68/9ea5103f41d5217d7d6ec24db90018e23aebec070c3f9a6e54d12b841fd8/propcache-0.5.2-cp314-cp314-win_arm64.whl", hash = "sha256:0d2c9bf8528f135dbb805ce027567e09164f7efa51a2be07458a2c0420f292d0", size = 38554, upload-time = "2026-05-08T21:01:44.336Z" }, + { url = "https://files.pythonhosted.org/packages/8a/81/fadf555f42d3b762eea8a53950b0489fdc0aa9da5f8ed9e10ce0a4e01b48/propcache-0.5.2-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:4bc8ff1feffc6a61c7002ffe84634c41b822e104990ae009f44a0834430070bb", size = 99395, upload-time = "2026-05-08T21:01:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c9/c61e134a686949cf7971af3a390148b1156f7be81c73bc0cd12c873e2d48/propcache-0.5.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:79aa3ff0a9b566633b642fa9caf7e21ed1c13d6feca718187873f199e1514078", size = 56653, upload-time = "2026-05-08T21:01:47.307Z" }, + { url = "https://files.pythonhosted.org/packages/cb/73/daf935ea7048ddd7ec8eec5345b4a40b619d2d178b3c0a0900796bc3c794/propcache-0.5.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1b31822f4474c4036bae62de9402710051d431a606d6a0f907fec79935a071aa", size = 56914, upload-time = "2026-05-08T21:01:48.573Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/9f/aba959b435ea18617edd7cf0a7ad0b9c574b8fc7e3d2cd55fb59cb255d33/propcache-0.5.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13fef48778b5a2a756523fdb781326b028ca75e32858b04f2cdd19f394564917", size = 62567, upload-time = "2026-05-08T21:01:49.903Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a1/859942de9a791ff42f6141736f5b37749b8f53e65edfa49638c67dd67e6a/propcache-0.5.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8b73ab70f1a3351fbc71f663b3e645af6dd0329100c353081cf69c37433fc6fe", size = 65542, upload-time = "2026-05-08T21:01:51.204Z" }, + { url = "https://files.pythonhosted.org/packages/b5/61/315bc0fd6c0fc7f80a528b8afd209e5fc4a875ea79571b91b8f50f442907/propcache-0.5.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5538d2c13d93e4698af7e092b57bc7298fd35d1d58e656ae18f23ee0d0378e03", size = 66845, upload-time = "2026-05-08T21:01:52.539Z" }, + { url = "https://files.pythonhosted.org/packages/47/f7/9f8122e3132e8e354ac41975ef8f1099be7d5a16bc7ae562734e993665c0/propcache-0.5.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd645f03898405cabe694fb8bc35241e3a9c332ec85627584fe3de201452b335", size = 63985, upload-time = "2026-05-08T21:01:53.847Z" }, + { url = "https://files.pythonhosted.org/packages/c8/54/c317819ec157cbf6f35df9df9657a6f82daf34d5faf15948b2f639c2192e/propcache-0.5.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a473b3440261e0c60706e732b2ed2f517857344fc21bf48fdfe211e2d98eb285", size = 63999, upload-time = "2026-05-08T21:01:55.179Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/387e3f7dfce0a9233df41fb888aa1c30222cb4bbbf09537c02dd9bd85fe2/propcache-0.5.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:7afa37062e6650640e932e4cc9297d81f9f42d9944029cc386b8247dea4da837", size = 62779, upload-time = "2026-05-08T21:01:57.489Z" }, + { url = "https://files.pythonhosted.org/packages/a1/9c/596784cb5824ed61ee960d3f8655a3f0993e107c6e98ab6c818b7fb92ccb/propcache-0.5.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:8a90efd5777e996e42d568db9ac740b944d691e565cbfd31b2f7832f9184b2b8", size = 59796, upload-time = "2026-05-08T21:01:58.736Z" }, + { url = "https://files.pythonhosted.org/packages/c2/3d/1a6cfa1726a48542c1e8784a0761421476a5b68e09b7f36bf95eb954aaba/propcache-0.5.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:f19bb891234d72535764d703bfed1153cc34f4214d5bd7150aee1eec9e8f4366", size = 66023, upload-time = "2026-05-08T21:02:00.228Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0e/05fd6990369477076e4e280bcb970de760fddf0161a46e988bc95f7940ec/propcache-0.5.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:32775082acd2d807ee3db715c7770d38767b817870acfa08c29e057f3c4d5b56", size = 64448, upload-time = "2026-05-08T21:02:01.888Z" }, + { url = "https://files.pythonhosted.org/packages/cd/86/5f8da315a4309c62c10c0b2516b17492d5d3bbe1bb862b96604db67e2a37/propcache-0.5.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9282fb1a3bccd038da9f768b927b24a0c753e466c086b7c4f3c6982851eefb2d", size = 67329, upload-time = "2026-05-08T21:02:03.484Z" }, + { url = "https://files.pythonhosted.org/packages/da/d3/3368efe79ab21f0cdf86ef49895811c9cc933131d4cde1f28a624e22e712/propcache-0.5.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc49723e2f60d6b32a0f0b08a3fd6d13203c07f1cd9566cfce0f12a917c967a2", size = 65172, upload-time = "2026-05-08T21:02:04.745Z" }, + { url = "https://files.pythonhosted.org/packages/d5/07/127e8b0bacfb325396196f9d976a22453049b89b9b2b08477cc3145faa44/propcache-0.5.2-cp314-cp314t-win32.whl", hash = "sha256:2d7aa89ebca5acc98cba9d1472d976e394782f587bad6661003602a619fd1821", size = 43813, upload-time = "2026-05-08T21:02:06.025Z" 
}, + { url = "https://files.pythonhosted.org/packages/88/fb/46dad6c0ae49ed230ab1b16c890c2b6314e2403e6c412976f4a72d64a527/propcache-0.5.2-cp314-cp314t-win_amd64.whl", hash = "sha256:d447bb0b3054be5818458fbb171208b1d9ff11eba14e18ca18b90cbb45767370", size = 47764, upload-time = "2026-05-08T21:02:07.353Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/a47d0a63aa309d10d59ede6e9d4cff03a344a79d1f0f4cd0cd74997b53e0/propcache-0.5.2-cp314-cp314t-win_arm64.whl", hash = "sha256:fe67a3d11cd9b4efabfa45c3d00ffba2b26811442a73a581a94b67c2b5faccf6", size = 41140, upload-time = "2026-05-08T21:02:09.065Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ed/1cdcab6ba3d6ab7feca11fc14f0eeea80755bb53ef4e892079f31b10a25f/propcache-0.5.2-py3-none-any.whl", hash = "sha256:be1ddfcbb376e3de5d2e2db1d58d6d67463e6b4f9f040c000de8e300295465fe", size = 14036, upload-time = "2026-05-08T21:02:10.673Z" }, +] + +[[package]] +name = "psutil" +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, ] [[package]] name = "pyasn1" -version = "0.6.1" +version = "0.6.3" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, ] [[package]] name = "pycparser" -version = "2.22" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pycron" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/5d/340be12ae4a69c33102dfb6ddc1dc6e53e69b2d504fa26b5d34a472c3057/pycron-3.2.0.tar.gz", hash = "sha256:e125a28aca0295769541a40633f70b602579df48c9cb357c36c28d2628ba2b13", size = 4248, upload-time = "2025-06-05T13:24:12.636Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/76/caf316909f4545e7158e0e1defd8956a1da49f4af04f5d16b18c358dfeac/pycron-3.2.0-py3-none-any.whl", hash = "sha256:6d2349746270bd642b71b9f7187cf13f4d9ee2412b4710396a507b5fe4f60dac", size = 4904, upload-time = "2025-06-05T13:24:11.477Z" }, ] [[package]] name = "pydantic" -version = "2.12.5" +version = "2.13.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -969,9 +1874,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/a5/b60d21ac674192f8ab0ba4e9fd860690f9b4a6e51ca5df118733b487d8d6/pydantic-2.13.4.tar.gz", hash = 
"sha256:c40756b57adaa8b1efeeced5c196f3f3b7c435f90e84ea7f443901bec8099ef6", size = 844775, upload-time = "2026-05-06T13:43:05.343Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/fd/7b/122376b1fd3c62c1ed9dc80c931ace4844b3c55407b6fb2d199377c9736f/pydantic-2.13.4-py3-none-any.whl", hash = "sha256:45a282cde31d808236fd7ea9d919b128653c8b38b393d1c4ab335c62924d9aba", size = 472262, upload-time = "2026-05-06T13:43:02.641Z" }, ] [package.optional-dependencies] @@ -981,136 +1886,145 @@ email = [ [[package]] name = "pydantic-core" -version = "2.41.5" +version = "2.46.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, - { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, - { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, - { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, - { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, - { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, 
upload-time = "2025-11-04T13:39:44.553Z" }, - { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, - { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, - { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, - { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, - { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, - { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { 
url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, 
upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - 
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, - { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, - { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, - { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = 
"2025-11-04T13:42:47.156Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, - { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, - { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, - { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, - { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, - { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, - { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/9d/56/921726b776ace8d8f5db44c4ef961006580d91dc52b803c489fafd1aa249/pydantic_core-2.46.4.tar.gz", hash = "sha256:62f875393d7f270851f20523dd2e29f082bcc82292d66db2b64ea71f64b6e1c1", size = 471464, upload-time = "2026-05-06T13:37:06.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/fa/6d7708d2cfc1a832acb6aeb0cd16e801902df8a0f583bb3b4b527fde022e/pydantic_core-2.46.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0e96592440881c74a213e5ad528e2b24d3d4f940de2766bed9010ab1d9e51594", size = 2111872, upload-time = "2026-05-06T13:40:27.596Z" }, + { url = "https://files.pythonhosted.org/packages/ae/6f/aa064a3e74b5745afbdf250594f38e7ead05e2d651bcb35994b9417a0d4d/pydantic_core-2.46.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0d65b8c354be7fb5f720c3caa8bc940bc2d20ce749c8e06135f07f8ed95dd7c", size = 1948255, upload-time = "2026-05-06T13:39:12.574Z" }, + { url = "https://files.pythonhosted.org/packages/43/3a/41114a9f7569b84b4d84e7a018c57c56347dac30c0d4a872946ec4e36c46/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bfb192b3f4b9e8a89b6277b6ce787564f62cfd272055f6e685726b111dc7826", size = 1972827, upload-time = "2026-05-06T13:38:19.841Z" }, + { url = "https://files.pythonhosted.org/packages/ef/25/1ab42e8048fe551934d9884e8d64daa7e990ad386f310a15981aeb6a5b08/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9037063db01f09b09e237c282b6792bd4da634b5402c4e7f0c61effed7701a04", size = 2041051, upload-time = "2026-05-06T13:38:10.447Z" }, + { url = "https://files.pythonhosted.org/packages/94/c2/1a934597ddf08da410385b3b7aae91956a5a76c635effef456074fad7e88/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fc010ab034c8c7452522748bf937df58020d256ccae0874463d1f4d01758af8e", size = 2221314, upload-time = "2026-05-06T13:40:13.089Z" }, + { url = "https://files.pythonhosted.org/packages/02/6d/9e8ad178c9c4df27ad3c8f25d1fe2a7ab0d2ba0559fad4aee5d3d1f16771/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5dac79fa1614d1e06ca695109c6105923bd9c7d1d6c918d4e637b7e6b32fd3", size = 2285146, upload-time = "2026-05-06T13:38:59.224Z" }, + { url = "https://files.pythonhosted.org/packages/80/50/540cd3aeefc041beb111125c4bff779831a2111fc6b15a9138cda277d32c/pydantic_core-2.46.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fa868638bf362d3d138ea55829cefb3d5f4b0d7f142234382a15e2485dbec4", size = 2089685, upload-time = "2026-05-06T13:38:17.762Z" }, + { url = "https://files.pythonhosted.org/packages/6b/a4/b440ad35f05f6a38f89fa0f149accb3f0e02be94ca5e15f3c449a61b4bc9/pydantic_core-2.46.4-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:17299feefe090f2caa5b8e37222bb5f663e4935a8bfa6931d4102e5df1a9f398", size = 2115420, upload-time = "2026-05-06T13:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/99/61/de4f55db8dfd57bfdfa9a12ec90fe1b57c4f41062f7ca86f08586b3e0ac0/pydantic_core-2.46.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4c63ebc82684aa89d9a3bcbd13d515b3be44250dc68dd3bd81526c1cb31286c3", size = 2165122, upload-time = "2026-05-06T13:37:01.167Z" }, + { url = "https://files.pythonhosted.org/packages/f7/52/7c529d7bdb2d1068bd52f51fe32572c8301f9a4febf1948f10639f1436f5/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:aaa2a54443eff1950ba5ddc6b6ccda0d9c84a364276a62f969bdf2a390650848", size = 2182573, upload-time = "2026-05-06T13:38:45.04Z" }, + { url = "https://files.pythonhosted.org/packages/37/b3/7c40325848ba78247f2812dcf9c7274e38cd801820ca6dd9fe63bcfb0eb4/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = 
"sha256:18e5ceec2ab67e6d5f1a9085e5a24c9c4e2ac4545730bfe668680bca05e555f3", size = 2317139, upload-time = "2026-05-06T13:37:15.539Z" }, + { url = "https://files.pythonhosted.org/packages/d9/37/f913f81a657c865b75da6c0dbed79876073c2a43b5bd9edbe8da785e4d49/pydantic_core-2.46.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a0f62d0a58f4e7da165457e995725421e0064f2255d8eccebc49f41bbc23b109", size = 2360433, upload-time = "2026-05-06T13:37:30.099Z" }, + { url = "https://files.pythonhosted.org/packages/c4/67/6acaa1be2567f9256b056d8477158cac7240813956ce86e49deae8e173b4/pydantic_core-2.46.4-cp311-cp311-win32.whl", hash = "sha256:041bde0a48fd37cf71cab1c9d56d3e8625a3793fef1f7dd232b3ff37e978ecda", size = 1985513, upload-time = "2026-05-06T13:38:15.669Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e6/c505f83dfeda9a2e5c995cfd872949e4d05e12f7feb3dca72f633daefa94/pydantic_core-2.46.4-cp311-cp311-win_amd64.whl", hash = "sha256:6f2eeda33a839975441c86a4119e1383c50b47faf0cbb5176985565c6bb02c33", size = 2071114, upload-time = "2026-05-06T13:40:35.416Z" }, + { url = "https://files.pythonhosted.org/packages/0f/da/7a263a96d965d9d0df5e8de8a475f33495451117035b09acb110288c381f/pydantic_core-2.46.4-cp311-cp311-win_arm64.whl", hash = "sha256:14f4c5d6db102bd796a627bbb3a17b4cf4574b9ae861d8b7c9a9661c6dd3362d", size = 2044298, upload-time = "2026-05-06T13:38:29.754Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8c/af022f0af448d7747c5154288d46b5f2bc5f17366eaa0e23e9aa04d59f3b/pydantic_core-2.46.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3245406455a5d98187ec35530fd772b1d799b26667980872c8d4614991e2c4a2", size = 2106158, upload-time = "2026-05-06T13:38:57.215Z" }, + { url = "https://files.pythonhosted.org/packages/19/95/6195171e385007300f0f5574592e467c568becce2d937a0b6804f218bc49/pydantic_core-2.46.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:962ccbab7b642487b1d8b7df90ef677e03134cf1fd8880bf698649b22a69371f", size = 1951724, upload-time = 
"2026-05-06T13:37:02.697Z" }, + { url = "https://files.pythonhosted.org/packages/8e/bc/f47d1ff9cbb1620e1b5b697eef06010035735f07820180e74178226b27b3/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8233f2947cf85404441fd7e0085f53b10c93e0ee78611099b5c7237e36aacbf7", size = 1975742, upload-time = "2026-05-06T13:37:09.448Z" }, + { url = "https://files.pythonhosted.org/packages/5b/11/9b9a5b0306345664a2da6410877af6e8082481b5884b3ddd78d47c6013ce/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a233125ac121aa3ffba9a2b59edfc4a985a76092dc8279586ab4b71390875e7", size = 2052418, upload-time = "2026-05-06T13:37:38.234Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b7/a65fec226f5d78fc39f4a13c4cc0c768c22b113438f60c14adc9d2865038/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b712b53160b79a5850310b912a5ef8e57e56947c8ad690c227f5c9d7e561712", size = 2232274, upload-time = "2026-05-06T13:38:27.753Z" }, + { url = "https://files.pythonhosted.org/packages/68/f0/92039db98b907ef49269a8271f67db9cb78ae2fc68062ef7e4e77adb5f61/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9401557acd873c3a7f3eb9383edef8ac4968f9510e340f4808d427e75667e7b4", size = 2309940, upload-time = "2026-05-06T13:38:05.353Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/2aab507d3d00ca626e8e57c1eac6a79e4e5fbcc63eb99733ff55d1717f65/pydantic_core-2.46.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:926c9541b14b12b1681dca8a0b75feb510b06c6341b70a8e500c2fdcff837cce", size = 2094516, upload-time = "2026-05-06T13:39:10.577Z" }, + { url = "https://files.pythonhosted.org/packages/22/37/a8aca44d40d737dde2bc05b3c6c07dff0de07ce6f82e9f3167aeaf4d5dea/pydantic_core-2.46.4-cp312-cp312-manylinux_2_31_riscv64.whl", hash = 
"sha256:56cb4851bcaf3d117eddcef4fe66afd750a50274b0da8e22be256d10e5611987", size = 2136854, upload-time = "2026-05-06T13:40:22.59Z" }, + { url = "https://files.pythonhosted.org/packages/24/99/fcef1b79238c06a8cbec70819ac722ba76e02bc8ada9b0fd66eba40da01b/pydantic_core-2.46.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c68fcd102d71ea85c5b2dfac3f4f8476eff42a9e078fd5faefff6d145063536b", size = 2180306, upload-time = "2026-05-06T13:40:10.666Z" }, + { url = "https://files.pythonhosted.org/packages/ae/6c/fc44000918855b42779d007ae63b0532794739027b2f417321cddbc44f6a/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b2f69dec1725e79a012d920df1707de5caf7ed5e08f3be4435e25803efc47458", size = 2190044, upload-time = "2026-05-06T13:40:43.231Z" }, + { url = "https://files.pythonhosted.org/packages/6b/65/d9cadc9f1920d7a127ad2edba16c1db7916e59719285cd6c94600b0080ba/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:8d0820e8192167f80d88d64038e609c31452eeca865b4e1d9950a27a4609b00b", size = 2329133, upload-time = "2026-05-06T13:39:57.365Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cf/c873d91679f3a30bcf5e7ac280ce5573483e72295307685120d0d5ad3416/pydantic_core-2.46.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fbdb89b3e1c94a30cc5edfce477c6e6a5dc4d8f84665b455c27582f211a1c72c", size = 2374464, upload-time = "2026-05-06T13:38:06.976Z" }, + { url = "https://files.pythonhosted.org/packages/47/bd/6f2fc8188f31bf10590f1e98e7b306336161fac930a8c514cd7bd828c7dc/pydantic_core-2.46.4-cp312-cp312-win32.whl", hash = "sha256:9aa768456404a8bf48a4406685ac2bec8e72b62c69313734fa3b73cf33b3a894", size = 1974823, upload-time = "2026-05-06T13:40:47.985Z" }, + { url = "https://files.pythonhosted.org/packages/40/8c/985c1d41ea1107c2534abd9870e4ed5c8e7669b5c308297835c001e7a1c4/pydantic_core-2.46.4-cp312-cp312-win_amd64.whl", hash = "sha256:e9c26f834c65f5752f3f06cb08cb86a913ceb7274d0db6e267808a708b46bc89", size = 2072919, 
upload-time = "2026-05-06T13:39:21.153Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ba/f463d006e0c47373ca7ec5e1a261c59dc01ef4d62b2657af925fb0deee3a/pydantic_core-2.46.4-cp312-cp312-win_arm64.whl", hash = "sha256:4fc73cb559bdb54b1134a706a2802a4cddd27a0633f5abb7e53056268751ac6a", size = 2027604, upload-time = "2026-05-06T13:39:03.753Z" }, + { url = "https://files.pythonhosted.org/packages/51/a2/5d30b469c5267a17b39dec53208222f76a8d351dfac4af661888c5aee77d/pydantic_core-2.46.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5d5902252db0d3cedf8d4a1bc68f70eeb430f7e4c7104c8c476753519b423008", size = 2106306, upload-time = "2026-05-06T13:37:48.029Z" }, + { url = "https://files.pythonhosted.org/packages/c1/81/4fa520eaffa8bd7d1525e644cd6d39e7d60b1592bc5b516693c7340b50f1/pydantic_core-2.46.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c94f0688e7b8d0a67abf40e57a7eaaecd17cc9586706a31b76c031f63df052b4", size = 1951906, upload-time = "2026-05-06T13:37:17.012Z" }, + { url = "https://files.pythonhosted.org/packages/03/d5/fd02da45b659668b05923b17ba3a0100a0a3d5541e3bd8fcc4ecb711309e/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f027324c56cd5406ca49c124b0db10e56c69064fec039acc571c29020cc87c76", size = 1976802, upload-time = "2026-05-06T13:37:35.113Z" }, + { url = "https://files.pythonhosted.org/packages/21/f2/95727e1368be3d3ed485eaab7adbd7dda408f33f7a36e8b48e0144002b91/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e739fee756ba1010f8bcccb534252e85a35fe45ae92c295a06059ce58b74ccd3", size = 2052446, upload-time = "2026-05-06T13:37:12.313Z" }, + { url = "https://files.pythonhosted.org/packages/9c/86/5d99feea3f77c7234b8718075b23db11532773c1a0dbd9b9490215dc2eeb/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d56801be94b86a9da183e5f3766e6310752b99ff647e38b09a9500d88e46e76", size = 2232757, upload-time = 
"2026-05-06T13:39:01.149Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3a/508ac615935ef7588cf6d9e9b91309fdc2da751af865e02a9098de88258c/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2412e734dcb48da14d4e4006b82b46b74f2518b8a26ee7e58c6844a6cd6d03c4", size = 2309275, upload-time = "2026-05-06T13:37:41.406Z" }, + { url = "https://files.pythonhosted.org/packages/07/f8/41db9de19d7987d6b04715a02b3b40aea467000275d9d758ffaa31af7d50/pydantic_core-2.46.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9551187363ffc0de2a00b2e47c25aeaeb1020b69b668762966df15fc5659dd5a", size = 2094467, upload-time = "2026-05-06T13:39:18.847Z" }, + { url = "https://files.pythonhosted.org/packages/2c/e2/f35033184cb11d0052daf4416e8e10a502ea2ac006fc4f459aee872727d1/pydantic_core-2.46.4-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0186750b482eefa11d7f435892b09c5c606193ef3375bcf94aa00ae6bfb66262", size = 2134417, upload-time = "2026-05-06T13:40:17.944Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7b/6ceeb1cc90e193862f444ebe373d8fdf613f0a82572dde03fb10734c6c71/pydantic_core-2.46.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5855698a4856556d86e8e6cd8434bc3ac0314ee8e12089ae0e143f64c6256e4e", size = 2179782, upload-time = "2026-05-06T13:40:32.618Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f2/c8d7773ede6af08036423a00ae0ceffce266c3c52a096c435d68c896083f/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:cbaf13819775b7f769bf4a1f066cb6df7a28d4480081a589828ef190226881cd", size = 2188782, upload-time = "2026-05-06T13:36:51.018Z" }, + { url = "https://files.pythonhosted.org/packages/59/31/0c864784e31f09f05cdd87606f08923b9c9e7f6e51dd27f20f62f975ce9f/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:633147d34cf4550417f12e2b1a0383973bdf5cdfde212cb09e9a581cf10820be", size = 2328334, upload-time = 
"2026-05-06T13:40:37.764Z" }, + { url = "https://files.pythonhosted.org/packages/c2/eb/4f6c8a41efa30baa755590f4141abf3a8c370fab610915733e74134a7270/pydantic_core-2.46.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:82cf5301172168103724d49a1444d3378cb20cdee30b116a1bd6031236298a5d", size = 2372986, upload-time = "2026-05-06T13:39:34.152Z" }, + { url = "https://files.pythonhosted.org/packages/5b/24/b375a480d53113860c299764bfe9f349a3dc9108b3adc0d7f0d786492ebf/pydantic_core-2.46.4-cp313-cp313-win32.whl", hash = "sha256:9fa8ae11da9e2b3126c6426f147e0fba88d96d65921799bb30c6abd1cb2c97fb", size = 1973693, upload-time = "2026-05-06T13:37:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e8/cff247591966f2d22ec8c003cd7587e27b7ba7b81ab2fb888e3ab75dc285/pydantic_core-2.46.4-cp313-cp313-win_amd64.whl", hash = "sha256:6b3ace8194b0e5204818c92802dcdca7fc6d88aabbb799d7c795540d9cd6d292", size = 2071819, upload-time = "2026-05-06T13:38:49.139Z" }, + { url = "https://files.pythonhosted.org/packages/c6/1a/f4aee670d5670e9e148e0c82c7db98d780be566c6e6a97ee8035528ca0b3/pydantic_core-2.46.4-cp313-cp313-win_arm64.whl", hash = "sha256:184c081504d17f1c1066e430e117142b2c77d9448a97f7b65c6ac9fd9aee238d", size = 2027411, upload-time = "2026-05-06T13:40:45.796Z" }, + { url = "https://files.pythonhosted.org/packages/8d/74/228a26ddad29c6672b805d9fd78e8d251cd04004fa7eed0e622096cd0250/pydantic_core-2.46.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:428e04521a40150c85216fc8b85e8d39fece235a9cf5e383761238c7fa9b96fb", size = 2102079, upload-time = "2026-05-06T13:38:41.019Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/8970b150a4b4365623ae00fc88603491f763c627311ae8031e3111356d6e/pydantic_core-2.46.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23ace664830ee0bfe014a0c7bc248b1f7f25ed7ad103852c317624a1083af462", size = 1952179, upload-time = "2026-05-06T13:36:59.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/30/5211a831ae054928054b2f79731661087a2bc5c01e825c672b3a4a8f1b3e/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce5c1d2a8b27468f433ca974829c44060b8097eedc39933e3c206a90ee49c4a9", size = 1978926, upload-time = "2026-05-06T13:37:39.933Z" }, + { url = "https://files.pythonhosted.org/packages/57/e9/689668733b1eb67adeef047db3c2e8788fcf65a7fd9c9e2b46b7744fe245/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7283d57845ecf5a163403eb0702dfc220cc4fbdd18919cb5ccea4f95ee1cdab4", size = 2046785, upload-time = "2026-05-06T13:38:01.995Z" }, + { url = "https://files.pythonhosted.org/packages/60/d9/6715260422ff50a2109878fd24d948a6c3446bb2664f34ee78cd972b3acd/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8daafc69c93ee8a0204506a3b6b30f586ef54028f52aeeeb5c4cfc5184fd5914", size = 2228733, upload-time = "2026-05-06T13:40:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/fdb2f64316afca925640f8e70bb1a564b0ec2721c1389e25b8eb4bf9a299/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2213145bcc2ba85884d0ac63d222fece9209678f77b9b4d76f054c561adb28", size = 2307534, upload-time = "2026-05-06T13:37:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/89/1d/8eff589b45bb8190a9d12c49cfad0f176a5cbd1534908a6b5125e2886239/pydantic_core-2.46.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a5f930472650a82629163023e630d160863fce524c616f4e5186e5de9d9a49b", size = 2099732, upload-time = "2026-05-06T13:39:31.942Z" }, + { url = "https://files.pythonhosted.org/packages/06/d5/ee5a3366637fee41dee51a1fc91562dcf12ddbc68fda34e6b253da2324bb/pydantic_core-2.46.4-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:c1b3f518abeca3aa13c712fd202306e145abf59a18b094a6bafb2d2bbf59192c", size = 2129627, upload-time = 
"2026-05-06T13:37:25.033Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/2414be571d2c6a6c4d08be21f9292b6d3fdb08949a97b6dfe985017821db/pydantic_core-2.46.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a7dd0b3ee80d90150e3495a3a13ac34dbcbfd4f012996a6a1d8900e91b5c0fb", size = 2179141, upload-time = "2026-05-06T13:37:14.046Z" }, + { url = "https://files.pythonhosted.org/packages/7b/79/7daa95be995be0eecc4cf75064cb33f9bbbfe3fe0158caf2f0d4a996a5c7/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:3fb702cd90b0446a3a1c5e470bfa0dd23c0233b676a9099ddcc964fa6ca13898", size = 2184325, upload-time = "2026-05-06T13:36:53.615Z" }, + { url = "https://files.pythonhosted.org/packages/9f/cb/d0a382f5c0de8a222dc61c65348e0ce831b1f68e0a018450d31c2cace3a5/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b8458003118a712e66286df6a707db01c52c0f52f7db8e4a38f0da1d3b94fc4e", size = 2323990, upload-time = "2026-05-06T13:40:29.971Z" }, + { url = "https://files.pythonhosted.org/packages/05/db/d9ba624cc4a5aced1598e88c04fdbd8310c8a69b9d38b9a3d39ce3a61ed7/pydantic_core-2.46.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:372429a130e469c9cd698925ce5fc50940b7a1336b0d82038e63d5bbc4edc519", size = 2369978, upload-time = "2026-05-06T13:37:23.027Z" }, + { url = "https://files.pythonhosted.org/packages/f2/20/d15df15ba918c423461905802bfd2981c3af0bfa0e40d05e13edbfa48bc3/pydantic_core-2.46.4-cp314-cp314-win32.whl", hash = "sha256:85bb3611ff1802f3ee7fdd7dbff26b56f343fb432d57a4728fdd49b6ef35e2f4", size = 1966354, upload-time = "2026-05-06T13:38:03.499Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b6/6b8de4c0a7d7ab3004c439c80c5c1e0a3e8d78bbae19379b01960383d9e5/pydantic_core-2.46.4-cp314-cp314-win_amd64.whl", hash = "sha256:811ff8e9c313ab425368bcbb36e5c4ebd7108c2bbf4e4089cfbb0b01eff63fac", size = 2072238, upload-time = "2026-05-06T13:39:40.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/36/51eb763beec1f4cf59b1db243a7dcc39cbb41230f050a09b9d69faaf0a48/pydantic_core-2.46.4-cp314-cp314-win_arm64.whl", hash = "sha256:bfec22eab3c8cc2ceec0248aec886624116dc079afa027ecc8ad4a7e62010f8a", size = 2018251, upload-time = "2026-05-06T13:37:26.72Z" }, + { url = "https://files.pythonhosted.org/packages/e8/91/855af51d625b23aa987116a19e231d2aaef9c4a415273ddc189b79a45fee/pydantic_core-2.46.4-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:af8244b2bef6aaad6d92cda81372de7f8c8d36c9f0c3ea36e827c60e7d9467a0", size = 2099593, upload-time = "2026-05-06T13:39:47.682Z" }, + { url = "https://files.pythonhosted.org/packages/fb/1b/8784a54c65edb5f49f0a14d6977cf1b209bba85a4c77445b255c2de58ab3/pydantic_core-2.46.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a4330cdbc57162e4b3aa303f588ba752257694c9c9be3e7ebb11b4aca659b5d", size = 1935226, upload-time = "2026-05-06T13:40:40.428Z" }, + { url = "https://files.pythonhosted.org/packages/e8/e7/1955d28d1afc56dd4b3ad7cc0cf39df1b9852964cf16e5d13912756d6d6b/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c61fc04a3d840155ff08e475a04809278972fe6aef51e2720554e96367e34b", size = 1974605, upload-time = "2026-05-06T13:37:32.029Z" }, + { url = "https://files.pythonhosted.org/packages/93/e2/3fedbf0ba7a22850e6e9fd78117f1c0f10f950182344d8a6c535d468fdd8/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c50f2528cf200c5eed56faf3f4e22fcd5f38c157a8b78576e6ba3168ec35f000", size = 2030777, upload-time = "2026-05-06T13:38:55.239Z" }, + { url = "https://files.pythonhosted.org/packages/f8/61/46be275fcaaba0b4f5b9669dd852267ce1ff616592dccf7a7845588df091/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cbe8b01f948de4286c74cdd6c667aceb38f5c1e26f0693b3983d9d74887c65e", size = 2236641, upload-time = "2026-05-06T13:37:08.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/db/12e93e46a8bac9988be3c016860f83293daea8c716c029c9ace279036f2f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:617d7e2ca7dcb8c5cf6bcb8c59b8832c94b36196bbf1cbd1bfb56ed341905edd", size = 2286404, upload-time = "2026-05-06T13:40:20.221Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4a/4d8b19008f38d31c53b8219cfedc2e3d5de5fe99d90076b7e767de29274f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7027560ee92211647d0d34e3f7cd6f50da56399d26a9c8ad0da286d3869a53f3", size = 2109219, upload-time = "2026-05-06T13:38:12.153Z" }, + { url = "https://files.pythonhosted.org/packages/88/70/3cbc40978fefb7bb09c6708d40d4ad1a5d70fd7213c3d17f971de868ec1f/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:f99626688942fb746e545232e7726926f3be91b5975f8b55327665fafda991c7", size = 2110594, upload-time = "2026-05-06T13:40:02.971Z" }, + { url = "https://files.pythonhosted.org/packages/9d/20/b8d36736216e29491125531685b2f9e61aa5b4b2599893f8268551da3338/pydantic_core-2.46.4-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3e9034a63de20e15e8ade85358bc6efc614008cab72898b4b4952bea0509ff", size = 2159542, upload-time = "2026-05-06T13:39:27.506Z" }, + { url = "https://files.pythonhosted.org/packages/1d/a2/367df868eb584dacf6bf82a389272406d7178e301c4ac82545ab98bc2dd9/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:97e7cf2be5c77b7d1a9713a05605d49460d02c6078d38d8bef3cbe323c548424", size = 2168146, upload-time = "2026-05-06T13:38:31.93Z" }, + { url = "https://files.pythonhosted.org/packages/c1/b8/4460f77f7e201893f649a29ab355dddd3beee8a97bcb1a320db414f9a06e/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:3bf92c5d0e00fefaab325a4d27828fe6b6e2a21848686b5b60d2d9eeb09d76c6", size = 2306309, upload-time = "2026-05-06T13:37:44.717Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/c4/be2639293acd87dc8ddbcec41a73cee9b2ebf996fe6d892a1a74e88ad3f7/pydantic_core-2.46.4-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:3ecbc122d18468d06ca279dc26a8c2e2d5acb10943bb35e36ae92096dc3b5565", size = 2369736, upload-time = "2026-05-06T13:37:05.645Z" }, + { url = "https://files.pythonhosted.org/packages/30/a6/9f9f380dbb301f67023bf8f707aaa75daadf84f7152d95c410fd7e81d994/pydantic_core-2.46.4-cp314-cp314t-win32.whl", hash = "sha256:e846ae7835bf0703ae43f534ab79a867146dadd59dc9ca5c8b53d5c8f7c9ef02", size = 1955575, upload-time = "2026-05-06T13:38:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/40/1f/f1eb9eb350e795d1af8586289746f5c5677d16043040d63710e22abc43c9/pydantic_core-2.46.4-cp314-cp314t-win_amd64.whl", hash = "sha256:2108ba5c1c1eca18030634489dc544844144ee36357f2f9f780b93e7ddbb44b5", size = 2051624, upload-time = "2026-05-06T13:38:21.672Z" }, + { url = "https://files.pythonhosted.org/packages/f6/d2/42dd53d0a85c27606f316d3aa5d2869c4e8470a5ed6dec30e4a1abe19192/pydantic_core-2.46.4-cp314-cp314t-win_arm64.whl", hash = "sha256:4fcbe087dbc2068af7eda3aa87634eba216dbda64d1ae73c8684b621d33f6596", size = 2017325, upload-time = "2026-05-06T13:40:52.723Z" }, + { url = "https://files.pythonhosted.org/packages/ee/a4/73995fd4ebbb46ba0ee51e6fa049b8f02c40daebb762208feda8a6b7894d/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:14d4edf427bdcf950a8a02d7cb44a08614388dd6e1bdcbf4f67504fa7887da9c", size = 2111589, upload-time = "2026-05-06T13:37:10.817Z" }, + { url = "https://files.pythonhosted.org/packages/fb/7f/f37d3a5e8bfcc2e403f5c57a730f2d815693fb42119e8ea48b3789335af1/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:0ce40cd7b21210e99342afafbd4d0f76d784eb5b1d60f3bdc566be4983c6c73b", size = 1944552, upload-time = "2026-05-06T13:36:56.717Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/3c/d7eb777b3ff43e8433a4efb39a17aa8fd98a4ee8561a24a67ef5db07b2d6/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90884113d8b48f760e9587002789ddd741e76ab9f89518cd1e43b1f1a52ec44b", size = 1982984, upload-time = "2026-05-06T13:39:06.207Z" }, + { url = "https://files.pythonhosted.org/packages/63/87/70b9f40170a81afd55ca26c9b2acb25c20d64bcfbf888fafecb3ba077d4c/pydantic_core-2.46.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66ce7632c22d837c95301830e111ad0128a32b8207533b60896a96c4915192ea", size = 2138417, upload-time = "2026-05-06T13:39:45.476Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1d/8987ad40f65ae1432753072f214fb5c74fe47ffbd0698bb9cbbb585664f8/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:1d8ba486450b14f3b1d63bc521d410ec7565e52f887b9fb671791886436a42f7", size = 2095527, upload-time = "2026-05-06T13:39:52.283Z" }, + { url = "https://files.pythonhosted.org/packages/64/d3/84c282a7eee1d3ac4c0377546ef5a1ea436ce26840d9ac3b7ed54a377507/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:3009f12e4e90b7f88b4f9adb1b0c4a3d58fe7820f3238c190047209d148026df", size = 1936024, upload-time = "2026-05-06T13:40:15.671Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ca/eac61596cdeb4d7e174d3dc0bd8a6238f14f75f97a24e7b7db4c7e7340a0/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad785e92e6dc634c21555edc8bd6b64957ab844541bcb96a1366c202951ae526", size = 1990696, upload-time = "2026-05-06T13:38:34.717Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c3/7c8b240552251faf6b3a957db200fcfbbcec36763c050428b601e0c9b83b/pydantic_core-2.46.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:00c603d540afdd6b80eb39f078f33ebd46211f02f33e34a32d9f053bba711de0", size = 2147590, upload-time = "2026-05-06T13:39:29.883Z" }, + { url = "https://files.pythonhosted.org/packages/11/cb/428de0385b6c8d44b716feba566abfacfbd23ee3c4439faa789a1456242f/pydantic_core-2.46.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0c563b08bca408dc7f65f700633d8442fffb2421fc47b8101377e9fd65051ff0", size = 2112782, upload-time = "2026-05-06T13:37:04.016Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b5/6a17bdadd0fc1f170adfd05a20d37c832f52b117b4d9131da1f41bb097ce/pydantic_core-2.46.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:db06ffe51636ffe9ca531fe9023dd64bdd794be8754cb5df57c5498ae5b518a7", size = 1952146, upload-time = "2026-05-06T13:39:43.092Z" }, + { url = "https://files.pythonhosted.org/packages/2a/dc/03734d80e362cd43ef65428e9de77c730ce7f2f11c60d2b1e1b39f0fbf99/pydantic_core-2.46.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:133878133d271ade3d41d1bfb2a45ec38dbdbda40bc065921c6b04e4630127e2", size = 2134492, upload-time = "2026-05-06T13:36:58.124Z" }, + { url = "https://files.pythonhosted.org/packages/de/df/5e5ffc085ed07cc22d298134d3d911c63e91f6a0eb91fe646750a3209910/pydantic_core-2.46.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9bc519fbf2b7578398853d815009ae5e4d4603d12f4e3f91da8c06852d3da3e9", size = 2156604, upload-time = "2026-05-06T13:37:49.88Z" }, + { url = "https://files.pythonhosted.org/packages/81/44/6e112a4253e56f5705467cbab7ab5e91ee7398ba3d56d358635958893d3e/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c7a7bd4e39e8e4c12c39cd480356842b6a8a06e41b23a55a5e3e191718838ddf", size = 2183828, upload-time = "2026-05-06T13:37:43.053Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/5565071e937d8e752842ac241463944c9eb14c87e2d269f2658a5bd05e98/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:d396ec2b979760aaf3218e76c24e65bd0aca24983298653b3a9d7a45f9e47b30", size = 2310000, upload-time = "2026-05-06T13:37:56.694Z" }, + { url = "https://files.pythonhosted.org/packages/4f/c3/66883a5cec183e7fba4d024b4cbbe61851a63750ef606b0afecc46d1f2bf/pydantic_core-2.46.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:86e1a4418c6cd97d60c95c71164158eaf7324fae7b0923264016baa993eba6fc", size = 2361286, upload-time = "2026-05-06T13:40:05.667Z" }, + { url = "https://files.pythonhosted.org/packages/4b/2d/69abac8f838090bbecd5df894befb2c2619e7996a98ddb949db9f3b93225/pydantic_core-2.46.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:d51026d73fcfd93610abc7b27789c26b313920fcfb20e27462d74a7f8b06e983", size = 2193071, upload-time = "2026-05-06T13:38:08.682Z" }, +] + +[[package]] +name = "pydantic-extra-types" +version = "2.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/71/dba38ee2651f84f7842206adbd2233d8bbdb59fb85e9fa14232486a8c471/pydantic_extra_types-2.11.1.tar.gz", hash = "sha256:46792d2307383859e923d8fcefa82108b1a141f8a9c0198982b3832ab5ef1049", size = 172002, upload-time = "2026-03-16T08:08:03.92Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/c1/3226e6d7f5a4f736f38ac11a6fbb262d701889802595cdb0f53a885ac2e0/pydantic_extra_types-2.11.1-py3-none-any.whl", hash = "sha256:1722ea2bddae5628ace25f2aa685b69978ef533123e5638cfbddb999e0100ec1", size = 79526, upload-time = "2026-03-16T08:08:02.533Z" }, ] [[package]] name = "pydantic-settings" -version = "2.12.0" +version = "2.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = 
"sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/60/1d1e59c9c90d54591469ada7d268251f71c24bdb765f1a8a832cee8c6653/pydantic_settings-2.14.1.tar.gz", hash = "sha256:e874d3bec7e787b0c9958277956ed9b4dd5de6a80e162188fdaff7c5e26fd5fa", size = 235551, upload-time = "2026-05-08T13:40:06.542Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8d/f1af3832f5e6eb13ba94ee809e72b8ecb5eef226d27ee0bef7d963d943c7/pydantic_settings-2.14.1-py3-none-any.whl", hash = "sha256:6e3c7edfd8277687cdc598f56e5cff0e9bfff0910a3749deaa8d4401c3a2b9de", size = 60964, upload-time = "2026-05-08T13:40:04.958Z" }, ] [[package]] name = "pygments" -version = "2.19.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, -] - -[[package]] -name = "pyjwt" -version = "2.9.0" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825, upload-time = "2024-08-01T15:01:08.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344, upload-time = "2024-08-01T15:01:06.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pytest" -version = "8.4.0" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1119,42 +2033,61 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/aa/405082ce2749be5398045152251ac69c0f3578c7077efc53431303af97ce/pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6", size = 1515232, upload-time = "2025-06-02T17:36:30.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = 
"2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/de/afa024cbe022b1b318a3d224125aa24939e99b4ff6f22e0ba639a2eaee47/pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e", size = 363797, upload-time = "2025-06-02T17:36:27.859Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] name = "pytest-asyncio" -version = "1.0.0" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" }, + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", 
size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, ] [[package]] name = "pytest-mock" -version = "3.14.1" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "execnet" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[package.optional-dependencies] +psutil = [ + { name = "psutil" }, ] [[package]] name = "python-dotenv" -version = "1.1.0" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] [[package]] @@ -1171,13 +2104,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = 
"2025-05-28T17:31:52.802Z" }, ] +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + [[package]] name = "python-multipart" -version = "0.0.20" +version = "0.0.27" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/9b/f23807317a113dc36e74e75eb265a02dd1a4d9082abc3c1064acd22997c4/python_multipart-0.0.27.tar.gz", hash = "sha256:9870a6a8c5a20a5bf4f07c017bd1489006ff8836cff097b6933355ee2b49b602", size = 44043, upload-time = "2026-04-27T10:51:26.649Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/78/4126abcbdbd3c559d43e0db7f7b9173fc6befe45d39a2856cc0b8ec2a5a6/python_multipart-0.0.27-py3-none-any.whl", hash = "sha256:6fccfad17a27334bd0193681b369f476eda3409f17381a2d65aa7df3f7275645", size = 29254, upload-time = "2026-04-27T10:51:24.997Z" }, +] + +[[package]] +name = "pywin32" +version = "311" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = 
"sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, ] [[package]] @@ -1237,33 +2194,151 @@ wheels = [ [[package]] name = "redis" -version = "5.3.0" +version = "7.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, - { name = "pyjwt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/dd/2b37032f4119dff2a2f9bbcaade03221b100ba26051bb96e275de3e5db7a/redis-5.3.0.tar.gz", hash = "sha256:8d69d2dde11a12dc85d0dbf5c45577a5af048e2456f7077d87ad35c1c81c310e", size = 4626288, upload-time = "2025-04-30T14:54:40.634Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/b0/aa601efe12180ba492b02e270554877e68467e66bda5d73e51eaa8ecc78a/redis-5.3.0-py3-none-any.whl", hash = "sha256:f1deeca1ea2ef25c1e4e46b07f4ea1275140526b1feea4c6459c0ec27a10ef83", size = 272836, upload-time = "2025-04-30T14:54:30.744Z" }, + { url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" }, ] -[package.optional-dependencies] -hiredis = [ - { name = "hiredis" }, +[[package]] +name = "requests" +version = "2.33.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] name = "rich" -version = "14.2.0" +version = "15.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/8f/0722ca900cc807c13a6a0c696dacf35430f72e0ec571c4275d2371fca3e9/rich-15.0.0.tar.gz", hash = "sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36", size = 230680, upload-time = "2026-04-12T08:24:00.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, + { url = "https://files.pythonhosted.org/packages/82/3b/64d4899d73f91ba49a8c18a8ff3f0ea8f1c1d75481760df8c68ef5235bf5/rich-15.0.0-py3-none-any.whl", hash = "sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb", size = 310654, upload-time = "2026-04-12T08:24:02.83Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.19.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/ba/dae9e3096651042754da419a4042bc1c75e07d615f9b15066d738838e4df/rich_toolkit-0.19.7.tar.gz", hash = "sha256:133c0915872da91d4c25d85342d5ec1dfacc69b63448af1a08a0d4b4f23ef46e", size = 195877, upload-time = "2026-02-24T16:06:20.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/3c/c923619f6d2f5fafcc96fec0aaf9550a46cd5b6481f06e0c6b66a2a4fed0/rich_toolkit-0.19.7-py3-none-any.whl", hash = "sha256:0288e9203728c47c5a4eb60fd2f0692d9df7455a65901ab6f898437a2ba5989d", size = 32963, upload-time = "2026-02-24T16:06:22.066Z" }, +] + +[[package]] +name = "rignore" 
+version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/41/b6e2be3069ef3b7f24e35d2911bd6deb83d20ed5642ad81d5a6d1c015473/rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c", size = 885285, upload-time = "2025-11-05T20:42:39.763Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/ba7f561b6062402022887706a7f2b2c2e2e2a28f1e3839202b0a2f77e36d/rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7", size = 823882, upload-time = "2025-11-05T20:42:23.488Z" }, + { url = "https://files.pythonhosted.org/packages/f5/81/4087453df35a90b07370647b19017029324950c1b9137d54bf1f33843f17/rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc", size = 899362, upload-time = "2025-11-05T20:40:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c9/390a8fdfabb76d71416be773bd9f162977bd483084f68daf19da1dec88a6/rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a", size = 873633, upload-time = "2025-11-05T20:41:06.193Z" }, + { url = "https://files.pythonhosted.org/packages/df/c9/79404fcb0faa76edfbc9df0901f8ef18568d1104919ebbbad6d608c888d1/rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d", size = 1167633, upload-time = "2025-11-05T20:41:22.491Z" }, + { 
url = "https://files.pythonhosted.org/packages/6e/8d/b3466d32d445d158a0aceb80919085baaae495b1f540fb942f91d93b5e5b/rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d", size = 941434, upload-time = "2025-11-05T20:41:38.151Z" }, + { url = "https://files.pythonhosted.org/packages/e8/40/9cd949761a7af5bc27022a939c91ff622d29c7a0b66d0c13a863097dde2d/rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c", size = 959461, upload-time = "2025-11-05T20:42:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/87/1e1a145731f73bdb7835e11f80da06f79a00d68b370d9a847de979575e6d/rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049", size = 985323, upload-time = "2025-11-05T20:41:52.735Z" }, + { url = "https://files.pythonhosted.org/packages/6c/31/1ecff992fc3f59c4fcdcb6c07d5f6c1e6dfb55ccda19c083aca9d86fa1c6/rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce", size = 1079173, upload-time = "2025-11-05T21:40:12.007Z" }, + { url = "https://files.pythonhosted.org/packages/17/18/162eedadb4c2282fa4c521700dbf93c9b14b8842e8354f7d72b445b8d593/rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb", size = 1139012, upload-time = "2025-11-05T21:40:29.399Z" }, + { url = "https://files.pythonhosted.org/packages/78/96/a9ca398a8af74bb143ad66c2a31303c894111977e28b0d0eab03867f1b43/rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e", size = 1118827, upload-time = "2025-11-05T21:40:46.6Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/22/1c1a65047df864def9a047dbb40bc0b580b8289a4280e62779cd61ae21f2/rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345", size = 1128182, upload-time = "2025-11-05T21:41:04.239Z" }, + { url = "https://files.pythonhosted.org/packages/bd/f4/1526eb01fdc2235aca1fd9d0189bee4021d009a8dcb0161540238c24166e/rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5", size = 646547, upload-time = "2025-11-05T21:41:49.439Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/dda0983e1845706beb5826459781549a840fe5a7eb934abc523e8cd17814/rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a", size = 727139, upload-time = "2025-11-05T21:41:34.367Z" }, + { url = "https://files.pythonhosted.org/packages/e3/47/eb1206b7bf65970d41190b879e1723fc6bbdb2d45e53565f28991a8d9d96/rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581", size = 657598, upload-time = "2025-11-05T21:41:23.758Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, + { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2b/ee96db17ac1835e024c5d0742eefb7e46de60020385ac883dd3d1cde2c1f/rignore-0.7.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5fd5ab3840b8c16851d327ed06e9b8be6459702a53e5ab1fc4073b684b3789e", size = 873963, upload-time = "2025-11-05T20:41:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8c/ad5a57bbb9d14d5c7e5960f712a8a0b902472ea3f4a2138cbf70d1777b75/rignore-0.7.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ced2a248352636a5c77504cb755dc02c2eef9a820a44d3f33061ce1bb8a7f2d2", size = 1169216, upload-time = "2025-11-05T20:41:23.73Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/5b00bc2a6bc1701e6878fca798cf5d9125eb3113193e33078b6fc0d99123/rignore-0.7.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04a3b73b75ddc12c9c9b21efcdaab33ca3832941d6f1d67bffd860941cd448a", size = 942942, upload-time = "2025-11-05T20:41:39.393Z" }, + { url = "https://files.pythonhosted.org/packages/85/e5/7f99bd0cc9818a91d0e8b9acc65b792e35750e3bdccd15a7ee75e64efca4/rignore-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24321efac92140b7ec910ac7c53ab0f0c86a41133d2bb4b0e6a7c94967f44dd", size = 959787, upload-time = "2025-11-05T20:42:09.765Z" }, + { url = "https://files.pythonhosted.org/packages/55/54/2ffea79a7c1eabcede1926347ebc2a81bc6b81f447d05b52af9af14948b9/rignore-0.7.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c7aa109d41e593785c55fdaa89ad80b10330affa9f9d3e3a51fa695f739b20", size = 984245, upload-time = "2025-11-05T20:41:54.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/f7/e80f55dfe0f35787fa482aa18689b9c8251e045076c35477deb0007b3277/rignore-0.7.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1734dc49d1e9501b07852ef44421f84d9f378da9fbeda729e77db71f49cac28b", size = 1078647, upload-time = "2025-11-05T21:40:13.463Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cf/2c64f0b6725149f7c6e7e5a909d14354889b4beaadddaa5fff023ec71084/rignore-0.7.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5719ea14ea2b652c0c0894be5dfde954e1853a80dea27dd2fbaa749618d837f5", size = 1139186, upload-time = "2025-11-05T21:40:31.27Z" }, + { url = "https://files.pythonhosted.org/packages/75/95/a86c84909ccc24af0d094b50d54697951e576c252a4d9f21b47b52af9598/rignore-0.7.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e23424fc7ce35726854f639cb7968151a792c0c3d9d082f7f67e0c362cfecca", size = 1117604, upload-time = "2025-11-05T21:40:48.07Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5e/13b249613fd5d18d58662490ab910a9f0be758981d1797789913adb4e918/rignore-0.7.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3efdcf1dd84d45f3e2bd2f93303d9be103888f56dfa7c3349b5bf4f0657ec696", size = 1127725, upload-time = "2025-11-05T21:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/c7/28/fa5dcd1e2e16982c359128664e3785f202d3eca9b22dd0b2f91c4b3d242f/rignore-0.7.6-cp312-cp312-win32.whl", hash = "sha256:ccca9d1a8b5234c76b71546fc3c134533b013f40495f394a65614a81f7387046", size = 646145, upload-time = "2025-11-05T21:41:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/26/87/69387fb5dd81a0f771936381431780b8cf66fcd2cfe9495e1aaf41548931/rignore-0.7.6-cp312-cp312-win_amd64.whl", hash = "sha256:c96a285e4a8bfec0652e0bfcf42b1aabcdda1e7625f5006d188e3b1c87fdb543", size = 726090, upload-time = "2025-11-05T21:41:36.485Z" }, + { url = "https://files.pythonhosted.org/packages/24/5f/e8418108dcda8087fb198a6f81caadbcda9fd115d61154bf0df4d6d3619b/rignore-0.7.6-cp312-cp312-win_arm64.whl", 
hash = "sha256:a64a750e7a8277a323f01ca50b7784a764845f6cce2fe38831cb93f0508d0051", size = 656317, upload-time = "2025-11-05T21:41:25.305Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8a/a4078f6e14932ac7edb171149c481de29969d96ddee3ece5dc4c26f9e0c3/rignore-0.7.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2bdab1d31ec9b4fb1331980ee49ea051c0d7f7bb6baa28b3125ef03cdc48fdaf", size = 883057, upload-time = "2025-11-05T20:42:42.741Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8f/f8daacd177db4bf7c2223bab41e630c52711f8af9ed279be2058d2fe4982/rignore-0.7.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90f0a00ce0c866c275bf888271f1dc0d2140f29b82fcf33cdbda1e1a6af01010", size = 820150, upload-time = "2025-11-05T20:42:26.545Z" }, + { url = "https://files.pythonhosted.org/packages/36/31/b65b837e39c3f7064c426754714ac633b66b8c2290978af9d7f513e14aa9/rignore-0.7.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ad295537041dc2ed4b540fb1a3906bd9ede6ccdad3fe79770cd89e04e3c73c", size = 897406, upload-time = "2025-11-05T20:40:53.854Z" }, + { url = "https://files.pythonhosted.org/packages/ca/58/1970ce006c427e202ac7c081435719a076c478f07b3a23f469227788dc23/rignore-0.7.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f782dbd3a65a5ac85adfff69e5c6b101285ef3f845c3a3cae56a54bebf9fe116", size = 874050, upload-time = "2025-11-05T20:41:08.922Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/eb45db9f90137329072a732273be0d383cb7d7f50ddc8e0bceea34c1dfdf/rignore-0.7.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65cece3b36e5b0826d946494734c0e6aaf5a0337e18ff55b071438efe13d559e", size = 1167835, upload-time = "2025-11-05T20:41:24.997Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f1/6f1d72ddca41a64eed569680587a1236633587cc9f78136477ae69e2c88a/rignore-0.7.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d7e4bb66c13cd7602dc8931822c02dfbbd5252015c750ac5d6152b186f0a8be0", size = 941945, upload-time = "2025-11-05T20:41:40.628Z" }, + { url = "https://files.pythonhosted.org/packages/48/6f/2f178af1c1a276a065f563ec1e11e7a9e23d4996fd0465516afce4b5c636/rignore-0.7.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297e500c15766e196f68aaaa70e8b6db85fa23fdc075b880d8231fdfba738cd7", size = 959067, upload-time = "2025-11-05T20:42:11.09Z" }, + { url = "https://files.pythonhosted.org/packages/5b/db/423a81c4c1e173877c7f9b5767dcaf1ab50484a94f60a0b2ed78be3fa765/rignore-0.7.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a07084211a8d35e1a5b1d32b9661a5ed20669970b369df0cf77da3adea3405de", size = 984438, upload-time = "2025-11-05T20:41:55.443Z" }, + { url = "https://files.pythonhosted.org/packages/31/eb/c4f92cc3f2825d501d3c46a244a671eb737fc1bcf7b05a3ecd34abb3e0d7/rignore-0.7.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:181eb2a975a22256a1441a9d2f15eb1292839ea3f05606620bd9e1938302cf79", size = 1078365, upload-time = "2025-11-05T21:40:15.148Z" }, + { url = "https://files.pythonhosted.org/packages/26/09/99442f02794bd7441bfc8ed1c7319e890449b816a7493b2db0e30af39095/rignore-0.7.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:7bbcdc52b5bf9f054b34ce4af5269df5d863d9c2456243338bc193c28022bd7b", size = 1139066, upload-time = "2025-11-05T21:40:32.771Z" }, + { url = "https://files.pythonhosted.org/packages/2c/88/bcfc21e520bba975410e9419450f4b90a2ac8236b9a80fd8130e87d098af/rignore-0.7.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f2e027a6da21a7c8c0d87553c24ca5cc4364def18d146057862c23a96546238e", size = 1118036, upload-time = "2025-11-05T21:40:49.646Z" }, + { url = "https://files.pythonhosted.org/packages/e2/25/d37215e4562cda5c13312636393aea0bafe38d54d4e0517520a4cc0753ec/rignore-0.7.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee4a18b82cbbc648e4aac1510066682fe62beb5dc88e2c67c53a83954e541360", size = 1127550, 
upload-time = "2025-11-05T21:41:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/dc/76/a264ab38bfa1620ec12a8ff1c07778da89e16d8c0f3450b0333020d3d6dc/rignore-0.7.6-cp313-cp313-win32.whl", hash = "sha256:a7d7148b6e5e95035d4390396895adc384d37ff4e06781a36fe573bba7c283e5", size = 646097, upload-time = "2025-11-05T21:41:53.201Z" }, + { url = "https://files.pythonhosted.org/packages/62/44/3c31b8983c29ea8832b6082ddb1d07b90379c2d993bd20fce4487b71b4f4/rignore-0.7.6-cp313-cp313-win_amd64.whl", hash = "sha256:b037c4b15a64dced08fc12310ee844ec2284c4c5c1ca77bc37d0a04f7bff386e", size = 726170, upload-time = "2025-11-05T21:41:38.131Z" }, + { url = "https://files.pythonhosted.org/packages/aa/41/e26a075cab83debe41a42661262f606166157df84e0e02e2d904d134c0d8/rignore-0.7.6-cp313-cp313-win_arm64.whl", hash = "sha256:e47443de9b12fe569889bdbe020abe0e0b667516ee2ab435443f6d0869bd2804", size = 656184, upload-time = "2025-11-05T21:41:27.396Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b9/1f5bd82b87e5550cd843ceb3768b4a8ef274eb63f29333cf2f29644b3d75/rignore-0.7.6-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8e41be9fa8f2f47239ded8920cc283699a052ac4c371f77f5ac017ebeed75732", size = 882632, upload-time = "2025-11-05T20:42:44.063Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6b/07714a3efe4a8048864e8a5b7db311ba51b921e15268b17defaebf56d3db/rignore-0.7.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6dc1e171e52cefa6c20e60c05394a71165663b48bca6c7666dee4f778f2a7d90", size = 820760, upload-time = "2025-11-05T20:42:27.885Z" }, + { url = "https://files.pythonhosted.org/packages/ac/0f/348c829ea2d8d596e856371b14b9092f8a5dfbb62674ec9b3f67e4939a9d/rignore-0.7.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ce2268837c3600f82ab8db58f5834009dc638ee17103582960da668963bebc5", size = 899044, upload-time = "2025-11-05T20:40:55.336Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/30/2e1841a19b4dd23878d73edd5d82e998a83d5ed9570a89675f140ca8b2ad/rignore-0.7.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:690a3e1b54bfe77e89c4bacb13f046e642f8baadafc61d68f5a726f324a76ab6", size = 874144, upload-time = "2025-11-05T20:41:10.195Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bf/0ce9beb2e5f64c30e3580bef09f5829236889f01511a125f98b83169b993/rignore-0.7.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09d12ac7a0b6210c07bcd145007117ebd8abe99c8eeb383e9e4673910c2754b2", size = 1168062, upload-time = "2025-11-05T20:41:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/b9/8b/571c178414eb4014969865317da8a02ce4cf5241a41676ef91a59aab24de/rignore-0.7.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a2b2b74a8c60203b08452479b90e5ce3dbe96a916214bc9eb2e5af0b6a9beb0", size = 942542, upload-time = "2025-11-05T20:41:41.838Z" }, + { url = "https://files.pythonhosted.org/packages/19/62/7a3cf601d5a45137a7e2b89d10c05b5b86499190c4b7ca5c3c47d79ee519/rignore-0.7.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc5a531ef02131e44359419a366bfac57f773ea58f5278c2cdd915f7d10ea94", size = 958739, upload-time = "2025-11-05T20:42:12.463Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1f/4261f6a0d7caf2058a5cde2f5045f565ab91aa7badc972b57d19ce58b14e/rignore-0.7.6-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7a1f77d9c4cd7e76229e252614d963442686bfe12c787a49f4fe481df49e7a9", size = 984138, upload-time = "2025-11-05T20:41:56.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/bf/628dfe19c75e8ce1f45f7c248f5148b17dfa89a817f8e3552ab74c3ae812/rignore-0.7.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ead81f728682ba72b5b1c3d5846b011d3e0174da978de87c61645f2ed36659a7", size = 1079299, upload-time = "2025-11-05T21:40:16.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/a5/be29c50f5c0c25c637ed32db8758fdf5b901a99e08b608971cda8afb293b/rignore-0.7.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:12ffd50f520c22ffdabed8cd8bfb567d9ac165b2b854d3e679f4bcaef11a9441", size = 1139618, upload-time = "2025-11-05T21:40:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/2a/40/3c46cd7ce4fa05c20b525fd60f599165e820af66e66f2c371cd50644558f/rignore-0.7.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e5a16890fbe3c894f8ca34b0fcacc2c200398d4d46ae654e03bc9b3dbf2a0a72", size = 1117626, upload-time = "2025-11-05T21:40:51.494Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b9/aea926f263b8a29a23c75c2e0d8447965eb1879d3feb53cfcf84db67ed58/rignore-0.7.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3abab3bf99e8a77488ef6c7c9a799fac22224c28fe9f25cc21aa7cc2b72bfc0b", size = 1128144, upload-time = "2025-11-05T21:41:09.169Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f6/0d6242f8d0df7f2ecbe91679fefc1f75e7cd2072cb4f497abaab3f0f8523/rignore-0.7.6-cp314-cp314-win32.whl", hash = "sha256:eeef421c1782953c4375aa32f06ecae470c1285c6381eee2a30d2e02a5633001", size = 646385, upload-time = "2025-11-05T21:41:55.105Z" }, + { url = "https://files.pythonhosted.org/packages/d5/38/c0dcd7b10064f084343d6af26fe9414e46e9619c5f3224b5272e8e5d9956/rignore-0.7.6-cp314-cp314-win_amd64.whl", hash = "sha256:6aeed503b3b3d5af939b21d72a82521701a4bd3b89cd761da1e7dc78621af304", size = 725738, upload-time = "2025-11-05T21:41:39.736Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7a/290f868296c1ece914d565757ab363b04730a728b544beb567ceb3b2d96f/rignore-0.7.6-cp314-cp314-win_arm64.whl", hash = "sha256:104f215b60b3c984c386c3e747d6ab4376d5656478694e22c7bd2f788ddd8304", size = 656008, upload-time = "2025-11-05T21:41:29.028Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d2/3c74e3cd81fe8ea08a8dcd2d755c09ac2e8ad8fe409508904557b58383d3/rignore-0.7.6-cp314-cp314t-macosx_10_12_x86_64.whl", 
hash = "sha256:bb24a5b947656dd94cb9e41c4bc8b23cec0c435b58be0d74a874f63c259549e8", size = 882835, upload-time = "2025-11-05T20:42:45.443Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/a772a34b6b63154877433ac2d048364815b24c2dd308f76b212c408101a2/rignore-0.7.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b1e33c9501cefe24b70a1eafd9821acfd0ebf0b35c3a379430a14df089993e3", size = 820301, upload-time = "2025-11-05T20:42:29.226Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/054880b09c0b1b61d17eeb15279d8bf729c0ba52b36c3ada52fb827cbb3c/rignore-0.7.6-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bec3994665a44454df86deb762061e05cd4b61e3772f5b07d1882a8a0d2748d5", size = 897611, upload-time = "2025-11-05T20:40:56.475Z" }, + { url = "https://files.pythonhosted.org/packages/1e/40/b2d1c169f833d69931bf232600eaa3c7998ba4f9a402e43a822dad2ea9f2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26cba2edfe3cff1dfa72bddf65d316ddebf182f011f2f61538705d6dbaf54986", size = 873875, upload-time = "2025-11-05T20:41:11.561Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/ca5ae93d83a1a60e44b21d87deb48b177a8db1b85e82fc8a9abb24a8986d/rignore-0.7.6-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffa86694fec604c613696cb91e43892aa22e1fec5f9870e48f111c603e5ec4e9", size = 1167245, upload-time = "2025-11-05T20:41:28.29Z" }, + { url = "https://files.pythonhosted.org/packages/a5/52/cf3dce392ba2af806cba265aad6bcd9c48bb2a6cb5eee448d3319f6e505b/rignore-0.7.6-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48efe2ed95aa8104145004afb15cdfa02bea5cdde8b0344afeb0434f0d989aa2", size = 941750, upload-time = "2025-11-05T20:41:43.111Z" }, + { url = "https://files.pythonhosted.org/packages/ec/be/3f344c6218d779395e785091d05396dfd8b625f6aafbe502746fcd880af2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8dcae43eb44b7f2457fef7cc87f103f9a0013017a6f4e62182c565e924948f21", size = 958896, upload-time = "2025-11-05T20:42:13.784Z" }, + { url = "https://files.pythonhosted.org/packages/c9/34/d3fa71938aed7d00dcad87f0f9bcb02ad66c85d6ffc83ba31078ce53646a/rignore-0.7.6-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2cd649a7091c0dad2f11ef65630d30c698d505cbe8660dd395268e7c099cc99f", size = 983992, upload-time = "2025-11-05T20:41:58.022Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/52a697158e9920705bdbd0748d59fa63e0f3233fb92e9df9a71afbead6ca/rignore-0.7.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42de84b0289d478d30ceb7ae59023f7b0527786a9a5b490830e080f0e4ea5aeb", size = 1078181, upload-time = "2025-11-05T21:40:18.151Z" }, + { url = "https://files.pythonhosted.org/packages/ac/65/aa76dbcdabf3787a6f0fd61b5cc8ed1e88580590556d6c0207960d2384bb/rignore-0.7.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:875a617e57b53b4acbc5a91de418233849711c02e29cc1f4f9febb2f928af013", size = 1139232, upload-time = "2025-11-05T21:40:35.966Z" }, + { url = "https://files.pythonhosted.org/packages/08/44/31b31a49b3233c6842acc1c0731aa1e7fb322a7170612acf30327f700b44/rignore-0.7.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8703998902771e96e49968105207719f22926e4431b108450f3f430b4e268b7c", size = 1117349, upload-time = "2025-11-05T21:40:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ae/1b199a2302c19c658cf74e5ee1427605234e8c91787cfba0015f2ace145b/rignore-0.7.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:602ef33f3e1b04c1e9a10a3c03f8bc3cef2d2383dcc250d309be42b49923cabc", size = 1127702, upload-time = "2025-11-05T21:41:10.881Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = 
"2025-11-05T21:42:00.095Z" }, + { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, + { url = "https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/a6250ff0c49a3cdb943910ada4116e708118e9b901c878cfae616c80a904/rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f", size = 886470, upload-time = "2025-11-05T20:42:52.314Z" }, + { url = "https://files.pythonhosted.org/packages/35/af/c69c0c51b8f9f7914d95c4ea91c29a2ac067572048cae95dd6d2efdbe05d/rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25", size = 825976, upload-time = "2025-11-05T20:42:35.118Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d2/1b264f56132264ea609d3213ab603d6a27016b19559a1a1ede1a66a03dcd/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a", size = 899739, upload-time = "2025-11-05T20:41:01.518Z" }, + { url = "https://files.pythonhosted.org/packages/55/e4/b3c5dfdd8d8a10741dfe7199ef45d19a0e42d0c13aa377c83bd6caf65d90/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538", size = 874843, upload-time = "2025-11-05T20:41:17.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/10/d6f3750233881a2a154cefc9a6a0a9b19da526b19f7f08221b552c6f827d/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b", size = 1170348, upload-time = "2025-11-05T20:41:34.21Z" }, + { url = "https://files.pythonhosted.org/packages/6e/10/ad98ca05c9771c15af734cee18114a3c280914b6e34fde9ffea2e61e88aa/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32", size = 942315, upload-time = "2025-11-05T20:41:48.508Z" }, + { url = "https://files.pythonhosted.org/packages/de/00/ab5c0f872acb60d534e687e629c17e0896c62da9b389c66d3aa16b817aa8/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767", size = 961047, upload-time = "2025-11-05T20:42:19.403Z" }, + { url = "https://files.pythonhosted.org/packages/b8/86/3030fdc363a8f0d1cd155b4c453d6db9bab47a24fcc64d03f61d9d78fe6a/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0", size = 986090, upload-time = "2025-11-05T20:42:03.581Z" }, + { url = "https://files.pythonhosted.org/packages/33/b8/133aa4002cee0ebbb39362f94e4898eec7fbd09cec9fcbce1cd65b355b7f/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7", size = 1079656, upload-time = "2025-11-05T21:40:24.89Z" }, + { url = "https://files.pythonhosted.org/packages/67/56/36d5d34210e5e7dfcd134eed8335b19e80ae940ee758f493e4f2b344dd70/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c", size = 1139789, upload-time = "2025-11-05T21:40:42.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/5b/bb4f9420802bf73678033a4a55ab1bede36ce2e9b41fec5f966d83d932b3/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79", size = 1120308, upload-time = "2025-11-05T21:40:59.402Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8b/a1299085b28a2f6135e30370b126e3c5055b61908622f2488ade67641479/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb", size = 1129444, upload-time = "2025-11-05T21:41:17.906Z" }, ] [[package]] @@ -1280,27 +2355,49 @@ wheels = [ [[package]] name = "ruff" -version = "0.11.13" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" }, - { url = "https://files.pythonhosted.org/packages/78/db/87c3b59b0d4e753e40b6a3b4a2642dfd1dcaefbff121ddc64d6c8b47ba00/ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48", size = 11106083, upload-time = "2025-06-05T20:59:37.03Z" }, - { url = "https://files.pythonhosted.org/packages/77/79/d8cec175856ff810a19825d09ce700265f905c643c69f45d2b737e4a470a/ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b", size = 10436024, upload-time = "2025-06-05T20:59:39.741Z" }, - { url 
= "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" }, - { url = "https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" }, - { url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" }, - { url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" }, - { url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" }, - { url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" }, - { url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" }, - { url = "https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" }, - { url = "https://files.pythonhosted.org/packages/7c/91/263e33ab93ab09ca06ce4f8f8547a858cc198072f873ebc9be7466790bae/ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250", size = 10474944, upload-time = "2025-06-05T21:00:08.459Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/f4/7c27734ac2073aae8efb0119cae6931b6fb48017adf048fdf85c19337afc/ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3", size = 11548669, upload-time = "2025-06-05T21:00:11.147Z" }, - { url = "https://files.pythonhosted.org/packages/ec/bf/b273dd11673fed8a6bd46032c0ea2a04b2ac9bfa9c628756a5856ba113b0/ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b", size = 10683928, upload-time = "2025-06-05T21:00:13.758Z" }, +version = "0.15.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = 
"sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.59.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/e0/9bf5e5fc7442b10880f3ec0eff0ef4208b84a099606f343ec4f5445227fb/sentry_sdk-2.59.0.tar.gz", hash = "sha256:cd265808ef8bf3f3edf69b527c0a0b2b6b1322762679e55b8987db2e9584aec1", size = 447331, upload-time = "2026-05-04T12:19:06.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/00/b8cc413748fb6383d1582e7cda51314f99743351c462a92dc690d5b5853b/sentry_sdk-2.59.0-py2.py3-none-any.whl", hash = "sha256:abcf65ee9a9d9cdebf9ad369782408ecca9c1c792686ef06ba34f5ab233527fe", size = 468432, upload-time = "2026-05-04T12:19:04.741Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] [[package]] @@ -1313,49 +2410,72 @@ wheels = [ ] [[package]] -name = 
"sniffio" -version = "1.3.1" +name = "sqladmin" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +dependencies = [ + { name = "jinja2" }, + { name = "python-multipart" }, + { name = "sqlalchemy" }, + { name = "starlette" }, + { name = "wtforms" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/42/ac352f9afc118491a57c3acd1d54c84447f8d6a7b9420fdd9e4fe28b3ba7/sqladmin-0.25.0.tar.gz", hash = "sha256:370f183eca5ea95281176c086b23d704773e6c77b745342753601a9a9abcb93c", size = 1440449, upload-time = "2026-04-18T01:25:40.15Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + { url = "https://files.pythonhosted.org/packages/ba/1c/4a9bdeb541478a27fd8bf2af85faa3a1574a8ab3de0f07e8c01b29ccd9ff/sqladmin-0.25.0-py3-none-any.whl", hash = "sha256:fd45a7c78e989cb1cf8242f94bd108475f5d35622d79035a2cb147350f54ce1d", size = 1456019, upload-time = "2026-04-18T01:25:38.523Z" }, ] [[package]] name = "sqlalchemy" -version = "2.0.41" +version = "2.0.49" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and 
platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" }, - { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" }, - { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" }, - { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = 
"2025-05-14T17:55:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" }, - { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" }, - { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" }, - { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" }, - { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" }, - { url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" }, - { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" }, - { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/ad/2e1c6d4f235a97eeef52d0200d8ddda16f6c4dd70ae5ad88c46963440480/sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443", size = 2115491, upload-time = "2025-05-14T17:55:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/cf/8d/be490e5db8400dacc89056f78a52d44b04fbf75e8439569d5b879623a53b/sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc", size = 2102827, upload-time = "2025-05-14T17:55:34.921Z" }, - { url = "https://files.pythonhosted.org/packages/a0/72/c97ad430f0b0e78efaf2791342e13ffeafcbb3c06242f01a3bb8fe44f65d/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1", size = 3225224, upload-time = "2025-05-14T17:50:41.418Z" }, - { url = "https://files.pythonhosted.org/packages/5e/51/5ba9ea3246ea068630acf35a6ba0d181e99f1af1afd17e159eac7e8bc2b8/sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a", size = 3230045, upload-time = "2025-05-14T17:51:54.722Z" }, - { url = "https://files.pythonhosted.org/packages/78/2f/8c14443b2acea700c62f9b4a8bad9e49fc1b65cfb260edead71fd38e9f19/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d", size = 3159357, upload-time = "2025-05-14T17:50:43.483Z" }, - { url = "https://files.pythonhosted.org/packages/fc/b2/43eacbf6ccc5276d76cea18cb7c3d73e294d6fb21f9ff8b4eef9b42bbfd5/sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23", size = 3197511, upload-time = "2025-05-14T17:51:57.308Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/2e/677c17c5d6a004c3c45334ab1dbe7b7deb834430b282b8a0f75ae220c8eb/sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f", size = 2082420, upload-time = "2025-05-14T17:55:52.69Z" }, - { url = "https://files.pythonhosted.org/packages/e9/61/e8c1b9b6307c57157d328dd8b8348ddc4c47ffdf1279365a13b2b98b8049/sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df", size = 2108329, upload-time = "2025-05-14T17:55:54.495Z" }, - { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/b5/e3617cc67420f8f403efebd7b043128f94775e57e5b84e7255203390ceae/sqlalchemy-2.0.49-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5070135e1b7409c4161133aa525419b0062088ed77c92b1da95366ec5cbebbe", size = 2159126, upload-time = "2026-04-03T16:50:13.242Z" }, + { url = "https://files.pythonhosted.org/packages/20/9b/91ca80403b17cd389622a642699e5f6564096b698e7cdcbcbb6409898bc4/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ac7a3e245fd0310fd31495eb61af772e637bdf7d88ee81e7f10a3f271bff014", size = 3315509, upload-time = "2026-04-03T16:54:49.332Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/61/0722511d98c54de95acb327824cb759e8653789af2b1944ab1cc69d32565/sqlalchemy-2.0.49-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d4e5a0ceba319942fa6b585cf82539288a61e314ef006c1209f734551ab9536", size = 3315014, upload-time = "2026-04-03T16:56:56.376Z" }, + { url = "https://files.pythonhosted.org/packages/46/55/d514a653ffeb4cebf4b54c47bec32ee28ad89d39fafba16eeed1d81dccd5/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ddcb27fb39171de36e207600116ac9dfd4ae46f86c82a9bf3934043e80ebb88", size = 3267388, upload-time = "2026-04-03T16:54:51.272Z" }, + { url = "https://files.pythonhosted.org/packages/2f/16/0dcc56cb6d3335c1671a2258f5d2cb8267c9a2260e27fde53cbfb1b3540a/sqlalchemy-2.0.49-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:32fe6a41ad97302db2931f05bb91abbcc65b5ce4c675cd44b972428dd2947700", size = 3289602, upload-time = "2026-04-03T16:56:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/51/6c/f8ab6fb04470a133cd80608db40aa292e6bae5f162c3a3d4ab19544a67af/sqlalchemy-2.0.49-cp311-cp311-win32.whl", hash = "sha256:46d51518d53edfbe0563662c96954dc8fcace9832332b914375f45a99b77cc9a", size = 2119044, upload-time = "2026-04-03T17:00:53.455Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/55a6d627d04b6ebb290693681d7683c7da001eddf90b60cfcc41ee907978/sqlalchemy-2.0.49-cp311-cp311-win_amd64.whl", hash = "sha256:951d4a210744813be63019f3df343bf233b7432aadf0db54c75802247330d3af", size = 2143642, upload-time = "2026-04-03T17:00:54.769Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" }, + { url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/81/81755f50eb2478eaf2049728491d4ea4f416c1eb013338682173259efa09/sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120", size = 2154547, upload-time = "2026-04-03T16:53:08.64Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/3494270da80811d08bcfa247404292428c4fe16294932bce5593f215cad9/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2", size = 3280782, upload-time = "2026-04-03T17:07:43.508Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f5/038741f5e747a5f6ea3e72487211579d8cbea5eb9827a9cbd61d0108c4bd/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3", size = 3297156, upload-time = "2026-04-03T17:12:27.697Z" }, + { url = "https://files.pythonhosted.org/packages/88/50/a6af0ff9dc954b43a65ca9b5367334e45d99684c90a3d3413fc19a02d43c/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7", size = 3228832, upload-time = "2026-04-03T17:07:45.38Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d1/5f6bdad8de0bf546fc74370939621396515e0cdb9067402d6ba1b8afbe9a/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33", size = 3267000, upload-time = "2026-04-03T17:12:29.657Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/ad62227b4a9819a5e1c6abff77c0f614fa7c9326e5a3bdbee90f7139382b/sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b", size = 2115641, upload-time = "2026-04-03T17:05:43.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/3a/7215b1b7d6d49dc9a87211be44562077f5f04f9bb5a59552c1c8e2d98173/sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148", size = 2141498, upload-time = "2026-04-03T17:05:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/28/4b/52a0cb2687a9cd1648252bb257be5a1ba2c2ded20ba695c65756a55a15a4/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518", size = 3560807, upload-time = "2026-04-03T16:58:31.666Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d8/fda95459204877eed0458550d6c7c64c98cc50c2d8d618026737de9ed41a/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d", size = 3527481, upload-time = "2026-04-03T17:06:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/2aac8b78ac6487240cf7afef8f203ca783e8796002dc0cf65c4ee99ff8bb/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0", size = 3468565, upload-time = "2026-04-03T16:58:33.414Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/ce71cfa82c50a373fd2148b3c870be05027155ce791dc9a5dcf439790b8b/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08", size = 3477769, upload-time = "2026-04-03T17:06:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e8/0a9f5c1f7c6f9ca480319bf57c2d7423f08d31445974167a27d14483c948/sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d", size = 2143319, upload-time = "2026-04-03T17:02:04.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/51/fb5240729fbec73006e137c4f7a7918ffd583ab08921e6ff81a999d6517a/sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba", size = 2175104, upload-time = "2026-04-03T17:02:05.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/33/bf28f618c0a9597d14e0b9ee7d1e0622faff738d44fe986ee287cdf1b8d0/sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e", size = 2156356, upload-time = "2026-04-03T16:53:09.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a7/5f476227576cb8644650eff68cc35fa837d3802b997465c96b8340ced1e2/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a", size = 3276486, upload-time = "2026-04-03T17:07:46.9Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/efc7c0bf3a1c5eef81d397f6fddac855becdbb11cb38ff957888603014a7/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066", size = 3281479, upload-time = "2026-04-03T17:12:32.226Z" }, + { url = "https://files.pythonhosted.org/packages/91/68/bb406fa4257099c67bd75f3f2261b129c63204b9155de0d450b37f004698/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187", size = 3226269, upload-time = "2026-04-03T17:07:48.678Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/acb56c00cca9f251f437cb49e718e14f7687505749ea9255d7bd8158a6df/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401", size = 3248260, upload-time = "2026-04-03T17:12:34.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/19/6a20ea25606d1efd7bd1862149bb2a22d1451c3f851d23d887969201633f/sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5", size = 2118463, upload-time = "2026-04-03T17:05:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4f/8297e4ed88e80baa1f5aa3c484a0ee29ef3c69c7582f206c916973b75057/sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5", size = 2144204, upload-time = "2026-04-03T17:05:48.694Z" }, + { url = "https://files.pythonhosted.org/packages/1f/33/95e7216df810c706e0cd3655a778604bbd319ed4f43333127d465a46862d/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977", size = 3565474, upload-time = "2026-04-03T16:58:35.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a4/ed7b18d8ccf7f954a83af6bb73866f5bc6f5636f44c7731fbb741f72cc4f/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01", size = 3530567, upload-time = "2026-04-03T17:06:04.587Z" }, + { url = "https://files.pythonhosted.org/packages/73/a3/20faa869c7e21a827c4a2a42b41353a54b0f9f5e96df5087629c306df71e/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61", size = 3474282, upload-time = "2026-04-03T16:58:37.131Z" }, + { url = "https://files.pythonhosted.org/packages/b7/50/276b9a007aa0764304ad467eceb70b04822dc32092492ee5f322d559a4dc/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a", size = 3480406, upload-time = "2026-04-03T17:06:07.176Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/c3/c80fcdb41905a2df650c2a3e0337198b6848876e63d66fe9188ef9003d24/sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158", size = 2149151, upload-time = "2026-04-03T17:02:07.281Z" }, + { url = "https://files.pythonhosted.org/packages/05/52/9f1a62feab6ed368aff068524ff414f26a6daebc7361861035ae00b05530/sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7", size = 2184178, upload-time = "2026-04-03T17:02:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" }, ] [[package]] @@ -1372,70 +2492,120 @@ wheels = [ [[package]] name = "starlette" -version = "0.40.0" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/cb/244daf0d7be4508099ad5bca3cdfe8b8b5538acd719c5f397f614e569fff/starlette-0.40.0.tar.gz", hash = "sha256:1a3139688fb298ce5e2d661d37046a66ad996ce94be4d4983be019a23a04ea35", size = 2573611, upload-time = "2024-10-15T06:52:34.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/0f/64baf7a06492e8c12f5c4b49db286787a7255195df496fc21f5fd9eecffa/starlette-0.40.0-py3-none-any.whl", hash = 
"sha256:c494a22fae73805376ea6bf88439783ecfba9aac88a43911b48c653437e784c4", size = 73303, upload-time = "2024-10-15T06:52:32.486Z" }, + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, ] [[package]] -name = "structlog" -version = "25.5.0" +name = "taskiq" +version = "0.12.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/52/9ba0f43b686e7f3ddfeaa78ac3af750292662284b3661e91ad5494f21dbc/structlog-25.5.0.tar.gz", hash = "sha256:098522a3bebed9153d4570c6d0288abf80a031dfdb2048d59a49e9dc2190fc98", size = 1460830, upload-time = "2025-10-27T08:28:23.028Z" } +dependencies = [ + { name = "aiohttp" }, + { name = "anyio" }, + { name = "packaging" }, + { name = "pycron" }, + { name = "pydantic" }, + { name = "taskiq-dependencies" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/7d/32bdfddd09fe3520bfb4978618dd87292201ca3f0bf9b9c413eb4ee946eb/taskiq-0.12.4.tar.gz", hash = "sha256:887e6acc72eea322a1515d9863551f00be7c81c6a0f72b2961569e58d68edc34", size = 397640, upload-time = "2026-05-08T10:43:59.724Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/45/a132b9074aa18e799b891b91ad72133c98d8042c70f6240e4c5f9dabee2f/structlog-25.5.0-py3-none-any.whl", hash = "sha256:a8453e9b9e636ec59bd9e79bbd4a72f025981b3ba0f5837aebf48f02f37a7f9f", size = 72510, upload-time = "2025-10-27T08:28:21.535Z" }, + { url = "https://files.pythonhosted.org/packages/28/0a/35b7b2673f923fd57bda8820afee03a6bb57883c1a61ee9e04e31e37ca50/taskiq-0.12.4-py3-none-any.whl", hash = "sha256:b910d3676e45b317678b4a8a1865ba6c261f696a02541461c765a7773cf4fa4b", size = 91804, upload-time = "2026-05-08T10:44:00.889Z" }, ] [[package]] -name = "types-cffi" -version = "1.17.0.20250523" +name = 
"taskiq-aio-pika" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "types-setuptools" }, + { name = "aio-pika" }, + { name = "aiostream" }, + { name = "taskiq" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/5f/ac80a2f55757019e5d4809d17544569c47a623565258ca1a836ba951d53f/types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22", size = 16858, upload-time = "2025-05-23T03:05:40.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/05/e9f4e5cbc7f9777a09f493e502242922df2d3e3779364d0292313995d68c/taskiq_aio_pika-0.6.0.tar.gz", hash = "sha256:0a4ec304a5e860e205aaea5077d90d2a009a4842f3ee008b5185c29301992ed9", size = 9492, upload-time = "2026-02-28T12:24:20.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/57/b06600675ef8ab6352f30632c0ece20d592f922531b3f490a0559ed792ea/taskiq_aio_pika-0.6.0-py3-none-any.whl", hash = "sha256:6bff38b61b24afd7d41b78ea9ffca0702fe9653e82289ca1287b063a53af2145", size = 10789, upload-time = "2026-02-28T12:24:19.654Z" }, +] + +[[package]] +name = "taskiq-dependencies" +version = "1.5.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/90/47a627696e53bfdcacabc3e8c05b73bf1424685bcb5f17209cb8b12da1bf/taskiq_dependencies-1.5.7.tar.gz", hash = "sha256:0d3b240872ef152b719153b9526d866d2be978aeeaea6600e878414babc2dcb4", size = 14875, upload-time = "2025-02-26T22:07:39.876Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/86/e26e6ae4dfcbf6031b8422c22cf3a9eb2b6d127770406e7645b6248d8091/types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9", size = 20010, upload-time = "2025-05-23T03:05:39.136Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/6d/4a012f2de002c2e93273f5e7d3e3feea02f7fdbb7b75ca2ca1dd10703091/taskiq_dependencies-1.5.7-py3-none-any.whl", hash = "sha256:6fcee5d159bdb035ef915d4d848826169b6f06fe57cc2297a39b62ea3e76036f", size = 13801, upload-time = "2025-02-26T22:07:38.622Z" }, ] [[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" +name = "taskiq-redis" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cryptography" }, - { name = "types-cffi" }, + { name = "redis" }, + { name = "taskiq" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458, upload-time = "2024-07-22T02:32:22.558Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/0a/c555ac1d922e03b9fde2b1b609572a310a252f4bb79fbf964c3039efb6ff/taskiq_redis-1.2.2.tar.gz", hash = "sha256:103c488d143138bab8fc84044dbe68cd3561251090695a6042120398e9915325", size = 14460, upload-time = "2026-02-03T20:26:58.189Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499, upload-time = "2024-07-22T02:32:21.232Z" }, + { url = "https://files.pythonhosted.org/packages/30/a6/a28f8e06540c041c03e9028a100c5b8949a01c4308f286a6c74197c3bf32/taskiq_redis-1.2.2-py3-none-any.whl", hash = "sha256:574d085c0c07f7fa9945e51195fe2db5b9d3c2a07bcfdc5a7ca323eae5319dff", size = 20666, upload-time = "2026-02-03T20:26:55.706Z" }, ] [[package]] -name = "types-redis" -version = "4.6.0.20241004" +name = "testcontainers" +version = "4.14.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cryptography" }, - { name 
= "types-pyopenssl" }, + { name = "docker" }, + { name = "python-dotenv" }, + { name = "typing-extensions" }, + { name = "urllib3" }, + { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679, upload-time = "2024-10-04T02:43:59.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/ac/a597c3a0e02b26cbed6dd07df68be1e57684766fd1c381dee9b170a99690/testcontainers-4.14.2.tar.gz", hash = "sha256:1340ccf16fe3acd9389a6c9e1d9ab21d9fe99a8afdf8165f89c3e69c1967d239", size = 166841, upload-time = "2026-03-18T05:19:16.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737, upload-time = "2024-10-04T02:43:57.968Z" }, + { url = "https://files.pythonhosted.org/packages/13/2d/26b8b30067d94339afee62c3edc9b803a6eb9332f521ba77d8aaab5de873/testcontainers-4.14.2-py3-none-any.whl", hash = "sha256:0d0522c3cd8f8d9627cda41f7a6b51b639fa57bdc492923c045117933c668d68", size = 125712, upload-time = "2026-03-18T05:19:15.29Z" }, ] [[package]] -name = "types-setuptools" -version = "80.9.0.20250529" +name = "typer" +version = "0.25.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/66/1b276526aad4696a9519919e637801f2c103419d2c248a6feb2729e034d1/types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91", size = 41337, upload-time = "2025-05-29T03:07:34.487Z" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/e4/51/9aed62104cea109b820bbd6c14245af756112017d309da813ef107d42e7e/typer-0.25.1.tar.gz", hash = "sha256:9616eb8853a09ffeabab1698952f33c6f29ffdbceb4eaeecf571880e8d7664cc", size = 122276, upload-time = "2026-04-30T19:32:16.964Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/d8/83790d67ec771bf029a45ff1bd1aedbb738d8aa58c09dd0cc3033eea0e69/types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f", size = 63263, upload-time = "2025-05-29T03:07:33.064Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f9/2b3ff4e56e5fa7debfaf9eb135d0da96f3e9a1d5b27222223c7296336e5f/typer-0.25.1-py3-none-any.whl", hash = "sha256:75caa44ed46a03fb2dab8808753ffacdbfea88495e74c85a28c5eefcf5f39c89", size = 58409, upload-time = "2026-04-30T19:32:18.271Z" }, +] + +[[package]] +name = "types-pyasn1" +version = "0.6.0.20260408" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/c0/02f897fc8543f64fa6b1ca6a30d388e37c4ec2f761f469a2d9a29b89cdef/types_pyasn1-0.6.0.20260408.tar.gz", hash = "sha256:32dc90927adbe504fd2eee83ae30cf5ef934e5db0d1d94886071fed47eb50c8c", size = 17312, upload-time = "2026-04-08T04:27:16.874Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/a5/473e06d5aaec3730aab5a9d40c2044e673c927412c24bd7f3fa0df7e95d3/types_pyasn1-0.6.0.20260408-py3-none-any.whl", hash = "sha256:ee7fbd98bce61193c5d4f8f7812fa53cddc5b8cc5ceb9fcda6eea539947c6d6b", size = 24044, upload-time = "2026-04-08T04:27:16.002Z" }, +] + +[[package]] +name = "types-python-jose" +version = "3.5.0.20260408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/b1/87cdb410d22913df39d7aad8864f94a4f5ad8d507ee07556888fdbe55e19/types_python_jose-3.5.0.20260408.tar.gz", hash = 
"sha256:3f8dccdc327bfffea7a81084ea1cea722fa499f13c1d04f7978b491dd36e0cf1", size = 11989, upload-time = "2026-04-08T04:34:10.577Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/83/df2b34e64f0a674935d718471cf10fb392a7e5bdb0e9e7c739885b62d274/types_python_jose-3.5.0.20260408-py3-none-any.whl", hash = "sha256:968d8a8eac1ff9da249d6335a2bb9f82288d59ba23afe91fcc2662eb9f485e2a", size = 14694, upload-time = "2026-04-08T04:34:09.747Z" }, ] [[package]] @@ -1461,31 +2631,40 @@ wheels = [ [[package]] name = "tzdata" -version = "2025.2" +version = "2026.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/19/1b9b0e29f30c6d35cb345486df41110984ea67ae69dddbc0e8a100999493/tzdata-2026.2.tar.gz", hash = "sha256:9173fde7d80d9018e02a662e168e5a2d04f87c41ea174b139fbef642eda62d10", size = 198254, upload-time = "2026-04-24T15:22:08.651Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e4/dccd7f47c4b64213ac01ef921a1337ee6e30e8c6466046018326977efd95/tzdata-2026.2-py2.py3-none-any.whl", hash = "sha256:bbe9af844f658da81a5f95019480da3a89415801f6cc966806612cc7169bffe7", size = 349321, upload-time = "2026-04-24T15:22:05.876Z" }, ] [[package]] name = "ua-parser" -version = "1.0.1" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ua-parser-builtins" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/70/0e/ed98be735bc89d5040e0c60f5620d0b8c04e9e7da99ed1459e8050e90a77/ua_parser-1.0.1.tar.gz", hash = "sha256:f9d92bf19d4329019cef91707aecc23c6d65143ad7e29a233f0580fb0d15547d", size = 728106, upload-time = "2025-02-01T14:13:32.508Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/98/5e4b52d772a048af122a6fc5ce365c311efb9f5e79c55fd4fdd7c9f59e83/ua_parser-1.0.2.tar.gz", hash = "sha256:bab404ad42fb37f943107da2f6003ffc79724d11cc95076a7a539513371779da", size = 33239, upload-time = "2026-04-05T20:14:28.229Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/37/be6dfbfa45719aa82c008fb4772cfe5c46db765a2ca4b6f524a1fdfee4d7/ua_parser-1.0.1-py3-none-any.whl", hash = "sha256:b059f2cb0935addea7e551251cbbf42e9a8872f86134163bc1a4f79e0945ffea", size = 31410, upload-time = "2025-02-01T14:13:28.458Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7c/6367995ff57aaa2d9e1055adbaec2519cf5a979780a83a93fdf8c6ec37be/ua_parser-1.0.2-py3-none-any.whl", hash = "sha256:0f8e6d0484af2a9ff804bba5a4fe696e87c028eaba98ad9a7dfae873fef7788a", size = 31219, upload-time = "2026-04-05T20:14:26.913Z" }, ] [[package]] name = "ua-parser-builtins" -version = "0.18.0.post1" +version = "202605" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/42/178db21aab1815583fcdb8ae465fc006b384fbe679412b11ddf8aae90f38/ua_parser_builtins-202605-py3-none-any.whl", hash = "sha256:a86976baa4b7c69a54269fe54091e3f0c7666f15a0f893855ff907a3bb6d878c", size = 90591, upload-time = "2026-05-01T21:25:50.636Z" }, +] + +[[package]] +name = "urllib3" +version = "2.7.0" source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/0c/06f8b233b8fd13b9e5ee11424ef85419ba0d8ba0b3138bf360be2ff56953/urllib3-2.7.0.tar.gz", hash = "sha256:231e0ec3b63ceb14667c67be60f2f2c40a518cb38b03af60abc813da26505f4c", size = 433602, upload-time = 
"2026-05-07T16:13:18.596Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/d3/13adff37f15489c784cc7669c35a6c3bf94b87540229eedf52ef2a1d0175/ua_parser_builtins-0.18.0.post1-py3-none-any.whl", hash = "sha256:eb4f93504040c3a990a6b0742a2afd540d87d7f9f05fd66e94c101db1564674d", size = 86077, upload-time = "2024-12-05T18:44:36.732Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3e/5db95bcf282c52709639744ca2a8b149baccf648e39c8cc87553df9eae0c/urllib3-2.7.0-py3-none-any.whl", hash = "sha256:9fb4c81ebbb1ce9531cce37674bbc6f1360472bc18ca9a553ede278ef7276897", size = 131087, upload-time = "2026-05-07T16:13:17.151Z" }, ] [[package]] @@ -1500,70 +2679,419 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8f/1c/20bb3d7b2bad56d881e3704131ddedbb16eb787101306887dff349064662/user_agents-2.2.0-py3-none-any.whl", hash = "sha256:a98c4dc72ecbc64812c4534108806fb0a0b3a11ec3fd1eafe807cee5b0a942e7", size = 9614, upload-time = "2020-08-23T06:01:54.047Z" }, ] -[[package]] -name = "uuid" -version = "1.30" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/63/f42f5aa951ebf2c8dac81f77a8edcc1c218640a2a35a03b9ff2d4aa64c3d/uuid-1.30.tar.gz", hash = "sha256:1f87cc004ac5120466f36c5beae48b4c48cc411968eed0eaecd3da82aa96193f", size = 5811, upload-time = "2007-05-26T11:13:24Z" } - -[[package]] -name = "uuid6" -version = "2025.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = "2025-07-04T18:30:35.186Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", 
size = 6979, upload-time = "2025-07-04T18:30:34.001Z" }, -] - [[package]] name = "uvicorn" -version = "0.34.3" +version = "0.46.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/ad/713be230bcda622eaa35c28f0d328c3675c371238470abdea52417f17a8e/uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a", size = 76631, upload-time = "2025-06-01T07:48:17.531Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/93/041fca8274050e40e6791f267d82e0e2e27dd165627bd640d3e0e378d877/uvicorn-0.46.0.tar.gz", hash = "sha256:fb9da0926999cc6cb22dc7cd71a94a632f078e6ae47ff683c5c420750fb7413d", size = 88758, upload-time = "2026-04-23T07:16:00.151Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/0d/8adfeaa62945f90d19ddc461c55f4a50c258af7662d34b6a3d5d1f8646f6/uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885", size = 62431, upload-time = "2025-06-01T07:48:15.664Z" }, + { url = "https://files.pythonhosted.org/packages/31/a3/5b1562db76a5a488274b2332a97199b32d0442aca0ed193697fd47786316/uvicorn-0.46.0-py3-none-any.whl", hash = "sha256:bbebbcbed972d162afca128605223022bedd345b7bc7855ce66deb31487a9048", size = 70926, upload-time = "2026-04-23T07:15:58.355Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, ] [[package]] name = "uvloop" -version = "0.21.0" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = 
"2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = 
"2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = 
"2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = 
"2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url 
= "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, 
upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "wrapt" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/64/925f213fdcbb9baeb1530449ac71a4d57fc361c053d06bf78d0c5c7cd80c/wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e", size = 81678, upload-time 
= "2026-03-06T02:53:25.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/81/60c4471fce95afa5922ca09b88a25f03c93343f759aae0f31fb4412a85c7/wrapt-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:96159a0ee2b0277d44201c3b5be479a9979cf154e8c82fa5df49586a8e7679bb", size = 60666, upload-time = "2026-03-06T02:52:58.934Z" }, + { url = "https://files.pythonhosted.org/packages/6b/be/80e80e39e7cb90b006a0eaf11c73ac3a62bbfb3068469aec15cc0bc795de/wrapt-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98ba61833a77b747901e9012072f038795de7fc77849f1faa965464f3f87ff2d", size = 61601, upload-time = "2026-03-06T02:53:00.487Z" }, + { url = "https://files.pythonhosted.org/packages/b0/be/d7c88cd9293c859fc74b232abdc65a229bb953997995d6912fc85af18323/wrapt-2.1.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:767c0dbbe76cae2a60dd2b235ac0c87c9cccf4898aef8062e57bead46b5f6894", size = 114057, upload-time = "2026-03-06T02:52:44.08Z" }, + { url = "https://files.pythonhosted.org/packages/ea/25/36c04602831a4d685d45a93b3abea61eca7fe35dab6c842d6f5d570ef94a/wrapt-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c691a6bc752c0cc4711cc0c00896fcd0f116abc253609ef64ef930032821842", size = 116099, upload-time = "2026-03-06T02:54:56.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4e/98a6eb417ef551dc277bec1253d5246b25003cf36fdf3913b65cb7657a56/wrapt-2.1.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f3b7d73012ea75aee5844de58c88f44cf62d0d62711e39da5a82824a7c4626a8", size = 112457, upload-time = "2026-03-06T02:53:52.842Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a6/a6f7186a5297cad8ec53fd7578533b28f795fdf5372368c74bd7e6e9841c/wrapt-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:577dff354e7acd9d411eaf4bfe76b724c89c89c8fc9b7e127ee28c5f7bcb25b6", size = 115351, upload-time = 
"2026-03-06T02:53:32.684Z" }, + { url = "https://files.pythonhosted.org/packages/97/6f/06e66189e721dbebd5cf20e138acc4d1150288ce118462f2fcbff92d38db/wrapt-2.1.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d7b6fd105f8b24e5bd23ccf41cb1d1099796524bcc6f7fbb8fe576c44befbc9", size = 111748, upload-time = "2026-03-06T02:53:08.455Z" }, + { url = "https://files.pythonhosted.org/packages/ef/43/4808b86f499a51370fbdbdfa6cb91e9b9169e762716456471b619fca7a70/wrapt-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:866abdbf4612e0b34764922ef8b1c5668867610a718d3053d59e24a5e5fcfc15", size = 113783, upload-time = "2026-03-06T02:53:02.02Z" }, + { url = "https://files.pythonhosted.org/packages/91/2c/a3f28b8fa7ac2cefa01cfcaca3471f9b0460608d012b693998cd61ef43df/wrapt-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5a0a0a3a882393095573344075189eb2d566e0fd205a2b6414e9997b1b800a8b", size = 57977, upload-time = "2026-03-06T02:53:27.844Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c3/2b1c7bd07a27b1db885a2fab469b707bdd35bddf30a113b4917a7e2139d2/wrapt-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:64a07a71d2730ba56f11d1a4b91f7817dc79bc134c11516b75d1921a7c6fcda1", size = 60336, upload-time = "2026-03-06T02:54:28.104Z" }, + { url = "https://files.pythonhosted.org/packages/ec/5c/76ece7b401b088daa6503d6264dd80f9a727df3e6042802de9a223084ea2/wrapt-2.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:b89f095fe98bc12107f82a9f7d570dc83a0870291aeb6b1d7a7d35575f55d98a", size = 58756, upload-time = "2026-03-06T02:53:16.319Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b6/1db817582c49c7fcbb7df6809d0f515af29d7c2fbf57eb44c36e98fb1492/wrapt-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ff2aad9c4cda28a8f0653fc2d487596458c2a3f475e56ba02909e950a9efa6a9", size = 61255, upload-time = "2026-03-06T02:52:45.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/16/9b02a6b99c09227c93cd4b73acc3678114154ec38da53043c0ddc1fba0dc/wrapt-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6433ea84e1cfacf32021d2a4ee909554ade7fd392caa6f7c13f1f4bf7b8e8748", size = 61848, upload-time = "2026-03-06T02:53:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/af/aa/ead46a88f9ec3a432a4832dfedb84092fc35af2d0ba40cd04aea3889f247/wrapt-2.1.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c20b757c268d30d6215916a5fa8461048d023865d888e437fab451139cad6c8e", size = 121433, upload-time = "2026-03-06T02:54:40.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/9f/742c7c7cdf58b59085a1ee4b6c37b013f66ac33673a7ef4aaed5e992bc33/wrapt-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79847b83eb38e70d93dc392c7c5b587efe65b3e7afcc167aa8abd5d60e8761c8", size = 123013, upload-time = "2026-03-06T02:53:26.58Z" }, + { url = "https://files.pythonhosted.org/packages/e8/44/2c3dd45d53236b7ed7c646fcf212251dc19e48e599debd3926b52310fafb/wrapt-2.1.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f8fba1bae256186a83d1875b2b1f4e2d1242e8fac0f58ec0d7e41b26967b965c", size = 117326, upload-time = "2026-03-06T02:53:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/74/e2/b17d66abc26bd96f89dec0ecd0ef03da4a1286e6ff793839ec431b9fae57/wrapt-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3d3b35eedcf5f7d022291ecd7533321c4775f7b9cd0050a31a68499ba45757c", size = 121444, upload-time = "2026-03-06T02:54:09.5Z" }, + { url = "https://files.pythonhosted.org/packages/3c/62/e2977843fdf9f03daf1586a0ff49060b1b2fc7ff85a7ea82b6217c1ae36e/wrapt-2.1.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6f2c5390460de57fa9582bc8a1b7a6c86e1a41dfad74c5225fc07044c15cc8d1", size = 116237, upload-time = "2026-03-06T02:54:03.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/dd/27fc67914e68d740bce512f11734aec08696e6b17641fef8867c00c949fc/wrapt-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7dfa9f2cf65d027b951d05c662cc99ee3bd01f6e4691ed39848a7a5fffc902b2", size = 120563, upload-time = "2026-03-06T02:53:20.412Z" }, + { url = "https://files.pythonhosted.org/packages/ec/9f/b750b3692ed2ef4705cb305bd68858e73010492b80e43d2a4faa5573cbe7/wrapt-2.1.2-cp312-cp312-win32.whl", hash = "sha256:eba8155747eb2cae4a0b913d9ebd12a1db4d860fc4c829d7578c7b989bd3f2f0", size = 58198, upload-time = "2026-03-06T02:53:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b2/feecfe29f28483d888d76a48f03c4c4d8afea944dbee2b0cd3380f9df032/wrapt-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1c51c738d7d9faa0b3601708e7e2eda9bf779e1b601dce6c77411f2a1b324a63", size = 60441, upload-time = "2026-03-06T02:52:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/44/e1/e328f605d6e208547ea9fd120804fcdec68536ac748987a68c47c606eea8/wrapt-2.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:c8e46ae8e4032792eb2f677dbd0d557170a8e5524d22acc55199f43efedd39bf", size = 58836, upload-time = "2026-03-06T02:53:22.053Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7a/d936840735c828b38d26a854e85d5338894cda544cb7a85a9d5b8b9c4df7/wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b", size = 61259, upload-time = "2026-03-06T02:53:41.922Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/9a9b9a90ac8ca11c2fdb6a286cb3a1fc7dd774c00ed70929a6434f6bc634/wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e", size = 61851, upload-time = "2026-03-06T02:52:48.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/a9/5b7d6a16fd6533fed2756900fc8fc923f678179aea62ada6d65c92718c00/wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb", size = 121446, upload-time = "2026-03-06T02:54:14.013Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/34c443690c847835cfe9f892be78c533d4f32366ad2888972c094a897e39/wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca", size = 123056, upload-time = "2026-03-06T02:54:10.829Z" }, + { url = "https://files.pythonhosted.org/packages/93/b9/ff205f391cb708f67f41ea148545f2b53ff543a7ac293b30d178af4d2271/wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267", size = 117359, upload-time = "2026-03-06T02:53:03.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/1ea04d7747825119c3c9a5e0874a40b33594ada92e5649347c457d982805/wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f", size = 121479, upload-time = "2026-03-06T02:53:45.844Z" }, + { url = "https://files.pythonhosted.org/packages/78/cc/ee3a011920c7a023b25e8df26f306b2484a531ab84ca5c96260a73de76c0/wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8", size = 116271, upload-time = "2026-03-06T02:54:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/e5ff7ded41b76d802cf1191288473e850d24ba2e39a6ec540f21ae3b57cb/wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413", size = 120573, upload-time = "2026-03-06T02:52:50.163Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/c5/242cae3b5b080cd09bacef0591691ba1879739050cc7c801ff35c8886b66/wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6", size = 58205, upload-time = "2026-03-06T02:53:47.494Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/c358c61e7a50f290958809b3c61ebe8b3838ea3e070d7aac9814f95a0528/wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1", size = 60452, upload-time = "2026-03-06T02:53:30.038Z" }, + { url = "https://files.pythonhosted.org/packages/8e/66/c8a6fcfe321295fd8c0ab1bd685b5a01462a9b3aa2f597254462fc2bc975/wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf", size = 58842, upload-time = "2026-03-06T02:52:52.114Z" }, + { url = "https://files.pythonhosted.org/packages/da/55/9c7052c349106e0b3f17ae8db4b23a691a963c334de7f9dbd60f8f74a831/wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b", size = 63075, upload-time = "2026-03-06T02:53:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/ce7b4006f7218248dd71b7b2b732d0710845a0e49213b18faef64811ffef/wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18", size = 63719, upload-time = "2026-03-06T02:54:33.452Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e5/2ca472e80b9e2b7a17f106bb8f9df1db11e62101652ce210f66935c6af67/wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d", size = 152643, upload-time = "2026-03-06T02:52:42.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/42/30f0f2cefca9d9cbf6835f544d825064570203c3e70aa873d8ae12e23791/wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015", size = 158805, upload-time = "2026-03-06T02:54:25.441Z" }, + { url = "https://files.pythonhosted.org/packages/bb/67/d08672f801f604889dcf58f1a0b424fe3808860ede9e03affc1876b295af/wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92", size = 145990, upload-time = "2026-03-06T02:53:57.456Z" }, + { url = "https://files.pythonhosted.org/packages/68/a7/fd371b02e73babec1de6ade596e8cd9691051058cfdadbfd62a5898f3295/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf", size = 155670, upload-time = "2026-03-06T02:54:55.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/9fe0095dfdb621009f40117dcebf41d7396c2c22dca6eac779f4c007b86c/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67", size = 144357, upload-time = "2026-03-06T02:54:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/ec7b4a254abbe4cde9fa15c5d2cca4518f6b07d0f1b77d4ee9655e30280e/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a", size = 150269, upload-time = "2026-03-06T02:53:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/2fabe8ebf148f4ee3c782aae86a795cc68ffe7d432ef550f234025ce0cfa/wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd", size = 59894, upload-time = "2026-03-06T02:54:15.391Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/fb/9ba66fc2dedc936de5f8073c0217b5d4484e966d87723415cc8262c5d9c2/wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f", size = 63197, upload-time = "2026-03-06T02:54:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1c/012d7423c95d0e337117723eb8ecf73c622ce15a97847e84cf3f8f26cd7e/wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679", size = 60363, upload-time = "2026-03-06T02:54:48.093Z" }, + { url = "https://files.pythonhosted.org/packages/39/25/e7ea0b417db02bb796182a5316398a75792cd9a22528783d868755e1f669/wrapt-2.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1370e516598854e5b4366e09ce81e08bfe94d42b0fd569b88ec46cc56d9164a9", size = 61418, upload-time = "2026-03-06T02:53:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0f/fa539e2f6a770249907757eaeb9a5ff4deb41c026f8466c1c6d799088a9b/wrapt-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6de1a3851c27e0bd6a04ca993ea6f80fc53e6c742ee1601f486c08e9f9b900a9", size = 61914, upload-time = "2026-03-06T02:52:53.37Z" }, + { url = "https://files.pythonhosted.org/packages/53/37/02af1867f5b1441aaeda9c82deed061b7cd1372572ddcd717f6df90b5e93/wrapt-2.1.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:de9f1a2bbc5ac7f6012ec24525bdd444765a2ff64b5985ac6e0692144838542e", size = 120417, upload-time = "2026-03-06T02:54:30.74Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b7/0138a6238c8ba7476c77cf786a807f871672b37f37a422970342308276e7/wrapt-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:970d57ed83fa040d8b20c52fe74a6ae7e3775ae8cff5efd6a81e06b19078484c", size = 122797, upload-time = "2026-03-06T02:54:51.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/ad/819ae558036d6a15b7ed290d5b14e209ca795dd4da9c58e50c067d5927b0/wrapt-2.1.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3969c56e4563c375861c8df14fa55146e81ac11c8db49ea6fb7f2ba58bc1ff9a", size = 117350, upload-time = "2026-03-06T02:54:37.651Z" }, + { url = "https://files.pythonhosted.org/packages/8b/2d/afc18dc57a4600a6e594f77a9ae09db54f55ba455440a54886694a84c71b/wrapt-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:57d7c0c980abdc5f1d98b11a2aa3bb159790add80258c717fa49a99921456d90", size = 121223, upload-time = "2026-03-06T02:54:35.221Z" }, + { url = "https://files.pythonhosted.org/packages/b9/5b/5ec189b22205697bc56eb3b62aed87a1e0423e9c8285d0781c7a83170d15/wrapt-2.1.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:776867878e83130c7a04237010463372e877c1c994d449ca6aaafeab6aab2586", size = 116287, upload-time = "2026-03-06T02:54:19.654Z" }, + { url = "https://files.pythonhosted.org/packages/f7/2d/f84939a7c9b5e6cdd8a8d0f6a26cabf36a0f7e468b967720e8b0cd2bdf69/wrapt-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fab036efe5464ec3291411fabb80a7a39e2dd80bae9bcbeeca5087fdfa891e19", size = 119593, upload-time = "2026-03-06T02:54:16.697Z" }, + { url = "https://files.pythonhosted.org/packages/0b/fe/ccd22a1263159c4ac811ab9374c061bcb4a702773f6e06e38de5f81a1bdc/wrapt-2.1.2-cp314-cp314-win32.whl", hash = "sha256:e6ed62c82ddf58d001096ae84ce7f833db97ae2263bff31c9b336ba8cfe3f508", size = 58631, upload-time = "2026-03-06T02:53:06.498Z" }, + { url = "https://files.pythonhosted.org/packages/65/0a/6bd83be7bff2e7efaac7b4ac9748da9d75a34634bbbbc8ad077d527146df/wrapt-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:467e7c76315390331c67073073d00662015bb730c566820c9ca9b54e4d67fd04", size = 60875, upload-time = "2026-03-06T02:53:50.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/c0/0b3056397fe02ff80e5a5d72d627c11eb885d1ca78e71b1a5c1e8c7d45de/wrapt-2.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:da1f00a557c66225d53b095a97eace0fc5349e3bfda28fa34ffae238978ee575", size = 59164, upload-time = "2026-03-06T02:53:59.128Z" }, + { url = "https://files.pythonhosted.org/packages/71/ed/5d89c798741993b2371396eb9d4634f009ff1ad8a6c78d366fe2883ea7a6/wrapt-2.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:62503ffbc2d3a69891cf29beeaccdb4d5e0a126e2b6a851688d4777e01428dbb", size = 63163, upload-time = "2026-03-06T02:52:54.873Z" }, + { url = "https://files.pythonhosted.org/packages/c6/8c/05d277d182bf36b0a13d6bd393ed1dec3468a25b59d01fba2dd70fe4d6ae/wrapt-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7e6cd120ef837d5b6f860a6ea3745f8763805c418bb2f12eeb1fa6e25f22d22", size = 63723, upload-time = "2026-03-06T02:52:56.374Z" }, + { url = "https://files.pythonhosted.org/packages/f4/27/6c51ec1eff4413c57e72d6106bb8dec6f0c7cdba6503d78f0fa98767bcc9/wrapt-2.1.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3769a77df8e756d65fbc050333f423c01ae012b4f6731aaf70cf2bef61b34596", size = 152652, upload-time = "2026-03-06T02:53:23.79Z" }, + { url = "https://files.pythonhosted.org/packages/db/4c/d7dd662d6963fc7335bfe29d512b02b71cdfa23eeca7ab3ac74a67505deb/wrapt-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a76d61a2e851996150ba0f80582dd92a870643fa481f3b3846f229de88caf044", size = 158807, upload-time = "2026-03-06T02:53:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/1e5eea1a78d539d346765727422976676615814029522c76b87a95f6bcdd/wrapt-2.1.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6f97edc9842cf215312b75fe737ee7c8adda75a89979f8e11558dfff6343cc4b", size = 146061, upload-time = "2026-03-06T02:52:57.574Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/bc/62cabea7695cd12a288023251eeefdcb8465056ddaab6227cb78a2de005b/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4006c351de6d5007aa33a551f600404ba44228a89e833d2fadc5caa5de8edfbf", size = 155667, upload-time = "2026-03-06T02:53:39.422Z" }, + { url = "https://files.pythonhosted.org/packages/e9/99/6f2888cd68588f24df3a76572c69c2de28287acb9e1972bf0c83ce97dbc1/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a9372fc3639a878c8e7d87e1556fa209091b0a66e912c611e3f833e2c4202be2", size = 144392, upload-time = "2026-03-06T02:54:22.41Z" }, + { url = "https://files.pythonhosted.org/packages/40/51/1dfc783a6c57971614c48e361a82ca3b6da9055879952587bc99fe1a7171/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3144b027ff30cbd2fca07c0a87e67011adb717eb5f5bd8496325c17e454257a3", size = 150296, upload-time = "2026-03-06T02:54:07.848Z" }, + { url = "https://files.pythonhosted.org/packages/6c/38/cbb8b933a0201076c1f64fc42883b0023002bdc14a4964219154e6ff3350/wrapt-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:3b8d15e52e195813efe5db8cec156eebe339aaf84222f4f4f051a6c01f237ed7", size = 60539, upload-time = "2026-03-06T02:54:00.594Z" }, + { url = "https://files.pythonhosted.org/packages/82/dd/e5176e4b241c9f528402cebb238a36785a628179d7d8b71091154b3e4c9e/wrapt-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:08ffa54146a7559f5b8df4b289b46d963a8e74ed16ba3687f99896101a3990c5", size = 63969, upload-time = "2026-03-06T02:54:39Z" }, + { url = "https://files.pythonhosted.org/packages/5c/99/79f17046cf67e4a95b9987ea129632ba8bcec0bc81f3fb3d19bdb0bd60cd/wrapt-2.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:72aaa9d0d8e4ed0e2e98019cea47a21f823c9dd4b43c7b77bba6679ffcca6a00", size = 60554, upload-time = "2026-03-06T02:53:14.132Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c7/8528ac2dfa2c1e6708f647df7ae144ead13f0a31146f43c7264b4942bf12/wrapt-2.1.2-py3-none-any.whl", hash = 
"sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8", size = 43993, upload-time = "2026-03-06T02:53:12.905Z" }, +] + +[[package]] +name = "wtforms" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/96d10183c3470f1836846f7b9527d6cb0b6c2226ebca40f36fa29f23de60/wtforms-3.1.2.tar.gz", hash = "sha256:f8d76180d7239c94c6322f7990ae1216dae3659b7aa1cee94b6318bdffb474b9", size = 134705, upload-time = "2024-01-06T07:52:41.075Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410, upload-time = "2024-10-14T23:37:33.612Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476, upload-time = "2024-10-14T23:37:36.11Z" }, - { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855, upload-time = "2024-10-14T23:37:37.683Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185, upload-time = "2024-10-14T23:37:40.226Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256, upload-time = "2024-10-14T23:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323, upload-time = "2024-10-14T23:37:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, - { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, - { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, - { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, - { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" }, - { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, + { url = "https://files.pythonhosted.org/packages/18/19/c3232f35e24dccfad372e9f341c4f3a1166ae7c66e4e1351a9467c921cc1/wtforms-3.1.2-py3-none-any.whl", hash = "sha256:bf831c042829c8cdbad74c27575098d541d039b1faa74c771545ecac916f2c07", size = 145961, upload-time = "2024-01-06T07:52:43.023Z" }, ] [[package]] -name = "virtualenv" -version = "20.35.3" +name = "yarl" +version = "1.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "distlib" }, - { name = "filelock" }, - { name = "platformdirs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a4/d5/b0ccd381d55c8f45d46f77df6ae59fbc23d19e901e2d523395598e5f4c93/virtualenv-20.35.3.tar.gz", hash = "sha256:4f1a845d131133bdff10590489610c98c168ff99dc75d6c96853801f7f67af44", size = 6002907, upload-time = "2025-10-10T21:23:33.178Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/73/d9a94da0e9d470a543c1b9d3ccbceb0f59455983088e727b8a1824ed90fb/virtualenv-20.35.3-py3-none-any.whl", hash = "sha256:63d106565078d8c8d0b206d48080f938a8b25361e19432d2c9db40d2899c810a", size = 5981061, upload-time = "2025-10-10T21:23:30.433Z" }, + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = 
"sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" }, + { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" }, + { url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" }, + { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" }, + { url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" }, + { url = "https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = 
"sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = 
"sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = 
"sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = 
"sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, ] diff --git a/zensical.toml b/zensical.toml new file mode 100644 index 00000000..a1f79dcc --- /dev/null +++ b/zensical.toml @@ -0,0 +1,178 @@ +[project] +site_name = "FastAPI Boilerplate" +site_description = "Production-ready FastAPI starter with sessions, OAuth, SQLAdmin, Taskiq, Redis caching, and a plugin-aware CLI." +site_url = "https://benavlabs.github.io/FastAPI-boilerplate" +docs_dir = "docs" +site_dir = "site" + +copyright = "© 2026 Benav Labs" + +extra_css = ["stylesheets/extra.css"] + +# ============================================================================== +# Theme +# ============================================================================== + +[project.theme] +variant = "modern" + +logo = "assets/FastAPI-boilerplate.png" +favicon = "assets/FastAPI-boilerplate.png" + +font.text = "Ubuntu" +font.code = "JetBrains Mono" + +features = [ + "navigation.instant", + "navigation.instant.prefetch", + "navigation.tabs", + "navigation.indexes", + "navigation.top", + "navigation.footer", + "search.suggest", + "search.highlight", + "content.code.copy", + "content.code.annotate", + "toc.follow", +] + +# Light/dark palette — uses brand colors via stylesheets/extra.css +[[project.theme.palette]] +scheme = "default" +primary = "custom" +accent = "custom" +toggle.icon = "lucide/sun" +toggle.name = "Switch to dark mode" + +[[project.theme.palette]] +scheme = "slate" +primary = "custom" +accent = "custom" +toggle.icon = "lucide/moon" +toggle.name = "Switch to light mode" + +# ============================================================================== +# Footer / extra +# ============================================================================== + +[[project.extra.social]] +icon = "fontawesome/brands/github" +link = "https://github.com/benavlabs/fastapi-boilerplate" +name = "FastAPI boilerplate on GitHub" + 
+[[project.extra.social]] +icon = "fontawesome/brands/python" +link = "https://pypi.org/project/fastapi/" +name = "FastAPI on PyPI" + +# ============================================================================== +# Navigation +# ============================================================================== + +[[project.nav]] +"Home" = "index.md" + +[[project.nav]] +"Getting Started" = [ + { "Overview" = "getting-started/index.md" }, + { "Installation" = "getting-started/installation.md" }, + { "Configuration" = "getting-started/configuration.md" }, + { "First Run" = "getting-started/first-run.md" }, +] + +[[project.nav]] +"User Guide" = [ + { "Overview" = "user-guide/index.md" }, + { "Project Structure" = "user-guide/project-structure.md" }, + { "Configuration" = [ + { "Overview" = "user-guide/configuration/index.md" }, + { "Environment Variables" = "user-guide/configuration/environment-variables.md" }, + { "Settings Classes" = "user-guide/configuration/settings-classes.md" }, + { "Docker Setup" = "user-guide/configuration/docker-setup.md" }, + { "Environment-Specific" = "user-guide/configuration/environment-specific.md" }, + ] }, + { "Database" = [ + { "Overview" = "user-guide/database/index.md" }, + { "Models" = "user-guide/database/models.md" }, + { "Schemas" = "user-guide/database/schemas.md" }, + { "CRUD Operations" = "user-guide/database/crud.md" }, + { "Migrations" = "user-guide/database/migrations.md" }, + ] }, + { "API" = [ + { "Overview" = "user-guide/api/index.md" }, + { "Endpoints" = "user-guide/api/endpoints.md" }, + { "Pagination" = "user-guide/api/pagination.md" }, + { "Exceptions" = "user-guide/api/exceptions.md" }, + { "Versioning" = "user-guide/api/versioning.md" }, + ] }, + { "Authentication" = [ + { "Overview" = "user-guide/authentication/index.md" }, + { "Sessions" = "user-guide/authentication/sessions.md" }, + { "User Management" = "user-guide/authentication/user-management.md" }, + { "Permissions" = 
"user-guide/authentication/permissions.md" }, + ] }, + { "Admin Panel" = [ + { "Overview" = "user-guide/admin-panel/index.md" }, + { "Configuration" = "user-guide/admin-panel/configuration.md" }, + { "Adding Models" = "user-guide/admin-panel/adding-models.md" }, + { "User Management" = "user-guide/admin-panel/user-management.md" }, + ] }, + { "Caching" = [ + { "Overview" = "user-guide/caching/index.md" }, + { "Redis Cache" = "user-guide/caching/redis-cache.md" }, + { "Client Cache" = "user-guide/caching/client-cache.md" }, + { "Cache Strategies" = "user-guide/caching/cache-strategies.md" }, + ] }, + { "Background Tasks" = "user-guide/background-tasks/index.md" }, + { "Rate Limiting" = "user-guide/rate-limiting/index.md" }, + { "Development" = "user-guide/development.md" }, + { "Production" = "user-guide/production.md" }, + { "Testing" = "user-guide/testing.md" }, +] + +[[project.nav]] +"CLI" = [ + { "Overview" = "cli/index.md" }, + { "Commands" = "cli/commands.md" }, + { "Plugins" = "cli/plugins.md" }, +] + +[[project.nav]] +"Community" = "community.md" + +# ============================================================================== +# Markdown extensions +# ============================================================================== + +[[project.markdown_extensions]] +admonition = {} + +[[project.markdown_extensions]] +"pymdownx.details" = {} + +[[project.markdown_extensions]] +"pymdownx.highlight" = { anchor_linenums = true, line_spans = "__span", pygments_lang_class = true } + +[[project.markdown_extensions]] +"pymdownx.inlinehilite" = {} + +[[project.markdown_extensions]] +"pymdownx.snippets" = {} + +[[project.markdown_extensions]] +"pymdownx.superfences" = {} + +[[project.markdown_extensions]] +"pymdownx.tabbed" = { alternate_style = true } + +[[project.markdown_extensions]] +"pymdownx.tasklist" = { custom_checkbox = true } + +[[project.markdown_extensions]] +"toc" = { permalink = true } + +[[project.markdown_extensions]] +"attr_list" = {} + 
+[[project.markdown_extensions]] +"md_in_html" = {}