diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..1a395c3
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,35 @@
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.Python
+*.so
+*.egg
+*.egg-info
+dist
+build
+.git
+.gitignore
+.env
+.env.local
+.venv
+venv
+env
+ENV
+.pytest_cache
+.coverage
+htmlcov
+.tox
+.vscode
+.idea
+*.swp
+*.swo
+*~
+.DS_Store
+Thumbs.db
+README.md
+API.md
+tests
+alembic/versions/*.pyc
+# Keep poetry.lock but ignore cache
+.poetry_cache
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..803ea23
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,65 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual Environment
+venv/
+env/
+ENV/
+.venv
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# Environment
+.env
+.env.local
+.env.*.local
+
+# Database
+*.db
+*.sqlite
+*.sqlite3
+
+# Logs
+*.log
+logs/
+
+# Testing
+.pytest_cache/
+.coverage
+htmlcov/
+.tox/
+.hypothesis/
+
+# Alembic
+alembic.ini.bak
+
+# Docker
+.dockerignore.bak
+
+# OS
+.DS_Store
+Thumbs.db
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..3abee3e
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,25 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+default_language_version:
+  python: python3.12
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v5.0.0
+  hooks:
+  - id: check-added-large-files
+  - id: check-toml
+  - id: check-yaml
+    args:
+    - --unsafe
+  - id: end-of-file-fixer
+  - id: trailing-whitespace
+  - id: check-case-conflict
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.14.9
+  hooks:
+  - id: ruff
+    args:
+    - --fix
+    types_or: [python, pyi]
+  - id: ruff-format
+    types_or: [python, pyi]
diff --git a/API.md b/API.md
new file mode 100644
index 0000000..a5f861c
--- /dev/null
+++ b/API.md
@@ -0,0 +1,603 @@
+# Server Inventory Management API Documentation
+
+## Overview
+
+This is a production-grade REST API for managing server inventory. The API follows RFC standards for HTTP APIs, including proper status codes, headers, and error responses (RFC 7807 Problem Details format).
+
+## Quick Reference
+
+### Run with Docker (Recommended)
+```bash
+# Start the application
+docker-compose up -d
+
+# Run tests
+docker-compose -f docker-compose.test.yml up --build
+```
+
+### Run Locally
+```bash
+# Install dependencies
+poetry install
+
+# Start PostgreSQL
+docker-compose up -d db
+
+# Run migrations
+environment=LOCAL poetry run alembic upgrade head
+
+# Start API (ORM mode - default)
+environment=LOCAL poetry run uvicorn app.main:app --reload
+
+# Start API (Raw SQL mode)
+USE_RAW_SQL=true environment=LOCAL poetry run uvicorn app.main:app --reload
+```
+
+### CLI Commands
+```bash
+# Create a server
+poetry run server-cli create <hostname> <ip-address> <state>
+
+# List all servers
+poetry run server-cli list
+
+# Get a server
+poetry run server-cli get <id>
+
+# Update a server
+poetry run server-cli update <id> --hostname <hostname> --ip <ip-address> --state <state>
+
+# Delete a server
+poetry run server-cli delete <id>
+```
+
+## Base URL
+
+- **Development**: `http://localhost:8000`
+- **Production**: Configure via `API_HOST` and `API_PORT` environment variables
+
+## Authentication
+
+Currently, the API does not require authentication. In production, you should add authentication middleware.
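+
+As one illustration (a hypothetical sketch, not code that exists in this repository), an API-key dependency could gate the server routes; the header name, `API_KEY` environment variable, and wiring are all assumptions:
+
+```python
+# Hypothetical sketch - not part of this repository.
+import os
+
+from fastapi import HTTPException, Security, status
+from fastapi.security import APIKeyHeader
+
+api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)
+
+
+async def require_api_key(api_key: str | None = Security(api_key_header)) -> str:
+    """Reject requests that lack a valid X-API-Key header."""
+    expected = os.environ.get("API_KEY")  # assumed variable name
+    if not expected or api_key != expected:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Invalid or missing API key",
+        )
+    return api_key
+
+
+# Wiring (e.g. in app/main.py):
+#   from fastapi import Depends
+#   app.include_router(servers.router, dependencies=[Depends(require_api_key)])
+```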
+ +## API Endpoints + +### Health Checks + +#### GET /health + +Liveness probe endpoint to check if the application is running. + +**Response:** +- **Status Code**: `200 OK` +- **Body**: + ```json + { + "status": "healthy", + "timestamp": "2024-01-01T00:00:00Z" + } + ``` + +#### GET /health/ready + +Readiness probe endpoint that checks database connectivity. + +**Response:** +- **Status Code**: `200 OK` (healthy) or `503 Service Unavailable` (unhealthy) +- **Body**: + ```json + { + "status": "healthy", + "timestamp": "2024-01-01T00:00:00Z" + } + ``` + +### Server Management + +#### POST /servers + +Create a new server. + +**Request Body:** +```json +{ + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active" +} +``` + +**Validation Rules:** +- `hostname`: Required, unique, 1-255 characters +- `ip_address`: Required, valid IPv4 or IPv6 address +- `state`: Required, one of: `active`, `offline`, `retired` + +**Response:** +- **Status Code**: `201 Created` +- **Headers**: + - `Location: /servers/{id}` (RFC 7231) +- **Body**: + ```json + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z" + } + ``` + +**Error Responses:** +- `400 Bad Request`: Invalid IP address or state +- `409 Conflict`: Duplicate hostname +- `422 Unprocessable Entity`: Validation errors + +**Example Error Response (RFC 7807):** +```json +{ + "type": "about:blank", + "title": "DuplicateHostname", + "status": 409, + "detail": "Server with hostname 'web-server-01' already exists", + "instance": "/servers", + "extra": { + "hostname": "web-server-01" + } +} +``` + +#### GET /servers + +List all servers. + +**Response:** +- **Status Code**: `200 OK` +- **Body**: + ```json + { + "servers": [ + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z" + } + ], + "count": 1 + } + ``` + +#### GET /servers/{id} + +Get a server by ID. + +**Path Parameters:** +- `id` (integer): Server ID + +**Response:** +- **Status Code**: `200 OK` +- **Body**: + ```json + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z" + } + ``` + +**Error Responses:** +- `404 Not Found`: Server not found + +#### PUT /servers/{id} + +Update an existing server. All fields are optional. + +**Path Parameters:** +- `id` (integer): Server ID + +**Request Body:** +```json +{ + "hostname": "web-server-01-updated", + "ip_address": "192.168.1.101", + "state": "offline" +} +``` + +**Response:** +- **Status Code**: `200 OK` +- **Body**: Updated server object (same format as GET /servers/{id}) + +**Error Responses:** +- `400 Bad Request`: Invalid IP address or state +- `404 Not Found`: Server not found +- `409 Conflict`: Duplicate hostname + +#### DELETE /servers/{id} + +Delete a server by ID. 
+ +**Path Parameters:** +- `id` (integer): Server ID + +**Response:** +- **Status Code**: `204 No Content` +- **Body**: Empty + +**Error Responses:** +- `404 Not Found`: Server not found + +## Error Response Format (RFC 7807) + +All error responses follow the RFC 7807 Problem Details for HTTP APIs format: + +```json +{ + "type": "about:blank", + "title": "Error Title", + "status": 400, + "detail": "Human-readable error message", + "instance": "/servers/123", + "extra": { + "additional": "context" + } +} +``` + +## HTTP Status Codes + +- `200 OK`: Successful GET, PUT +- `201 Created`: Successful POST (with Location header) +- `204 No Content`: Successful DELETE +- `400 Bad Request`: Validation errors, invalid input +- `404 Not Found`: Resource not found +- `409 Conflict`: Duplicate hostname +- `422 Unprocessable Entity`: Request validation errors +- `500 Internal Server Error`: Server errors +- `503 Service Unavailable`: Readiness check failure + +## Running the API + +### Using Docker Compose (Recommended) + +1. **Start services:** + ```bash + docker-compose up -d + ``` + +2. **Check logs:** + ```bash + docker-compose logs -f api + ``` + +3. **Stop services:** + ```bash + docker-compose down + ``` + +4. **Run with custom environment:** + ```bash + ENVIRONMENT=prod docker-compose up -d + ``` + +### Local Development + +1. **Install Poetry (if not already installed):** + ```bash + curl -sSL https://install.python-poetry.org | python3 - + ``` + +2. **Install dependencies:** + ```bash + poetry install + ``` + +3. **Start PostgreSQL:** + ```bash + docker-compose up -d db + ``` + +4. **Run migrations:** + ```bash + environment=LOCAL poetry run alembic upgrade head + ``` + +5. **Start the API server:** + ```bash + environment=LOCAL poetry run uvicorn app.main:app --reload + ``` + +6. **Enable Raw SQL mode (optional):** + ```bash + USE_RAW_SQL=true environment=LOCAL poetry run uvicorn app.main:app --reload + ``` + +## API Documentation + +Interactive API documentation is available at: +- **Swagger UI**: `http://localhost:8000/docs` +- **ReDoc**: `http://localhost:8000/redoc` + +## CLI Tool + +The CLI tool provides a convenient way to interact with the API from the command line. + +### Installation + +The CLI is included in the project. Install with Poetry: +```bash +poetry install +``` + +### Usage + +Set the API base URL (optional, defaults to `http://localhost:8000`): +```bash +export API_BASE_URL=http://localhost:8000 +``` + +**Commands:** + +1. **Create a server:** + ```bash + poetry run server-cli create web-server-01 192.168.1.100 active + ``` + +2. **List all servers:** + ```bash + poetry run server-cli list + ``` + +3. **Get a server:** + ```bash + poetry run server-cli get 1 + ``` + +4. **Update a server:** + ```bash + poetry run server-cli update 1 --hostname updated-server --state offline + ``` + +5. 
**Delete a server:** + ```bash + poetry run server-cli delete 1 + ``` + +### CLI Examples + +```bash +# Create multiple servers +poetry run server-cli create web-01 192.168.1.10 active +poetry run server-cli create web-02 192.168.1.11 active +poetry run server-cli create db-01 192.168.1.20 active + +# List all servers +poetry run server-cli list + +# Update server state +poetry run server-cli update 1 --state offline + +# Update IP address +poetry run server-cli update 1 --ip 192.168.1.101 + +# Delete a server +poetry run server-cli delete 1 +``` + +## Database Schema + +### Servers Table + +| Column | Type | Constraints | Description | +|--------|------|-------------|-------------| +| id | INTEGER | PRIMARY KEY, AUTO_INCREMENT | Server ID | +| hostname | VARCHAR(255) | UNIQUE, NOT NULL, INDEXED | Server hostname | +| ip_address | VARCHAR(45) | NOT NULL | IP address (IPv4 or IPv6) | +| state | VARCHAR(20) | NOT NULL, CHECK | State: active, offline, retired | +| created_at | TIMESTAMP | NOT NULL, DEFAULT NOW() | Creation timestamp | +| updated_at | TIMESTAMP | NOT NULL, DEFAULT NOW() | Last update timestamp | + +### Constraints + +- `hostname` must be unique +- `state` must be one of: `active`, `offline`, `retired` +- `ip_address` must be a valid IPv4 or IPv6 address (validated in application layer) + +## Environment Variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `ENVIRONMENT` | Environment: LOCAL, DEVELOPMENT, STAGING, PRODUCTION, TESTING | `LOCAL` | +| `DATABASE_DIALECT` | Database dialect | `postgresql` | +| `DATABASE_DRIVER` | Database driver | `asyncpg` | +| `DATABASE_USERNAME` | Database username | `postgres` | +| `DATABASE_PASSWORD` | Database password | `postgres` | +| `DATABASE_NAME` | Database name | `server_inventory` | +| `DATABASE_HOST` | Database host | `localhost` | +| `DATABASE_PORT` | Database port | `5432` | +| `API_HOST` | API server host | `0.0.0.0` | +| `API_PORT` | API server port | `8000` | +| `LOG_LEVEL` | Logging level | `INFO` | +| `CORS_ORIGINS` | Allowed CORS origins | `*` | +| `DATABASE_POOL_SIZE` | Database connection pool size | `10` | +| `DATABASE_POOL_MAX_OVERFLOW` | Database connection pool max overflow | `20` | +| `USE_RAW_SQL` | Use raw SQL queries instead of ORM | `false` | + +## Testing + +### Run Tests + +```bash +# Run all tests with testing environment +environment=TESTING poetry run pytest + +# Run with coverage +environment=TESTING poetry run pytest --cov=app --cov-report=term-missing + +# Run specific test file +environment=TESTING poetry run pytest tests/test_routes.py + +# Run tests in Docker +docker-compose -f docker-compose.test.yml up --build +``` + +### Test Coverage + +The test suite includes: +- Route layer tests (HTTP endpoints) +- Service layer tests (business logic) +- Repository layer tests (database operations) - **tests both ORM and Raw SQL strategies** +- CLI tests + +### Test Files + +| File | Description | +|------|-------------| +| `tests/test_routes.py` | HTTP endpoint tests (CRUD operations, validation, error handling) | +| `tests/test_services.py` | Business logic tests | +| `tests/test_repositories.py` | Database operation tests (ORM and Raw SQL) | +| `tests/test_cli.py` | CLI command tests | +| `tests/conftest.py` | Pytest fixtures and configuration | + +## Deployment + +### Production Considerations + +1. **Environment Variables**: Set all required environment variables +2. 
**Database Migrations**: Run migrations before starting the API:
+   ```bash
+   poetry run alembic upgrade head
+   ```
+3. **Health Checks**: Use `/health` and `/health/ready` for orchestration
+4. **Logging**: Configure structured logging for log aggregation
+5. **Security**: Add authentication/authorization middleware
+6. **SSL/TLS**: Use HTTPS in production
+7. **Connection Pooling**: Tune `DATABASE_POOL_SIZE` and `DATABASE_POOL_MAX_OVERFLOW` based on load
+
+### Docker Deployment
+
+```bash
+# Build and run
+docker-compose up -d --build
+
+# View logs
+docker-compose logs -f
+
+# Scale API (if needed)
+docker-compose up -d --scale api=3
+```
+
+## Architecture
+
+The API follows a layered architecture:
+
+1. **Routing Layer**: FastAPI route handlers
+2. **Service Layer**: Business logic and validation
+3. **Schema Layer**: Pydantic models for request/response validation
+4. **Repository Layer**: Database operations (ORM or Raw SQL)
+5. **Model Layer**: SQLAlchemy ORM models
+
+## Raw SQL Mode
+
+The application supports **raw SQL queries** as required. You can switch between ORM and Raw SQL modes:
+
+### Enable Raw SQL Mode
+
+Set the `USE_RAW_SQL` environment variable to `true`:
+
+```bash
+# Via environment variable
+USE_RAW_SQL=true poetry run uvicorn app.main:app --reload
+
+# Via Docker Compose
+USE_RAW_SQL=true docker-compose up -d
+
+# In environment file (environments/local.env)
+USE_RAW_SQL=true
+```
+
+### How It Works
+
+The application uses a **Strategy Pattern** with two implementations:
+
+| Strategy | Description | Config Value |
+|----------|-------------|--------------|
+| `ORMRepository` | Uses SQLAlchemy ORM for queries | `USE_RAW_SQL=false` (default) |
+| `RawSQLRepository` | Uses parameterized raw SQL queries | `USE_RAW_SQL=true` |
+
+### Raw SQL Example
+
+When `USE_RAW_SQL=true`, the application executes queries like:
+
+```sql
+-- Create server
+INSERT INTO servers (hostname, ip_address, state, created_at, updated_at)
+VALUES (:hostname, :ip_address, :state, NOW(), NOW())
+RETURNING id, hostname, ip_address, state, created_at, updated_at;
+
+-- Get server by ID
+SELECT id, hostname, ip_address, state, created_at, updated_at
+FROM servers
+WHERE id = :id;
+
+-- List all servers
+SELECT id, hostname, ip_address, state, created_at, updated_at
+FROM servers
+ORDER BY id;
+
+-- Update server
+UPDATE servers
+SET hostname = :hostname, ip_address = :ip_address, state = :state, updated_at = NOW()
+WHERE id = :id
+RETURNING id, hostname, ip_address, state, created_at, updated_at;
+
+-- Delete server
+DELETE FROM servers WHERE id = :id;
+```
+
+All queries use **parameterized values** (`:parameter`) to prevent SQL injection.
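+
+For a concrete sense of what this looks like at the call site, here is a minimal sketch using SQLAlchemy's `text()` construct with bound parameters (the helper name is illustrative, not a verbatim excerpt of `RawSQLRepository`):
+
+```python
+# Illustrative sketch - names are assumptions, not an excerpt of RawSQLRepository.
+from sqlalchemy import text
+from sqlalchemy.ext.asyncio import AsyncSession
+
+
+async def fetch_server_by_id(session: AsyncSession, server_id: int):
+    query = text(
+        "SELECT id, hostname, ip_address, state, created_at, updated_at "
+        "FROM servers WHERE id = :id"
+    )
+    # The driver sends :id as a bound parameter; it is never interpolated
+    # into the SQL string, which is what blocks injection.
+    result = await session.execute(query, {"id": server_id})
+    return result.mappings().first()  # None when no row matches
+```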
+
+## Validation Rules
+
+The API enforces the following validation rules as per requirements:
+
+### Hostname Validation
+- **Uniqueness**: Hostname must be unique across all servers
+- **Length**: 1-255 characters
+- **Enforcement**: Database UNIQUE constraint + application-level check
+
+### IP Address Validation
+- **Format**: Must be a valid IPv4 or IPv6 address
+- **Validation**: Uses Python's `ipaddress.ip_address()` function
+- **Examples**: `192.168.1.100` (IPv4), `2001:db8::1` (IPv6)
+
+### State Validation
+- **Allowed Values**: `active`, `offline`, `retired`
+- **Enforcement**: Enum validation + Database CHECK constraint
+
+## Requirements Compliance
+
+This project satisfies all specified requirements:
+
+| Requirement | Status | Implementation |
+|-------------|--------|----------------|
+| FastAPI or Flask | ✅ | FastAPI with async support |
+| PostgreSQL storage | ✅ | PostgreSQL with asyncpg driver |
+| Raw SQL queries | ✅ | `RawSQLRepository` with `USE_RAW_SQL=true` |
+| Hostname uniqueness | ✅ | DB constraint + application validation |
+| IP address validation | ✅ | `ipaddress.ip_address()` validation |
+| State validation | ✅ | Enum: active, offline, retired |
+| API documentation | ✅ | This file (API.md) |
+| CLI tool | ✅ | Typer-based CLI |
+| pytest test suite | ✅ | 4 test files with comprehensive coverage |
+| Docker Compose | ✅ | `docker-compose.yml` + `docker-compose.test.yml` |
+
+## Support
+
+For issues or questions, please refer to the project repository or contact the development team.
diff --git a/README.md b/README.md
index 3145d38..41b93d9 100644
--- a/README.md
+++ b/README.md
@@ -1,31 +1,363 @@
-# Instructions
+# Server Inventory Management System
-You are developing an inventory management software solution for a cloud services company that provisions servers in multiple data centers. You must build a CRUD app for tracking the state of all the servers.
+A production-grade CRUD application for tracking server inventory with FastAPI, PostgreSQL, CLI tool, comprehensive tests, and Docker Compose deployment.
-Deliverables:
-- PR to https://github.com/Mathpix/hiring-challenge-devops-python that includes:
-- API code
-- CLI code
-- pytest test suite
-- Working Docker Compose stack
+## Overview
-Short API.md on how to run everything, also a short API and CLI spec
+This system provides a complete solution for managing server inventory across multiple data centers. It follows a layered architecture with proper separation of concerns, RFC-compliant HTTP APIs, and production-ready features.
-Required endpoints:
-- POST /servers → create a server
-- GET /servers → list all servers
-- GET /servers/{id} → get one server
-- PUT /servers/{id} → update server
-- DELETE /servers/{id} → delete server
+## Features
-Requirements:
-- Use FastAPI or Flask
-- Store data in PostgreSQL
-- Use raw SQL
+- ✅ **RESTful API** with FastAPI
+- ✅ **PostgreSQL** database with SQLAlchemy ORM and Raw SQL support
+- ✅ **CLI Tool** for command-line operations
+- ✅ **Comprehensive Tests** with pytest
+- ✅ **Docker Compose** for easy deployment
+- ✅ **Database Migrations** with Alembic
+- ✅ **RFC-Compliant** HTTP status codes and error responses
+- ✅ **Production-Ready** logging, health checks, and error handling
+- ✅ **Dual Query Mode** - Switch between SQLAlchemy ORM and raw SQL queries
+- ✅ **Multi-Environment Support** - LOCAL, DEVELOPMENT, STAGING, PRODUCTION, TESTING
-Validate that:
-- hostname is unique
-- IP address looks like an IP
+## Quick Start
-State is one of: active, offline, retired
+### Using Docker Compose (Recommended)
+
+1. **Clone the repository:**
+   ```bash
+   git clone <repository-url>
+   cd server-inventory-management
+   ```
+
+2. **Start the application (Development):**
+   ```bash
+   docker-compose up -d
+   ```
+
+3. **Check the API:**
+   ```bash
+   curl http://localhost:8000/health
+   ```
+
+4. **Access API documentation:**
+   - Swagger UI: http://localhost:8000/docs
+   - ReDoc: http://localhost:8000/redoc
+
+### Local Development (Without Docker)
+
+1. **Install Poetry (if not already installed):**
+   ```bash
+   curl -sSL https://install.python-poetry.org | python3 -
+   ```
+
+2. **Install dependencies:**
+   ```bash
+   poetry install
+   ```
+
+3. **Start PostgreSQL (using Docker):**
+   ```bash
+   docker-compose up -d db
+   ```
+
+4. **Run migrations:**
+   ```bash
+   environment=LOCAL poetry run alembic upgrade head
+   ```
+
+5. **Start the API:**
+   ```bash
+   environment=LOCAL poetry run uvicorn app.main:app --reload
+   ```
+
+## Environment Configuration
+
+The application supports multiple environments with dedicated configuration files:
+
+### Available Environments
+
+| Environment | File | Description |
+|-------------|------|-------------|
+| LOCAL | `environments/local.env` | Local development without Docker |
+| DEVELOPMENT | `environments/development.env` | Development server/Docker |
+| STAGING | `environments/staging.env` | Staging/QA environment |
+| PRODUCTION | `environments/production.env` | Production environment |
+| TESTING | `environments/testing.env` | Running tests |
+
+### Using Environments
+
+#### Local Development (Without Docker)
+
+Set the `environment` variable before running commands:
+
+```bash
+# Run Alembic migrations
+environment=LOCAL poetry run alembic upgrade head
+
+# Run the API server
+environment=LOCAL poetry run uvicorn app.main:app --reload
+
+# Run tests
+environment=TESTING poetry run pytest
+
+# Run CLI commands
+environment=LOCAL poetry run server-cli list
+```
+
+#### Docker Compose
+
+The Dockerfile uses multi-stage builds.
Set `DOCKER_TARGET` to select the environment: + +```bash +# Development (default) +docker-compose up -d + +# Local development +DOCKER_TARGET=local docker-compose up -d + +# Staging +DOCKER_TARGET=staging docker-compose up -d + +# Production +DOCKER_TARGET=production docker-compose up -d + +# Testing (separate compose file) +docker-compose -f docker-compose.test.yml up --build +``` + +Or set variables in a `.env` file at project root: +```bash +DOCKER_TARGET=production +ENVIRONMENT=PRODUCTION +DATABASE_PASSWORD=secure_password +``` + +Build images directly: +```bash +docker build -f docker/Dockerfile --target production -t server-inventory:prod . +``` + +### Environment Variables + +Each environment file contains: + +| Variable | Description | Example | +|----------|-------------|---------| +| `ENVIRONMENT` | Environment name | `LOCAL`, `DEVELOPMENT`, `STAGING`, `PRODUCTION`, `TESTING` | +| `DATABASE_DIALECT` | Database dialect | `postgresql`, `mysql` | +| `DATABASE_DRIVER` | Database driver | `asyncpg`, `pymysql` | +| `DATABASE_USERNAME` | Database username | `postgres` | +| `DATABASE_PASSWORD` | Database password | `postgres` | +| `DATABASE_NAME` | Database name | `server_inventory` | +| `DATABASE_HOST` | Database host | `localhost`, `db` | +| `DATABASE_PORT` | Database port | `5432`, `3306` | +| `DATABASE_ENGINE_ECHO` | Echo SQL queries | `True`, `False` | +| `DATABASE_POOL_SIZE` | Connection pool size | `10` | +| `DATABASE_POOL_MAX_OVERFLOW` | Pool max overflow | `20` | +| `DATABASE_POOL_TIMEOUT` | Pool timeout (seconds) | `30` | +| `DATABASE_POOL_RECYCLE` | Pool recycle (seconds) | `3600` | +| `API_HOST` | API server host | `0.0.0.0` | +| `API_PORT` | API server port | `8000` | +| `LOG_LEVEL` | Logging level | `DEBUG`, `INFO`, `WARNING`, `ERROR` | +| `CORS_ORIGINS` | Allowed CORS origin | `*`, `https://example.com` | +| `USE_RAW_SQL` | Use raw SQL instead of ORM | `True` or `False` | + +## Project Structure + +``` +server-inventory-management/ +├── app/ # Application code +│ ├── models/ # SQLAlchemy ORM models +│ ├── schemas/ # Pydantic schemas +│ ├── repositories/ # Generic repository pattern +│ │ ├── base.py # BaseRepository[T] generic interface +│ │ ├── factory.py # RepositoryFactory with strategy selection +│ │ ├── server_repository.py # Server-specific repository +│ │ └── strategies/ # Query strategy implementations +│ │ ├── orm.py # ORMRepository[T] - SQLAlchemy ORM +│ │ └── raw_sql.py # RawSQLRepository[T] - Raw SQL queries +│ ├── services/ # Business logic +│ ├── routes/ # FastAPI routes +│ ├── config.py # Configuration with environment loading +│ ├── database.py # Database setup +│ ├── exceptions.py # Custom exceptions +│ ├── logging_config.py # Logging setup +│ └── main.py # FastAPI app +├── cli/ # CLI tool +├── alembic/ # Database migrations +├── environments/ # Environment configuration files +│ ├── local.env # Local development +│ ├── development.env # Development server +│ ├── staging.env # Staging environment +│ ├── production.env # Production environment +│ └── testing.env # Testing environment +├── tests/ # Test suite +├── docker/ # Docker configuration +│ └── Dockerfile # Multi-stage Docker image +├── docker-compose.yml # Main Docker Compose (all environments) +├── docker-compose.test.yml # Testing compose file +├── pyproject.toml # Poetry configuration & dependencies +├── poetry.lock # Locked dependency versions +├── API.md # API documentation +└── README.md # This file +``` + +## API Endpoints + +All endpoints are documented in [API.md](API.md). 
Here's a quick reference: + +- `POST /servers` - Create a server +- `GET /servers` - List all servers +- `GET /servers/{id}` - Get a server by ID +- `PUT /servers/{id}` - Update a server +- `DELETE /servers/{id}` - Delete a server +- `GET /health` - Health check (liveness) +- `GET /health/ready` - Readiness check + +## CLI Usage + +The CLI tool provides convenient command-line access to the API: + +```bash +# Create a server +environment=LOCAL poetry run server-cli create web-server-01 192.168.1.100 active + +# List all servers +environment=LOCAL poetry run server-cli list + +# Get a server +environment=LOCAL poetry run server-cli get 1 + +# Update a server +environment=LOCAL poetry run server-cli update 1 --hostname updated-server --state offline + +# Delete a server +environment=LOCAL poetry run server-cli delete 1 +``` + +See [API.md](API.md) for detailed CLI documentation. + +## Testing + +Run the test suite: + +```bash +# All tests with testing environment +environment=TESTING poetry run pytest + +# With coverage +environment=TESTING poetry run pytest --cov=app --cov-report=term-missing + +# Specific test file +environment=TESTING poetry run pytest tests/test_routes.py + +# Using Docker +docker-compose -f docker-compose.test.yml up --build +``` + +## Database Migrations + +```bash +# Create a new migration +environment=LOCAL poetry run alembic revision --autogenerate -m "Description" + +# Apply migrations +environment=LOCAL poetry run alembic upgrade head + +# Rollback one migration +environment=LOCAL poetry run alembic downgrade -1 +``` + +## Architecture + +The application follows a layered architecture: + +1. **Routing Layer** (`app/routes/`) - FastAPI route handlers +2. **Service Layer** (`app/services/`) - Business logic +3. **Schema Layer** (`app/schemas/`) - Pydantic validation +4. **Repository Layer** (`app/repositories/`) - Database operations (ORM or Raw SQL) +5. **Model Layer** (`app/models/`) - SQLAlchemy ORM models + +### Generic Repository Pattern + +The repository layer uses a **Strategy Pattern** with **Generic TypeVars** for maximum flexibility: + +``` +app/repositories/ +├── base.py # BaseRepository[T] - Generic abstract base +├── factory.py # RepositoryFactory - Strategy selection +├── server_repository.py # ServerRepository - Domain-specific logic +└── strategies/ + ├── orm.py # ORMRepository[T] - SQLAlchemy ORM implementation + └── raw_sql.py # RawSQLRepository[T] - Raw SQL implementation +``` + +#### Key Components + +| Component | Description | +|-----------|-------------| +| `BaseRepository[T]` | Generic abstract base class with TypeVar for any model | +| `ORMRepository[T]` | Generic ORM implementation using SQLAlchemy | +| `RawSQLRepository[T]` | Generic raw SQL implementation using parameterized queries | +| `RepositoryFactory` | Factory that selects strategy based on config | +| `ServerRepository` | Domain-specific wrapper with business rules | + +#### Usage Example + +```python +from app.repositories import RepositoryFactory, QueryStrategy +from app.models.server import Server + +# Auto-select based on USE_RAW_SQL config +repo = RepositoryFactory.create(session, Server) + +# Or explicitly choose strategy +orm_repo = RepositoryFactory.create(session, Server, QueryStrategy.ORM) +raw_repo = RepositoryFactory.create(session, Server, QueryStrategy.RAW_SQL) + +# Works with any model - just pass the model class! 
+# repo = RepositoryFactory.create(session, AnyOtherModel) +``` + +### Database Query Modes + +The application supports two query modes controlled by `USE_RAW_SQL`: + +#### SQLAlchemy ORM Mode (Default) +Uses SQLAlchemy ORM for all database operations. This is the default mode. + +```bash +# Default - uses ORM +USE_RAW_SQL=false +``` + +#### Raw SQL Mode +Uses raw PostgreSQL queries for all database operations. Enable with: + +```bash +USE_RAW_SQL=true +``` + +Both modes implement the same `BaseRepository` interface, making it easy to switch between them without changing any other code. The `RepositoryFactory` automatically selects the correct implementation based on configuration. + +## Requirements Met + +✅ **FastAPI** - Modern async web framework +✅ **PostgreSQL** - Database with SQLAlchemy ORM and Raw SQL queries +✅ **CLI Tool** - Command-line interface using Typer +✅ **pytest Test Suite** - Comprehensive tests for all layers +✅ **Docker Compose** - Complete stack with PostgreSQL +✅ **API Documentation** - Detailed API.md with examples +✅ **Raw SQL Support** - Toggle between ORM and raw SQL via configuration +✅ **Multi-Environment Configuration** - Separate configs for LOCAL, DEV, STAGING, PROD, TESTING + +### Validation Rules + +✅ **Hostname uniqueness** - Enforced at database and application level +✅ **IP address validation** - Validates IPv4 and IPv6 using Python `ipaddress` module +✅ **State validation** - Must be one of: `active`, `offline`, `retired` + +## Documentation + +- [API.md](API.md) - Complete API documentation with examples +- Interactive docs available at `/docs` (Swagger) and `/redoc` (ReDoc) diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..5d23107 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,114 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/__init__.py b/alembic/__init__.py new file mode 100644 index 0000000..7742aad --- /dev/null +++ b/alembic/__init__.py @@ -0,0 +1 @@ +"""Alembic database migrations.""" diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..a1649aa --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,80 @@ +"""Alembic environment configuration for migrations.""" + +from logging.config import fileConfig + +from sqlalchemy import engine_from_config, pool + +from alembic import context + +# Import your models and database configuration +from app.config import settings +from app.database import Base +from app.models.server import Server # noqa: F401 + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Set the SQLAlchemy URL from settings (convert async to sync driver) +sync_url = settings.database_url.replace("+asyncpg", "+psycopg2") +config.set_main_option("sqlalchemy.url", sync_url) + +# add your model's MetaData object here +# for 'autogenerate' support +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/001_initial_migration_create_servers_table.py b/alembic/versions/001_initial_migration_create_servers_table.py new file mode 100644 index 0000000..ffe082e --- /dev/null +++ b/alembic/versions/001_initial_migration_create_servers_table.py @@ -0,0 +1,52 @@ +"""Initial migration: create servers table + +Revision ID: 001 +Revises: +Create Date: 2024-01-01 00:00:00.000000 + +""" + +from collections.abc import Sequence + +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "001" +down_revision: str | None = None +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Create servers table.""" + op.create_table( + "servers", + sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), + sa.Column("hostname", sa.String(length=255), nullable=False), + sa.Column("ip_address", sa.String(length=45), nullable=False), + sa.Column("state", sa.String(length=20), nullable=False), + sa.Column( + "created_at", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "updated_at", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("hostname"), + sa.CheckConstraint("state IN ('active', 'offline', 'retired')", name="check_server_state"), + ) + op.create_index(op.f("ix_servers_hostname"), "servers", ["hostname"], unique=True) + + +def downgrade() -> None: + """Drop servers table.""" + op.drop_index(op.f("ix_servers_hostname"), table_name="servers") + op.drop_table("servers") diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..53a1ff2 --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,10 @@ +""" +Server Inventory Management Application. + +A production-grade CRUD API for tracking server inventory. + +Author: Ali Khan +""" + +__author__ = "Ali Khan" +__version__ = "1.0.0" diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..dbb400b --- /dev/null +++ b/app/config.py @@ -0,0 +1,183 @@ +"""Application configuration using Pydantic Settings.""" + +import os +from enum import Enum +from pathlib import Path + +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Environment(str, Enum): + """Available environment types.""" + + LOCAL = "LOCAL" + DEVELOPMENT = "DEVELOPMENT" + STAGING = "STAGING" + PRODUCTION = "PRODUCTION" + TESTING = "TESTING" + + +def get_env_file() -> str | None: + """ + Determine which environment file to load based on the 'environment' env var. + + The environment variable can be set via command line: + environment=LOCAL poetry run uvicorn app.main:app --reload + environment=TESTING poetry run pytest + + Returns: + Path to the environment file, or None if file doesn't exist. 
+ """ + # Get the environment from env var (default to LOCAL) + env_name = os.getenv("environment", "LOCAL").upper() + + # Map environment names to file names + env_file_map = { + "LOCAL": "local.env", + "DEVELOPMENT": "development.env", + "DEVEL": "development.env", # Alias + "STAGING": "staging.env", + "PRODUCTION": "production.env", + "PROD": "production.env", # Alias + "TESTING": "testing.env", + "TEST": "testing.env", # Alias + } + + # Get the env file name + env_file_name = env_file_map.get(env_name, "local.env") + + # Build the path to the env file + # Check both project root and environments folder + project_root = Path(__file__).parent.parent + environments_folder = project_root / "environments" + + env_file_path = environments_folder / env_file_name + + if env_file_path.exists(): + return str(env_file_path) + + # Fallback to .env in project root + fallback_env = project_root / ".env" + if fallback_env.exists(): + return str(fallback_env) + + return None + + +class Settings(BaseSettings): + """Application settings loaded from environment variables.""" + + model_config = SettingsConfigDict( + env_file=get_env_file(), + env_file_encoding="utf-8", + case_sensitive=False, + extra="ignore", + ) + + # Environment + environment: str = Field( + default="LOCAL", + description="Environment: LOCAL, DEVELOPMENT, STAGING, PRODUCTION, TESTING", + ) + + # Database Configuration + database_dialect: str = Field( + default="postgresql", description="Database dialect (postgresql, mysql, etc.)" + ) + database_driver: str = Field( + default="asyncpg", description="Database driver (asyncpg, pymysql, etc.)" + ) + database_username: str = Field(default="postgres", description="Database username") + database_password: str = Field(default="postgres", description="Database password") + database_name: str = Field(default="server_inventory", description="Database name") + database_host: str = Field(default="localhost", description="Database host") + database_port: int = Field(default=5432, description="Database port") + + # Database Engine Settings + database_engine_echo: bool = Field(default=False, description="Echo SQL queries to stdout") + database_pool_echo: bool = Field(default=False, description="Echo connection pool events") + database_engine_pool_pre_ping: bool = Field( + default=True, description="Enable connection pool pre-ping" + ) + database_check_same_thread: bool = Field( + default=False, description="SQLite check_same_thread option" + ) + + # Database Connection Pool Settings + database_pool_size: int = Field(default=10, description="Database connection pool size") + database_pool_max_overflow: int = Field( + default=20, description="Database connection pool max overflow" + ) + database_pool_timeout: int = Field( + default=30, description="Database connection pool timeout in seconds" + ) + database_pool_recycle: int = Field( + default=3600, description="Database connection pool recycle time in seconds" + ) + + # API Server + api_host: str = Field(default="0.0.0.0", description="API server host") + api_port: int = Field(default=8000, description="API server port") + + # Logging + log_level: str = Field(default="INFO", description="Logging level: DEBUG, INFO, WARNING, ERROR") + + # CORS + cors_origins: str = Field( + default="*", + description="Allowed CORS origin", + ) + + # Database Query Mode + use_raw_sql: bool = Field( + default=False, + description="Use raw SQL queries instead of SQLAlchemy ORM. 
Set to True for raw SQL mode.", + ) + + @property + def database_url(self) -> str: + """Construct the database URL from individual components.""" + # Handle empty host (for some configurations) + host = self.database_host if self.database_host else "localhost" + + # Construct the URL + return ( + f"{self.database_dialect}+{self.database_driver}://" + f"{self.database_username}:{self.database_password}@" + f"{host}:{self.database_port}/{self.database_name}" + ) + + @property + def is_local(self) -> bool: + """Check if running in local environment.""" + return self.environment.upper() == "LOCAL" + + @property + def is_development(self) -> bool: + """Check if running in development environment.""" + return self.environment.upper() in ("DEVELOPMENT", "DEVEL") + + @property + def is_staging(self) -> bool: + """Check if running in staging environment.""" + return self.environment.upper() == "STAGING" + + @property + def is_production(self) -> bool: + """Check if running in production environment.""" + return self.environment.upper() in ("PRODUCTION", "PROD") + + @property + def is_testing(self) -> bool: + """Check if running in testing environment.""" + return self.environment.upper() in ("TESTING", "TEST") + + @property + def env_file_loaded(self) -> str | None: + """Return the path of the loaded env file.""" + return get_env_file() + + +# Global settings instance +settings = Settings() diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..19579bd --- /dev/null +++ b/app/database.py @@ -0,0 +1,59 @@ +"""Database session management and connection setup.""" + +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.orm import declarative_base + +from app.config import settings + +# Create async engine +engine = create_async_engine( + settings.database_url, + pool_size=settings.database_pool_size, + max_overflow=settings.database_pool_max_overflow, + pool_timeout=settings.database_pool_timeout, + pool_recycle=settings.database_pool_recycle, + pool_pre_ping=settings.database_engine_pool_pre_ping, + echo=settings.database_engine_echo, + future=True, +) + +# Create async session factory +AsyncSessionLocal = async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, +) + +# Base class for declarative models +Base = declarative_base() + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + """ + Dependency function for FastAPI to get database session. + Yields a session and ensures it's closed after the request. 
+ """ + async with AsyncSessionLocal() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +async def init_db() -> None: + """Initialize database by creating all tables.""" + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + +async def close_db() -> None: + """Close database connections on application shutdown.""" + await engine.dispose() diff --git a/app/exceptions.py b/app/exceptions.py new file mode 100644 index 0000000..4657696 --- /dev/null +++ b/app/exceptions.py @@ -0,0 +1,57 @@ +"""Custom exception classes for the application.""" + +from typing import Any + + +class BaseApplicationError(Exception): + """Base exception for all application errors.""" + + def __init__( + self, + message: str, + status_code: int = 500, + detail: str | None = None, + extra: dict[str, Any] | None = None, + ): + self.message = message + self.status_code = status_code + self.detail = detail or message + self.extra = extra or {} + super().__init__(self.message) + + +class ServerNotFoundError(BaseApplicationError): + """Raised when a server is not found.""" + + def __init__(self, server_id: int, detail: str | None = None): + message = f"Server with id {server_id} not found" + super().__init__( + message=message, + status_code=404, + detail=detail or message, + extra={"server_id": server_id}, + ) + + +class DuplicateHostnameError(BaseApplicationError): + """Raised when attempting to create a server with a duplicate hostname.""" + + def __init__(self, hostname: str, detail: str | None = None): + message = f"Server with hostname '{hostname}' already exists" + super().__init__( + message=message, + status_code=409, + detail=detail or message, + extra={"hostname": hostname}, + ) + + +class DatabaseError(BaseApplicationError): + """Raised when a database operation fails.""" + + def __init__(self, message: str, detail: str | None = None): + super().__init__( + message=message, + status_code=500, + detail=detail or message, + ) diff --git a/app/logging_config.py b/app/logging_config.py new file mode 100644 index 0000000..c03ae81 --- /dev/null +++ b/app/logging_config.py @@ -0,0 +1,89 @@ +"""Structured logging configuration.""" + +import json +import logging +import sys +from typing import Any + +from app.config import settings + + +class JSONFormatter(logging.Formatter): + """JSON formatter for structured logging in production.""" + + def format(self, record: logging.LogRecord) -> str: + """Format log record as JSON.""" + log_data: dict[str, Any] = { + "timestamp": self.formatTime(record, self.datefmt), + "level": record.levelname, + "logger": record.name, + "message": record.getMessage(), + } + + # Add exception info if present + if record.exc_info: + log_data["exception"] = self.formatException(record.exc_info) + + # Add extra fields + if hasattr(record, "extra_fields"): + log_data.update(record.extra_fields) + + return json.dumps(log_data) + + +class ColoredFormatter(logging.Formatter): + """Colored formatter for development.""" + + COLORS = { + "DEBUG": "\033[36m", # Cyan + "INFO": "\033[32m", # Green + "WARNING": "\033[33m", # Yellow + "ERROR": "\033[31m", # Red + "CRITICAL": "\033[35m", # Magenta + } + RESET = "\033[0m" + + def format(self, record: logging.LogRecord) -> str: + """Format log record with colors.""" + color = self.COLORS.get(record.levelname, "") + record.levelname = f"{color}{record.levelname}{self.RESET}" + return super().format(record) + + +def 
setup_logging() -> None: + """Configure application logging.""" + # Get log level from settings + log_level = getattr(logging, settings.log_level.upper(), logging.INFO) + + # Create root logger + root_logger = logging.getLogger() + root_logger.setLevel(log_level) + + # Remove existing handlers + root_logger.handlers.clear() + + # Create console handler + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setLevel(log_level) + + # Choose formatter based on environment + if settings.is_production: + formatter = JSONFormatter() + else: + formatter = ColoredFormatter( + fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + console_handler.setFormatter(formatter) + root_logger.addHandler(console_handler) + + # Set specific logger levels + logging.getLogger("uvicorn").setLevel(logging.WARNING) + logging.getLogger("uvicorn.access").setLevel(logging.WARNING) + logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) + + +def get_logger(name: str) -> logging.Logger: + """Get a logger instance with the given name.""" + return logging.getLogger(name) diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..24dc182 --- /dev/null +++ b/app/main.py @@ -0,0 +1,206 @@ +"""FastAPI application entry point.""" + +from contextlib import asynccontextmanager + +from fastapi import FastAPI, Request, status +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse + +from app.config import settings +from app.database import close_db, init_db +from app.exceptions import BaseApplicationError +from app.logging_config import get_logger, setup_logging +from app.routes import health, servers + +# Setup logging +setup_logging() +logger = get_logger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """ + Application lifespan manager for startup and shutdown events. + + Args: + app: FastAPI application instance + """ + # Startup + logger.info("Starting application...") + logger.info(f"Environment: {settings.environment}") + logger.info( + f"Database URL: {settings.database_url.split('@')[-1] if '@' in settings.database_url else 'hidden'}" + ) + + try: + # Initialize database + await init_db() + logger.info("Database initialized successfully") + except Exception as e: + logger.error(f"Failed to initialize database: {e}", exc_info=True) + raise + + yield + + # Shutdown + logger.info("Shutting down application...") + try: + await close_db() + logger.info("Database connections closed") + except Exception as e: + logger.error(f"Error closing database connections: {e}", exc_info=True) + + +# Create FastAPI application +app = FastAPI( + title="Server Inventory Management API", + description="Production-grade CRUD API for tracking server inventory", + version="1.0.0", + lifespan=lifespan, + docs_url="/docs", + redoc_url="/redoc", +) + +# Configure CORS +app.add_middleware( + CORSMiddleware, + allow_origins=[settings.cors_origins], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +# Global exception handler for custom exceptions +@app.exception_handler(BaseApplicationError) +async def application_error_handler(request: Request, exc: BaseApplicationError) -> JSONResponse: + """ + Global exception handler for application errors. 
+ + Args: + request: FastAPI request object + exc: Application error instance + + Returns: + JSON error response in RFC 7807 format + """ + logger.error( + f"Application error: {exc.message}", + extra={ + "status_code": exc.status_code, + "path": request.url.path, + "method": request.method, + "extra": exc.extra, + }, + ) + + return JSONResponse( + status_code=exc.status_code, + content={ + "type": "about:blank", + "title": exc.__class__.__name__.replace("Error", ""), + "status": exc.status_code, + "detail": exc.detail, + "instance": str(request.url), + "extra": exc.extra, + }, + ) + + +# Global exception handler for unhandled exceptions +@app.exception_handler(Exception) +async def global_exception_handler(request: Request, exc: Exception) -> JSONResponse: + """ + Global exception handler for unhandled exceptions. + + Args: + request: FastAPI request object + exc: Exception instance + + Returns: + JSON error response + """ + logger.exception( + f"Unhandled exception: {str(exc)}", + extra={ + "path": request.url.path, + "method": request.method, + }, + ) + + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={ + "type": "about:blank", + "title": "Internal Server Error", + "status": 500, + "detail": "An unexpected error occurred", + "instance": str(request.url), + }, + ) + + +# Request logging middleware +@app.middleware("http") +async def log_requests(request: Request, call_next): + """ + Middleware to log HTTP requests and responses. + + Args: + request: FastAPI request object + call_next: Next middleware/route handler + + Returns: + Response from route handler + """ + import time + + start_time = time.time() + + # Log request + logger.info( + f"Request: {request.method} {request.url.path}", + extra={ + "method": request.method, + "path": request.url.path, + "query_params": str(request.query_params), + }, + ) + + # Process request + response = await call_next(request) + + # Calculate processing time + process_time = time.time() - start_time + + # Log response + logger.info( + f"Response: {request.method} {request.url.path} - {response.status_code}", + extra={ + "method": request.method, + "path": request.url.path, + "status_code": response.status_code, + "process_time": process_time, + }, + ) + + # Add process time header + response.headers["X-Process-Time"] = str(process_time) + + return response + + +# Include routers +app.include_router(health.router) +app.include_router(servers.router) + + +@app.get("/", tags=["root"]) +async def root(): + """Root endpoint.""" + return { + "message": "Server Inventory Management API", + "version": "1.0.0", + "docs": "/docs", + "health": "/health", + } diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..1a9c13f --- /dev/null +++ b/app/models/__init__.py @@ -0,0 +1,5 @@ +"""SQLAlchemy ORM models.""" + +from app.models.server import Server + +__all__ = ["Server"] diff --git a/app/models/server.py b/app/models/server.py new file mode 100644 index 0000000..51bbecc --- /dev/null +++ b/app/models/server.py @@ -0,0 +1,66 @@ +"""Server SQLAlchemy ORM model.""" + +from datetime import datetime +from enum import Enum + +from sqlalchemy import CheckConstraint, DateTime, Integer, String, func +from sqlalchemy.orm import Mapped, mapped_column + +from app.database import Base + + +class ServerState(str, Enum): + """Server state enumeration.""" + + ACTIVE = "active" + OFFLINE = "offline" + RETIRED = "retired" + + +class Server(Base): + """Server model representing the servers table.""" + + 
__tablename__ = "servers"
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
+    hostname: Mapped[str] = mapped_column(
+        String(255),
+        unique=True,
+        nullable=False,
+        index=True,
+        comment="Unique server hostname",
+    )
+    ip_address: Mapped[str] = mapped_column(
+        String(45),  # IPv6 max length is 45 characters
+        nullable=False,
+        comment="Server IP address (IPv4 or IPv6)",
+    )
+    state: Mapped[str] = mapped_column(
+        String(20),
+        nullable=False,
+        comment="Server state: active, offline, or retired",
+    )
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        server_default=func.now(),
+        nullable=False,
+        comment="Timestamp when server was created",
+    )
+    updated_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        server_default=func.now(),
+        onupdate=func.now(),
+        nullable=False,
+        comment="Timestamp when server was last updated",
+    )
+
+    __table_args__ = (
+        CheckConstraint(
+            "state IN ('active', 'offline', 'retired')",
+            name="check_server_state",
+        ),
+    )
+
+    def __repr__(self) -> str:
+        """String representation of Server."""
+        return f"<Server(id={self.id}, hostname='{self.hostname}', state='{self.state}')>"
diff --git a/app/repositories/__init__.py b/app/repositories/__init__.py
new file mode 100644
index 0000000..d1fb4f5
--- /dev/null
+++ b/app/repositories/__init__.py
@@ -0,0 +1,22 @@
+"""Repository layer with generic CRUD operations and strategy pattern."""
+
+from app.repositories.base import BaseRepository, QueryStrategy
+from app.repositories.factory import RepositoryFactory, get_repository
+from app.repositories.server_repository import ServerRepository, get_server_repository
+from app.repositories.strategies.orm import ORMRepository
+from app.repositories.strategies.raw_sql import RawSQLRepository
+
+__all__ = [
+    # Base classes
+    "BaseRepository",
+    "QueryStrategy",
+    # Strategy implementations
+    "ORMRepository",
+    "RawSQLRepository",
+    # Factory
+    "RepositoryFactory",
+    "get_repository",
+    # Server-specific
+    "ServerRepository",
+    "get_server_repository",
+]
diff --git a/app/repositories/base.py b/app/repositories/base.py
new file mode 100644
index 0000000..08f84e0
--- /dev/null
+++ b/app/repositories/base.py
@@ -0,0 +1,176 @@
+"""Generic base repository with strategy pattern support.
+
+This module implements a generic repository pattern with strategy support
+for switching between ORM and raw SQL implementations.
+
+Author: Ali Khan
+"""
+
+from abc import ABC, abstractmethod
+from enum import Enum
+from typing import Any, Generic, TypeVar
+
+from pydantic import BaseModel
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import DeclarativeBase
+
+# Type variables for generic repository
+ModelType = TypeVar("ModelType", bound=DeclarativeBase)
+CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
+UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
+
+
+class QueryStrategy(str, Enum):
+    """Available query strategy types."""
+
+    ORM = "orm"
+    RAW_SQL = "raw_sql"
+
+
+class BaseRepository(ABC, Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
+    """
+    Abstract base repository providing a generic interface for CRUD operations.
+
+    This class defines the contract that all repository implementations must follow,
+    whether using SQLAlchemy ORM or raw SQL queries.
+ + Type Parameters: + ModelType: The SQLAlchemy model class + CreateSchemaType: Pydantic schema for create operations + UpdateSchemaType: Pydantic schema for update operations + """ + + def __init__(self, session: AsyncSession, model: type[ModelType]): + """ + Initialize the repository. + + Args: + session: AsyncSession for database operations + model: The SQLAlchemy model class this repository manages + """ + self.session = session + self.model = model + self._table_name = model.__tablename__ + + @property + def table_name(self) -> str: + """Get the table name for this repository's model.""" + return self._table_name + + @abstractmethod + async def create(self, data: CreateSchemaType) -> ModelType: + """ + Create a new record. + + Args: + data: Pydantic schema with creation data + + Returns: + Created model instance + + Raises: + DatabaseError: If database operation fails + """ + pass + + @abstractmethod + async def get_by_id(self, id: int) -> ModelType | None: + """ + Get a record by its primary key ID. + + Args: + id: Primary key ID + + Returns: + Model instance or None if not found + """ + pass + + @abstractmethod + async def get_by_field(self, field_name: str, value: Any) -> ModelType | None: + """ + Get a record by a specific field value. + + Args: + field_name: Name of the field to search by + value: Value to match + + Returns: + Model instance or None if not found + """ + pass + + @abstractmethod + async def list_all(self, order_by: str | None = "id") -> list[ModelType]: + """ + List all records. + + Args: + order_by: Field name to order by (default: "id") + + Returns: + List of model instances + """ + pass + + @abstractmethod + async def update(self, id: int, data: UpdateSchemaType) -> ModelType | None: + """ + Update an existing record. + + Args: + id: Primary key ID + data: Pydantic schema with update data + + Returns: + Updated model instance or None if not found + + Raises: + DatabaseError: If database operation fails + """ + pass + + @abstractmethod + async def delete(self, id: int) -> bool: + """ + Delete a record by ID. + + Args: + id: Primary key ID + + Returns: + True if deleted, False if not found + """ + pass + + @abstractmethod + async def exists(self, field_name: str, value: Any, exclude_id: int | None = None) -> bool: + """ + Check if a record exists with the given field value. + + Args: + field_name: Name of the field to check + value: Value to check for + exclude_id: Optional ID to exclude from the check (useful for updates) + + Returns: + True if exists, False otherwise + """ + pass + + def _get_model_columns(self) -> list[str]: + """Get list of column names for the model.""" + return [column.name for column in self.model.__table__.columns] + + def _schema_to_dict(self, schema: BaseModel, exclude_unset: bool = False) -> dict[str, Any]: + """ + Convert Pydantic schema to dictionary. 
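+
+        For example, ServerUpdate(state="offline") dumped with
+        exclude_unset=True yields a dict containing only the state key,
+        which is what makes partial updates possible.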
+
+        Args:
+            schema: Pydantic model instance
+            exclude_unset: If True, exclude fields that weren't explicitly set
+
+        Returns:
+            Dictionary representation
+        """
+        return schema.model_dump(exclude_unset=exclude_unset)
diff --git a/app/repositories/factory.py b/app/repositories/factory.py
new file mode 100644
index 0000000..d8d766a
--- /dev/null
+++ b/app/repositories/factory.py
@@ -0,0 +1,136 @@
+"""Repository factory for creating repositories with strategy selection."""
+
+from typing import TypeVar
+
+from pydantic import BaseModel
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import DeclarativeBase
+
+from app.config import settings
+from app.repositories.base import BaseRepository, QueryStrategy
+from app.repositories.strategies.orm import ORMRepository
+from app.repositories.strategies.raw_sql import RawSQLRepository
+
+# Type variables
+ModelType = TypeVar("ModelType", bound=DeclarativeBase)
+CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
+UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
+
+
+class RepositoryFactory:
+    """
+    Factory for creating repository instances with the configured query strategy.
+
+    This factory reads the USE_RAW_SQL configuration setting and returns
+    the appropriate repository implementation (ORM or Raw SQL).
+
+    Usage:
+        # Create a repository for the Server model (strategy taken from config)
+        repo = RepositoryFactory.create(
+            session=db_session,
+            model=Server,
+        )
+
+        # Or specify the strategy explicitly
+        repo = RepositoryFactory.create(
+            session=db_session,
+            model=Server,
+            strategy=QueryStrategy.RAW_SQL,
+        )
+    """
+
+    @staticmethod
+    def create(
+        session: AsyncSession,
+        model: type[ModelType],
+        strategy: QueryStrategy | None = None,
+    ) -> BaseRepository[ModelType, CreateSchemaType, UpdateSchemaType]:
+        """
+        Create a repository instance with the appropriate strategy.
+
+        Args:
+            session: AsyncSession for database operations
+            model: The SQLAlchemy model class
+            strategy: Optional explicit strategy. If not provided, uses config setting.
+
+        Returns:
+            Repository instance (ORM or Raw SQL based on configuration)
+        """
+        # Determine strategy from config if not explicitly provided
+        if strategy is None:
+            strategy = QueryStrategy.RAW_SQL if settings.use_raw_sql else QueryStrategy.ORM
+
+        # Create and return the appropriate repository
+        if strategy == QueryStrategy.RAW_SQL:
+            return RawSQLRepository(session, model)
+        else:
+            return ORMRepository(session, model)
+
+    @staticmethod
+    def create_orm(
+        session: AsyncSession,
+        model: type[ModelType],
+    ) -> ORMRepository[ModelType, CreateSchemaType, UpdateSchemaType]:
+        """
+        Create an ORM repository instance explicitly.
+
+        Args:
+            session: AsyncSession for database operations
+            model: The SQLAlchemy model class
+
+        Returns:
+            ORM Repository instance
+        """
+        return ORMRepository(session, model)
+
+    @staticmethod
+    def create_raw_sql(
+        session: AsyncSession,
+        model: type[ModelType],
+    ) -> RawSQLRepository[ModelType, CreateSchemaType, UpdateSchemaType]:
+        """
+        Create a Raw SQL repository instance explicitly.
+
+        Args:
+            session: AsyncSession for database operations
+            model: The SQLAlchemy model class
+
+        Returns:
+            Raw SQL Repository instance
+        """
+        return RawSQLRepository(session, model)
+
+    @staticmethod
+    def get_current_strategy() -> QueryStrategy:
+        """
+        Get the current query strategy based on configuration.
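+
+        The setting is consulted on every call, so the returned value always
+        reflects the active USE_RAW_SQL configuration.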
+
+        Returns:
+            Current QueryStrategy (ORM or RAW_SQL)
+        """
+        return QueryStrategy.RAW_SQL if settings.use_raw_sql else QueryStrategy.ORM
+
+
+def get_repository(
+    session: AsyncSession,
+    model: type[ModelType],
+    strategy: QueryStrategy | None = None,
+) -> BaseRepository[ModelType, CreateSchemaType, UpdateSchemaType]:
+    """
+    Convenience function to create a repository.
+
+    This is a shorthand for RepositoryFactory.create().
+
+    Args:
+        session: AsyncSession for database operations
+        model: The SQLAlchemy model class
+        strategy: Optional explicit strategy
+
+    Returns:
+        Repository instance based on configuration
+    """
+    return RepositoryFactory.create(session, model, strategy)
diff --git a/app/repositories/server_repository.py b/app/repositories/server_repository.py
new file mode 100644
index 0000000..377a42a
--- /dev/null
+++ b/app/repositories/server_repository.py
@@ -0,0 +1,160 @@
+"""Server-specific repository with domain logic."""
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.exceptions import DatabaseError, DuplicateHostnameError
+from app.models.server import Server
+from app.repositories.base import BaseRepository, QueryStrategy
+from app.repositories.factory import RepositoryFactory
+from app.schemas.server import ServerCreate, ServerUpdate
+
+
+class ServerRepository:
+    """
+    Server repository with domain-specific logic.
+
+    This repository wraps the generic repository (ORM or Raw SQL based on config)
+    and adds Server-specific business rules like hostname uniqueness checks.
+
+    The underlying query strategy (ORM or Raw SQL) is determined by the
+    USE_RAW_SQL configuration setting unless an explicit strategy is passed in.
+    """
+
+    def __init__(self, session: AsyncSession, strategy: QueryStrategy | None = None):
+        """
+        Initialize the server repository.
+
+        Args:
+            session: AsyncSession for database operations
+            strategy: Optional explicit query strategy. If not provided, uses config.
+        """
+        self.session = session
+        # Resolve the strategy once so the `strategy` property reports the one
+        # actually in use, even when an explicit override was passed in.
+        self._strategy = strategy if strategy is not None else RepositoryFactory.get_current_strategy()
+        self._repo: BaseRepository[Server, ServerCreate, ServerUpdate] = RepositoryFactory.create(
+            session=session,
+            model=Server,
+            strategy=self._strategy,
+        )
+
+    @property
+    def strategy(self) -> QueryStrategy:
+        """Get the query strategy this repository was created with."""
+        return self._strategy
+
+    async def create(self, server_data: ServerCreate) -> Server:
+        """
+        Create a new server with hostname uniqueness check.
+
+        Args:
+            server_data: Server creation data
+
+        Returns:
+            Created Server instance
+
+        Raises:
+            DuplicateHostnameError: If hostname already exists
+            DatabaseError: If database operation fails
+        """
+        try:
+            # Check for duplicate hostname before creating
+            if await self._repo.exists("hostname", server_data.hostname):
+                raise DuplicateHostnameError(server_data.hostname)
+
+            return await self._repo.create(server_data)
+        except DuplicateHostnameError:
+            raise
+        except DatabaseError as e:
+            # Check if it's a unique constraint violation
+            if "unique" in str(e).lower() or "hostname" in str(e).lower():
+                raise DuplicateHostnameError(server_data.hostname)
+            raise
+
+    async def get_by_id(self, server_id: int) -> Server | None:
+        """
+        Get server by ID.
+
+        Args:
+            server_id: Server ID
+
+        Returns:
+            Server instance or None if not found
+        """
+        return await self._repo.get_by_id(server_id)
+
+    async def get_by_hostname(self, hostname: str) -> Server | None:
+        """
+        Get server by hostname.
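+
+        Hostnames are unique (enforced by a database constraint), so at most
+        one server can match.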
+
+        Args:
+            hostname: Server hostname
+
+        Returns:
+            Server instance or None if not found
+        """
+        return await self._repo.get_by_field("hostname", hostname)
+
+    async def list_all(self) -> list[Server]:
+        """
+        List all servers ordered by ID.
+
+        Returns:
+            List of Server instances
+        """
+        return await self._repo.list_all(order_by="id")
+
+    async def update(self, server_id: int, server_data: ServerUpdate) -> Server | None:
+        """
+        Update an existing server with hostname uniqueness check.
+
+        Args:
+            server_id: Server ID
+            server_data: Server update data
+
+        Returns:
+            Updated Server instance or None if not found
+
+        Raises:
+            DuplicateHostnameError: If new hostname already exists
+            DatabaseError: If database operation fails
+        """
+        try:
+            # If hostname is being updated, check for duplicates
+            if server_data.hostname:
+                if await self._repo.exists("hostname", server_data.hostname, exclude_id=server_id):
+                    raise DuplicateHostnameError(server_data.hostname)
+
+            return await self._repo.update(server_id, server_data)
+        except DuplicateHostnameError:
+            raise
+        except DatabaseError as e:
+            # Check if it's a unique constraint violation
+            if "unique" in str(e).lower() or "hostname" in str(e).lower():
+                raise DuplicateHostnameError(server_data.hostname or "")
+            raise
+
+    async def delete(self, server_id: int) -> bool:
+        """
+        Delete a server.
+
+        Args:
+            server_id: Server ID
+
+        Returns:
+            True if deleted, False if not found
+        """
+        return await self._repo.delete(server_id)
+
+
+def get_server_repository(
+    session: AsyncSession, strategy: QueryStrategy | None = None
+) -> ServerRepository:
+    """
+    Factory function to create a ServerRepository.
+
+    Args:
+        session: AsyncSession for database operations
+        strategy: Optional explicit query strategy
+
+    Returns:
+        ServerRepository instance
+    """
+    return ServerRepository(session, strategy)
diff --git a/app/repositories/strategies/__init__.py b/app/repositories/strategies/__init__.py
new file mode 100644
index 0000000..2cb6fc9
--- /dev/null
+++ b/app/repositories/strategies/__init__.py
@@ -0,0 +1,6 @@
+"""Repository strategy implementations."""
+
+from app.repositories.strategies.orm import ORMRepository
+from app.repositories.strategies.raw_sql import RawSQLRepository
+
+__all__ = ["ORMRepository", "RawSQLRepository"]
diff --git a/app/repositories/strategies/orm.py b/app/repositories/strategies/orm.py
new file mode 100644
index 0000000..caf2401
--- /dev/null
+++ b/app/repositories/strategies/orm.py
@@ -0,0 +1,220 @@
+"""Generic ORM repository implementation using SQLAlchemy ORM."""
+
+from typing import Any
+
+from sqlalchemy import select
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.exceptions import DatabaseError
+from app.repositories.base import (
+    BaseRepository,
+    CreateSchemaType,
+    ModelType,
+    UpdateSchemaType,
+)
+
+
+class ORMRepository(BaseRepository[ModelType, CreateSchemaType, UpdateSchemaType]):
+    """
+    Generic repository implementation using SQLAlchemy ORM.
+
+    This repository uses SQLAlchemy's ORM features for all database operations,
+    providing a clean, Pythonic interface for CRUD operations.
+
+    Type Parameters:
+        ModelType: The SQLAlchemy model class
+        CreateSchemaType: Pydantic schema for create operations
+        UpdateSchemaType: Pydantic schema for update operations
+    """
+
+    def __init__(self, session: AsyncSession, model: type[ModelType]):
+        """
+        Initialize the ORM repository.
+ + Args: + session: AsyncSession for database operations + model: The SQLAlchemy model class + """ + super().__init__(session, model) + + async def create(self, data: CreateSchemaType) -> ModelType: + """ + Create a new record using SQLAlchemy ORM. + + Args: + data: Pydantic schema with creation data + + Returns: + Created model instance + + Raises: + DatabaseError: If database operation fails + """ + try: + # Convert schema to dict and create model instance + data_dict = self._schema_to_dict(data) + instance = self.model(**data_dict) + + self.session.add(instance) + await self.session.flush() + await self.session.refresh(instance) + + return instance + except IntegrityError as e: + await self.session.rollback() + raise DatabaseError(f"Database integrity error: {str(e)}") + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to create record: {str(e)}") + + async def get_by_id(self, id: int) -> ModelType | None: + """ + Get a record by ID using SQLAlchemy ORM. + + Args: + id: Primary key ID + + Returns: + Model instance or None if not found + """ + try: + result = await self.session.execute(select(self.model).where(self.model.id == id)) + return result.scalar_one_or_none() + except Exception as e: + raise DatabaseError(f"Failed to get record by ID: {str(e)}") + + async def get_by_field(self, field_name: str, value: Any) -> ModelType | None: + """ + Get a record by a specific field value using SQLAlchemy ORM. + + Args: + field_name: Name of the field to search by + value: Value to match + + Returns: + Model instance or None if not found + """ + try: + column = getattr(self.model, field_name, None) + if column is None: + raise DatabaseError(f"Invalid field name: {field_name}") + + result = await self.session.execute(select(self.model).where(column == value)) + return result.scalar_one_or_none() + except DatabaseError: + raise + except Exception as e: + raise DatabaseError(f"Failed to get record by {field_name}: {str(e)}") + + async def list_all(self, order_by: str | None = "id") -> list[ModelType]: + """ + List all records using SQLAlchemy ORM. + + Args: + order_by: Field name to order by (default: "id") + + Returns: + List of model instances + """ + try: + query = select(self.model) + + if order_by: + order_column = getattr(self.model, order_by, None) + if order_column is not None: + query = query.order_by(order_column) + + result = await self.session.execute(query) + return list(result.scalars().all()) + except Exception as e: + raise DatabaseError(f"Failed to list records: {str(e)}") + + async def update(self, id: int, data: UpdateSchemaType) -> ModelType | None: + """ + Update an existing record using SQLAlchemy ORM. 
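+
+        Only fields explicitly set on the schema are applied (exclude_unset),
+        so a request that sends a single field leaves the others untouched.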
+ + Args: + id: Primary key ID + data: Pydantic schema with update data + + Returns: + Updated model instance or None if not found + + Raises: + DatabaseError: If database operation fails + """ + try: + # Get existing record + instance = await self.get_by_id(id) + if not instance: + return None + + # Update fields from schema (excluding unset values) + update_data = self._schema_to_dict(data, exclude_unset=True) + for field, value in update_data.items(): + if hasattr(instance, field): + setattr(instance, field, value) + + await self.session.flush() + await self.session.refresh(instance) + + return instance + except IntegrityError as e: + await self.session.rollback() + raise DatabaseError(f"Database integrity error: {str(e)}") + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to update record: {str(e)}") + + async def delete(self, id: int) -> bool: + """ + Delete a record by ID using SQLAlchemy ORM. + + Args: + id: Primary key ID + + Returns: + True if deleted, False if not found + """ + try: + instance = await self.get_by_id(id) + if not instance: + return False + + await self.session.delete(instance) + await self.session.flush() + + return True + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to delete record: {str(e)}") + + async def exists(self, field_name: str, value: Any, exclude_id: int | None = None) -> bool: + """ + Check if a record exists with the given field value using SQLAlchemy ORM. + + Args: + field_name: Name of the field to check + value: Value to check for + exclude_id: Optional ID to exclude from the check + + Returns: + True if exists, False otherwise + """ + try: + column = getattr(self.model, field_name, None) + if column is None: + raise DatabaseError(f"Invalid field name: {field_name}") + + query = select(self.model).where(column == value) + + if exclude_id is not None: + query = query.where(self.model.id != exclude_id) + + result = await self.session.execute(query) + return result.scalar_one_or_none() is not None + except DatabaseError: + raise + except Exception as e: + raise DatabaseError(f"Failed to check existence: {str(e)}") diff --git a/app/repositories/strategies/raw_sql.py b/app/repositories/strategies/raw_sql.py new file mode 100644 index 0000000..aa34c88 --- /dev/null +++ b/app/repositories/strategies/raw_sql.py @@ -0,0 +1,327 @@ +"""Generic Raw SQL repository implementation using parameterized queries.""" + +from typing import Any + +from sqlalchemy import text +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import DatabaseError +from app.repositories.base import ( + BaseRepository, + CreateSchemaType, + ModelType, + UpdateSchemaType, +) + + +class RawSQLRepository(BaseRepository[ModelType, CreateSchemaType, UpdateSchemaType]): + """ + Generic repository implementation using raw SQL queries. + + This repository uses parameterized raw SQL queries for all database operations, + providing direct control over SQL while maintaining security through parameterization. + + Type Parameters: + ModelType: The SQLAlchemy model class (used for metadata) + CreateSchemaType: Pydantic schema for create operations + UpdateSchemaType: Pydantic schema for update operations + """ + + def __init__(self, session: AsyncSession, model: type[ModelType]): + """ + Initialize the Raw SQL repository. 
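+
+        Column names are cached from the model metadata up front; later
+        methods validate caller-supplied field names against this list before
+        interpolating them into SQL, while values are always bound parameters.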
+ + Args: + session: AsyncSession for database operations + model: The SQLAlchemy model class (used for table name and column info) + """ + super().__init__(session, model) + self._columns = self._get_model_columns() + + def _row_to_model(self, row) -> ModelType: + """ + Convert a database row to model instance. + + Args: + row: Database row from query result + + Returns: + Model instance populated with row data + """ + if row is None: + return None + + # Create a dict from the row using column names + row_dict = {} + for col in self._columns: + if hasattr(row, col): + row_dict[col] = getattr(row, col) + elif hasattr(row, "_mapping") and col in row._mapping: + row_dict[col] = row._mapping[col] + + # Create model instance by calling constructor with data + # This works better with SQLAlchemy models + instance = self.model(**row_dict) + return instance + + def _build_select_columns(self) -> str: + """Build the SELECT column list for queries.""" + return ", ".join(self._columns) + + async def create(self, data: CreateSchemaType) -> ModelType: + """ + Create a new record using raw SQL. + + Args: + data: Pydantic schema with creation data + + Returns: + Created model instance + + Raises: + DatabaseError: If database operation fails + """ + try: + data_dict = self._schema_to_dict(data) + + # Build column and parameter lists (excluding auto-generated fields) + insert_columns = [col for col in data_dict.keys() if col in self._columns] + param_placeholders = [f":{col}" for col in insert_columns] + + # Build the INSERT query with RETURNING clause + insert_sql = text( + f""" + INSERT INTO {self.table_name} ({", ".join(insert_columns)}, created_at, updated_at) + VALUES ({", ".join(param_placeholders)}, NOW(), NOW()) + RETURNING {self._build_select_columns()} + """ + ) + + result = await self.session.execute(insert_sql, data_dict) + row = result.fetchone() + + return self._row_to_model(row) + except IntegrityError as e: + await self.session.rollback() + raise DatabaseError(f"Database integrity error: {str(e)}") + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to create record: {str(e)}") + + async def get_by_id(self, id: int) -> ModelType | None: + """ + Get a record by ID using raw SQL. + + Args: + id: Primary key ID + + Returns: + Model instance or None if not found + """ + try: + select_sql = text( + f""" + SELECT {self._build_select_columns()} + FROM {self.table_name} + WHERE id = :id + """ + ) + + result = await self.session.execute(select_sql, {"id": id}) + row = result.fetchone() + + return self._row_to_model(row) if row else None + except Exception as e: + raise DatabaseError(f"Failed to get record by ID: {str(e)}") + + async def get_by_field(self, field_name: str, value: Any) -> ModelType | None: + """ + Get a record by a specific field value using raw SQL. 
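+
+        Only the validated field name is interpolated into the statement;
+        the value itself travels as a bound parameter.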
+ + Args: + field_name: Name of the field to search by + value: Value to match + + Returns: + Model instance or None if not found + """ + try: + if field_name not in self._columns: + raise DatabaseError(f"Invalid field name: {field_name}") + + select_sql = text( + f""" + SELECT {self._build_select_columns()} + FROM {self.table_name} + WHERE {field_name} = :value + """ + ) + + result = await self.session.execute(select_sql, {"value": value}) + row = result.fetchone() + + return self._row_to_model(row) if row else None + except DatabaseError: + raise + except Exception as e: + raise DatabaseError(f"Failed to get record by {field_name}: {str(e)}") + + async def list_all(self, order_by: str | None = "id") -> list[ModelType]: + """ + List all records using raw SQL. + + Args: + order_by: Field name to order by (default: "id") + + Returns: + List of model instances + """ + try: + # Validate order_by column to prevent SQL injection + order_clause = "" + if order_by and order_by in self._columns: + order_clause = f"ORDER BY {order_by}" + + select_sql = text( + f""" + SELECT {self._build_select_columns()} + FROM {self.table_name} + {order_clause} + """ + ) + + result = await self.session.execute(select_sql) + rows = result.fetchall() + + return [self._row_to_model(row) for row in rows] + except Exception as e: + raise DatabaseError(f"Failed to list records: {str(e)}") + + async def update(self, id: int, data: UpdateSchemaType) -> ModelType | None: + """ + Update an existing record using raw SQL. + + Args: + id: Primary key ID + data: Pydantic schema with update data + + Returns: + Updated model instance or None if not found + + Raises: + DatabaseError: If database operation fails + """ + try: + # Check if record exists first + existing = await self.get_by_id(id) + if not existing: + return None + + # Get update data (excluding unset fields) + update_data = self._schema_to_dict(data, exclude_unset=True) + if not update_data: + # No fields to update, return existing record + return existing + + # Build SET clause dynamically + set_clauses = [] + params = {"id": id} + + for field, value in update_data.items(): + if field in self._columns: + set_clauses.append(f"{field} = :{field}") + params[field] = value + + # Always update updated_at + set_clauses.append("updated_at = NOW()") + + update_sql = text( + f""" + UPDATE {self.table_name} + SET {", ".join(set_clauses)} + WHERE id = :id + RETURNING {self._build_select_columns()} + """ + ) + + result = await self.session.execute(update_sql, params) + row = result.fetchone() + + return self._row_to_model(row) if row else None + except IntegrityError as e: + await self.session.rollback() + raise DatabaseError(f"Database integrity error: {str(e)}") + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to update record: {str(e)}") + + async def delete(self, id: int) -> bool: + """ + Delete a record by ID using raw SQL. 
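+
+        A lookup is issued first so the method can distinguish "deleted" from
+        "never existed" and return an accurate boolean.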
+ + Args: + id: Primary key ID + + Returns: + True if deleted, False if not found + """ + try: + # Check if record exists first + existing = await self.get_by_id(id) + if not existing: + return False + + delete_sql = text( + f""" + DELETE FROM {self.table_name} + WHERE id = :id + """ + ) + + await self.session.execute(delete_sql, {"id": id}) + return True + except Exception as e: + await self.session.rollback() + raise DatabaseError(f"Failed to delete record: {str(e)}") + + async def exists(self, field_name: str, value: Any, exclude_id: int | None = None) -> bool: + """ + Check if a record exists with the given field value using raw SQL. + + Args: + field_name: Name of the field to check + value: Value to check for + exclude_id: Optional ID to exclude from the check + + Returns: + True if exists, False otherwise + """ + try: + if field_name not in self._columns: + raise DatabaseError(f"Invalid field name: {field_name}") + + params = {"value": value} + exclude_clause = "" + + if exclude_id is not None: + exclude_clause = "AND id != :exclude_id" + params["exclude_id"] = exclude_id + + exists_sql = text( + f""" + SELECT EXISTS( + SELECT 1 FROM {self.table_name} + WHERE {field_name} = :value {exclude_clause} + ) AS exists_flag + """ + ) + + result = await self.session.execute(exists_sql, params) + row = result.fetchone() + + return row.exists_flag if row else False + except DatabaseError: + raise + except Exception as e: + raise DatabaseError(f"Failed to check existence: {str(e)}") diff --git a/app/routes/__init__.py b/app/routes/__init__.py new file mode 100644 index 0000000..1849971 --- /dev/null +++ b/app/routes/__init__.py @@ -0,0 +1 @@ +"""FastAPI route handlers.""" diff --git a/app/routes/health.py b/app/routes/health.py new file mode 100644 index 0000000..d7eada6 --- /dev/null +++ b/app/routes/health.py @@ -0,0 +1,76 @@ +"""Health check endpoints.""" + +from datetime import UTC, datetime + +from fastapi import APIRouter, Depends, status +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from app.database import get_db +from app.schemas.common import HealthResponse + +router = APIRouter(tags=["health"]) + + +@router.get( + "/health", + response_model=HealthResponse, + status_code=status.HTTP_200_OK, + summary="Liveness probe", + description="Check if the application is alive", +) +async def health_check() -> HealthResponse: + """ + Liveness probe endpoint. + + Returns: + Health status indicating the application is running + """ + return HealthResponse( + status="healthy", + timestamp=datetime.now(UTC).isoformat(), + ) + + +@router.get( + "/health/ready", + response_model=HealthResponse, + status_code=status.HTTP_200_OK, + summary="Readiness probe", + description="Check if the application is ready to serve traffic (checks database connection)", + responses={ + 503: { + "description": "Service Unavailable", + "model": HealthResponse, + }, + }, +) +async def readiness_check( + db: AsyncSession = Depends(get_db), +) -> HealthResponse: + """ + Readiness probe endpoint that checks database connectivity. 
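+
+    Issues a lightweight SELECT 1; any failure is reported as 503 so an
+    orchestrator (e.g. Kubernetes) stops routing traffic to this instance.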
+ + Args: + db: Database session + + Returns: + Health status indicating the application is ready + + Raises: + HTTPException: 503 if database is not available + """ + try: + # Test database connection + await db.execute(text("SELECT 1")) + return HealthResponse( + status="healthy", + timestamp=datetime.now(UTC).isoformat(), + ) + except Exception: + from fastapi import HTTPException + + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail="Database connection failed", + ) diff --git a/app/routes/servers.py b/app/routes/servers.py new file mode 100644 index 0000000..1461559 --- /dev/null +++ b/app/routes/servers.py @@ -0,0 +1,236 @@ +"""Server CRUD endpoints.""" + +from typing import Annotated + +from fastapi import APIRouter, Depends, HTTPException, Response, status +from sqlalchemy.ext.asyncio import AsyncSession + +from app.database import get_db +from app.exceptions import ( + BaseApplicationError, + DuplicateHostnameError, + ServerNotFoundError, +) +from app.schemas.common import ErrorResponse +from app.schemas.server import ( + ServerCreate, + ServerListResponse, + ServerResponse, + ServerUpdate, +) +from app.services.server_service import ServerService + +router = APIRouter(prefix="/servers", tags=["servers"]) + + +def get_service(db: Annotated[AsyncSession, Depends(get_db)]) -> ServerService: + """Dependency to get ServerService instance.""" + return ServerService(db) + + +@router.post( + "", + response_model=ServerResponse, + status_code=status.HTTP_201_CREATED, + summary="Create a new server", + description="Create a new server with hostname, IP address, and state", + responses={ + 201: { + "description": "Server created successfully", + "headers": { + "Location": { + "description": "URL of the created server", + "schema": {"type": "string"}, + } + }, + }, + 422: {"description": "Validation Error"}, + 409: {"description": "Conflict - Duplicate hostname", "model": ErrorResponse}, + }, +) +async def create_server( + server_data: ServerCreate, + response: Response, + service: Annotated[ServerService, Depends(get_service)], +) -> ServerResponse: + """ + Create a new server. + + Args: + server_data: Server creation data (validated by Pydantic) + response: FastAPI response object + service: Server service instance + + Returns: + Created server response + + Raises: + HTTPException: 409 for duplicate hostname + """ + try: + server = await service.create_server(server_data) + # RFC 7231: Location header for created resource + response.headers["Location"] = f"/servers/{server.id}" + return server + except DuplicateHostnameError as e: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=create_error_response(e), + ) + + +@router.get( + "", + response_model=ServerListResponse, + status_code=status.HTTP_200_OK, + summary="List all servers", + description="Get a list of all servers", +) +async def list_servers( + service: Annotated[ServerService, Depends(get_service)], +) -> ServerListResponse: + """ + List all servers. 
+ + Args: + service: Server service instance + + Returns: + List of servers + """ + return await service.list_servers() + + +@router.get( + "/{server_id}", + response_model=ServerResponse, + status_code=status.HTTP_200_OK, + summary="Get server by ID", + description="Get a single server by its ID", + responses={ + 200: {"description": "Server found"}, + 404: {"description": "Server not found", "model": ErrorResponse}, + }, +) +async def get_server( + server_id: int, + service: Annotated[ServerService, Depends(get_service)], +) -> ServerResponse: + """ + Get server by ID. + + Args: + server_id: Server ID + service: Server service instance + + Returns: + Server response + + Raises: + HTTPException: 404 if server not found + """ + try: + return await service.get_server(server_id) + except ServerNotFoundError as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=create_error_response(e), + ) + + +@router.put( + "/{server_id}", + response_model=ServerResponse, + status_code=status.HTTP_200_OK, + summary="Update server", + description="Update an existing server (all fields optional)", + responses={ + 200: {"description": "Server updated successfully"}, + 422: {"description": "Validation Error"}, + 404: {"description": "Server not found", "model": ErrorResponse}, + 409: {"description": "Conflict - Duplicate hostname", "model": ErrorResponse}, + }, +) +async def update_server( + server_id: int, + server_data: ServerUpdate, + service: Annotated[ServerService, Depends(get_service)], +) -> ServerResponse: + """ + Update an existing server. + + Args: + server_id: Server ID + server_data: Server update data (validated by Pydantic) + service: Server service instance + + Returns: + Updated server response + + Raises: + HTTPException: 404 if not found, 409 for duplicate hostname + """ + try: + return await service.update_server(server_id, server_data) + except ServerNotFoundError as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=create_error_response(e), + ) + except DuplicateHostnameError as e: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=create_error_response(e), + ) + + +@router.delete( + "/{server_id}", + status_code=status.HTTP_204_NO_CONTENT, + summary="Delete server", + description="Delete a server by ID", + responses={ + 204: {"description": "Server deleted successfully"}, + 404: {"description": "Server not found", "model": ErrorResponse}, + }, +) +async def delete_server( + server_id: int, + service: Annotated[ServerService, Depends(get_service)], +) -> None: + """ + Delete a server. + + Args: + server_id: Server ID + service: Server service instance + + Raises: + HTTPException: 404 if server not found + """ + try: + await service.delete_server(server_id) + except ServerNotFoundError as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=create_error_response(e), + ) + + +def create_error_response(error: BaseApplicationError) -> dict: + """ + Create RFC 7807 Problem Details error response. 
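+
+    The "instance" URI is omitted here because this dict is embedded in
+    HTTPException.detail; the application error handler in app.main fills
+    it in for errors that propagate as exceptions.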
+ + Args: + error: Application error instance + + Returns: + Error response dictionary + """ + return { + "type": "about:blank", + "title": error.__class__.__name__.replace("Error", ""), + "status": error.status_code, + "detail": error.detail, + "extra": error.extra, + } diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py new file mode 100644 index 0000000..456aa29 --- /dev/null +++ b/app/schemas/__init__.py @@ -0,0 +1,18 @@ +"""Pydantic schemas for request/response validation.""" + +from app.schemas.common import ErrorResponse, HealthResponse +from app.schemas.server import ( + ServerCreate, + ServerListResponse, + ServerResponse, + ServerUpdate, +) + +__all__ = [ + "ServerCreate", + "ServerUpdate", + "ServerResponse", + "ServerListResponse", + "ErrorResponse", + "HealthResponse", +] diff --git a/app/schemas/common.py b/app/schemas/common.py new file mode 100644 index 0000000..a66d3b2 --- /dev/null +++ b/app/schemas/common.py @@ -0,0 +1,52 @@ +"""Common Pydantic schemas for shared response formats.""" + +from typing import Any + +from pydantic import BaseModel, ConfigDict, Field + + +class ErrorResponse(BaseModel): + """RFC 7807 Problem Details for HTTP APIs error response.""" + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "type": "about:blank", + "title": "Not Found", + "status": 404, + "detail": "Server with id 123 not found", + } + } + ) + + type: str = Field( + default="about:blank", + description="A URI reference that identifies the problem type", + ) + title: str = Field(description="A short, human-readable summary of the problem type") + status: int = Field(description="The HTTP status code") + detail: str = Field(description="A human-readable explanation specific to this occurrence") + instance: str | None = Field( + default=None, + description="A URI reference that identifies the specific occurrence of the problem", + ) + extra: dict[str, Any] | None = Field( + default=None, + description="Additional error context", + ) + + +class HealthResponse(BaseModel): + """Health check response schema.""" + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "status": "healthy", + "timestamp": "2024-01-01T00:00:00Z", + } + } + ) + + status: str = Field(description="Health status: healthy or unhealthy") + timestamp: str = Field(description="ISO 8601 timestamp of the health check") diff --git a/app/schemas/server.py b/app/schemas/server.py new file mode 100644 index 0000000..9961d6c --- /dev/null +++ b/app/schemas/server.py @@ -0,0 +1,156 @@ +"""Server Pydantic schemas for request/response validation.""" + +import ipaddress +from datetime import datetime +from enum import Enum + +from pydantic import BaseModel, ConfigDict, Field, field_validator + + +class ServerState(str, Enum): + """Server state enumeration.""" + + ACTIVE = "active" + OFFLINE = "offline" + RETIRED = "retired" + + +class ServerBase(BaseModel): + """Base schema with shared fields and validators.""" + + hostname: str | None = Field( + default=None, + min_length=1, + max_length=255, + description="Unique server hostname", + examples=["web-server-01"], + ) + ip_address: str | None = Field( + default=None, + description="Server IP address (IPv4 or IPv6)", + examples=["192.168.1.100"], + ) + state: ServerState | None = Field( + default=None, + description="Server state: active, offline, or retired", + examples=[ServerState.ACTIVE], + ) + + @field_validator("hostname") + @classmethod + def validate_hostname(cls, v: str | None) -> str | None: + """Validate hostname is not empty.""" + 
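# A missing value passes through untouched so optional updates can omit it.
+        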
if v is None: + return v + stripped = v.strip() + if not stripped: + raise ValueError("Hostname cannot be empty") + return stripped + + @field_validator("ip_address") + @classmethod + def validate_ip_address(cls, v: str | None) -> str | None: + """Validate IP address format.""" + if v is None: + return v + try: + ipaddress.ip_address(v) + return v + except ValueError: + raise ValueError(f"Invalid IP address format: {v}") + + +class ServerCreate(ServerBase): + """Schema for creating a new server - all fields required.""" + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + } + } + ) + + hostname: str = Field( + ..., + min_length=1, + max_length=255, + description="Unique server hostname", + examples=["web-server-01"], + ) + ip_address: str = Field( + ..., + description="Server IP address (IPv4 or IPv6)", + examples=["192.168.1.100"], + ) + state: ServerState = Field( + ..., + description="Server state: active, offline, or retired", + examples=[ServerState.ACTIVE], + ) + + +class ServerUpdate(ServerBase): + """Schema for updating an existing server - all fields optional.""" + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "hostname": "web-server-01", + "ip_address": "192.168.1.101", + "state": "offline", + } + } + ) + + +class ServerResponse(BaseModel): + """Schema for server response.""" + + model_config = ConfigDict( + from_attributes=True, + json_schema_extra={ + "example": { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + } + }, + ) + + id: int = Field(description="Server ID") + hostname: str = Field(description="Server hostname") + ip_address: str = Field(description="Server IP address") + state: ServerState = Field(description="Server state") + created_at: datetime = Field(description="Timestamp when server was created") + updated_at: datetime = Field(description="Timestamp when server was last updated") + + +class ServerListResponse(BaseModel): + """Schema for list of servers response.""" + + model_config = ConfigDict( + json_schema_extra={ + "example": { + "servers": [ + { + "id": 1, + "hostname": "web-server-01", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + } + ], + "count": 1, + } + } + ) + + servers: list[ServerResponse] = Field(description="List of servers") + count: int = Field(description="Total number of servers") diff --git a/app/services/__init__.py b/app/services/__init__.py new file mode 100644 index 0000000..b3e2974 --- /dev/null +++ b/app/services/__init__.py @@ -0,0 +1,5 @@ +"""Service layer for business logic.""" + +from app.services.server_service import ServerService + +__all__ = ["ServerService"] diff --git a/app/services/server_service.py b/app/services/server_service.py new file mode 100644 index 0000000..b11ab9e --- /dev/null +++ b/app/services/server_service.py @@ -0,0 +1,108 @@ +"""Server service for business logic.""" + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import ServerNotFoundError +from app.repositories import QueryStrategy, ServerRepository +from app.schemas.server import ( + ServerCreate, + ServerListResponse, + ServerResponse, + ServerUpdate, +) + + +class ServerService: + """Service for server business logic.""" + + def __init__(self, session: AsyncSession, strategy: QueryStrategy = 
None): + """ + Initialize service with database session. + + Args: + session: AsyncSession for database operations + strategy: Optional explicit query strategy (ORM or RAW_SQL) + """ + self.repository = ServerRepository(session, strategy) + + async def create_server(self, server_data: ServerCreate) -> ServerResponse: + """ + Create a new server. + + Args: + server_data: Validated server creation data + + Returns: + Created server response + + Raises: + DuplicateHostnameError: If hostname already exists + """ + server = await self.repository.create(server_data) + return ServerResponse.model_validate(server) + + async def get_server(self, server_id: int) -> ServerResponse: + """ + Get server by ID. + + Args: + server_id: Server ID + + Returns: + Server response + + Raises: + ServerNotFoundError: If server not found + """ + server = await self.repository.get_by_id(server_id) + if not server: + raise ServerNotFoundError(server_id) + + return ServerResponse.model_validate(server) + + async def list_servers(self) -> ServerListResponse: + """ + List all servers. + + Returns: + List of server responses + """ + servers = await self.repository.list_all() + server_responses = [ServerResponse.model_validate(server) for server in servers] + + return ServerListResponse(servers=server_responses, count=len(server_responses)) + + async def update_server(self, server_id: int, server_data: ServerUpdate) -> ServerResponse: + """ + Update an existing server. + + Args: + server_id: Server ID + server_data: Validated server update data + + Returns: + Updated server response + + Raises: + ServerNotFoundError: If server not found + DuplicateHostnameError: If hostname already exists + """ + server = await self.repository.update(server_id, server_data) + if not server: + raise ServerNotFoundError(server_id) + + return ServerResponse.model_validate(server) + + async def delete_server(self, server_id: int) -> None: + """ + Delete a server. 
+ + Args: + server_id: Server ID + + Raises: + ServerNotFoundError: If server not found + """ + deleted = await self.repository.delete(server_id) + if not deleted: + raise ServerNotFoundError(server_id) diff --git a/cli/__init__.py b/cli/__init__.py new file mode 100644 index 0000000..d46ded0 --- /dev/null +++ b/cli/__init__.py @@ -0,0 +1,3 @@ +"""CLI tool for server inventory management.""" + +__author__ = "Ali Khan" diff --git a/cli/main.py b/cli/main.py new file mode 100644 index 0000000..5d71a0b --- /dev/null +++ b/cli/main.py @@ -0,0 +1,245 @@ +"""CLI tool for server inventory management using Typer.""" + +import os +import sys + +import httpx +import typer +from rich.console import Console +from rich.json import JSON +from rich.table import Table + +# Initialize Typer app and Rich console +app = typer.Typer(help="Server Inventory Management CLI") +console = Console() + +# Get API base URL from environment or use default +API_BASE_URL = os.getenv("API_BASE_URL", "http://localhost:8000") + + +def get_client() -> httpx.AsyncClient: + """Create HTTP client for API requests.""" + return httpx.AsyncClient(base_url=API_BASE_URL, timeout=30.0) + + +def print_error(message: str, error_detail: str | None = None) -> None: + """Print error message.""" + console.print(f"[red]Error:[/red] {message}") + if error_detail: + console.print(f"[dim]{error_detail}[/dim]") + + +def print_success(message: str) -> None: + """Print success message.""" + console.print(f"[green]✓[/green] {message}") + + +@app.command() +def create( + hostname: str = typer.Argument(..., help="Server hostname"), + ip_address: str = typer.Argument(..., help="Server IP address (IPv4 or IPv6)"), + state: str = typer.Argument(..., help="Server state: active, offline, or retired"), +) -> None: + """Create a new server.""" + import asyncio + + async def _create(): + async with get_client() as client: + try: + response = await client.post( + "/servers", + json={ + "hostname": hostname, + "ip_address": ip_address, + "state": state, + }, + ) + response.raise_for_status() + server = response.json() + print_success("Server created successfully") + console.print(JSON.from_data(server, indent=2)) + except httpx.HTTPStatusError as e: + error_data = ( + e.response.json() + if e.response.headers.get("content-type") == "application/json" + else {} + ) + detail = error_data.get("detail", e.response.text) + print_error(f"Failed to create server: {e.response.status_code}", detail) + sys.exit(1) + except Exception as e: + print_error(f"Failed to create server: {str(e)}") + sys.exit(1) + + asyncio.run(_create()) + + +@app.command() +def list() -> None: + """List all servers.""" + import asyncio + + async def _list(): + async with get_client() as client: + try: + response = await client.get("/servers") + response.raise_for_status() + data = response.json() + servers = data.get("servers", []) + count = data.get("count", 0) + + if count == 0: + console.print("[yellow]No servers found[/yellow]") + return + + # Create table + table = Table(title=f"Servers ({count})") + table.add_column("ID", style="cyan") + table.add_column("Hostname", style="magenta") + table.add_column("IP Address", style="green") + table.add_column("State", style="yellow") + table.add_column("Created At", style="dim") + + for server in servers: + table.add_row( + str(server["id"]), + server["hostname"], + server["ip_address"], + server["state"], + server["created_at"], + ) + + console.print(table) + except httpx.HTTPStatusError as e: + error_data = ( + e.response.json() + if 
e.response.headers.get("content-type") == "application/json" + else {} + ) + detail = error_data.get("detail", e.response.text) + print_error(f"Failed to list servers: {e.response.status_code}", detail) + sys.exit(1) + except Exception as e: + print_error(f"Failed to list servers: {str(e)}") + sys.exit(1) + + asyncio.run(_list()) + + +@app.command() +def get( + server_id: int = typer.Argument(..., help="Server ID"), +) -> None: + """Get a server by ID.""" + import asyncio + + async def _get(): + async with get_client() as client: + try: + response = await client.get(f"/servers/{server_id}") + response.raise_for_status() + server = response.json() + console.print(JSON.from_data(server, indent=2)) + except httpx.HTTPStatusError as e: + error_data = ( + e.response.json() + if e.response.headers.get("content-type") == "application/json" + else {} + ) + detail = error_data.get("detail", e.response.text) + if e.response.status_code == 404: + print_error(f"Server with ID {server_id} not found", detail) + else: + print_error(f"Failed to get server: {e.response.status_code}", detail) + sys.exit(1) + except Exception as e: + print_error(f"Failed to get server: {str(e)}") + sys.exit(1) + + asyncio.run(_get()) + + +@app.command() +def update( + server_id: int = typer.Argument(..., help="Server ID"), + hostname: str | None = typer.Option(None, "--hostname", "-n", help="Update hostname"), + ip_address: str | None = typer.Option(None, "--ip", "-i", help="Update IP address"), + state: str | None = typer.Option(None, "--state", "-s", help="Update state"), +) -> None: + """Update a server.""" + import asyncio + + async def _update(): + # Build update payload + payload = {} + if hostname is not None: + payload["hostname"] = hostname + if ip_address is not None: + payload["ip_address"] = ip_address + if state is not None: + payload["state"] = state + + if not payload: + print_error("At least one field must be provided for update") + sys.exit(1) + + async with get_client() as client: + try: + response = await client.put(f"/servers/{server_id}", json=payload) + response.raise_for_status() + server = response.json() + print_success(f"Server {server_id} updated successfully") + console.print(JSON.from_data(server, indent=2)) + except httpx.HTTPStatusError as e: + error_data = ( + e.response.json() + if e.response.headers.get("content-type") == "application/json" + else {} + ) + detail = error_data.get("detail", e.response.text) + if e.response.status_code == 404: + print_error(f"Server with ID {server_id} not found", detail) + else: + print_error(f"Failed to update server: {e.response.status_code}", detail) + sys.exit(1) + except Exception as e: + print_error(f"Failed to update server: {str(e)}") + sys.exit(1) + + asyncio.run(_update()) + + +@app.command() +def delete( + server_id: int = typer.Argument(..., help="Server ID"), +) -> None: + """Delete a server.""" + import asyncio + + async def _delete(): + async with get_client() as client: + try: + response = await client.delete(f"/servers/{server_id}") + response.raise_for_status() + print_success(f"Server {server_id} deleted successfully") + except httpx.HTTPStatusError as e: + error_data = ( + e.response.json() + if e.response.headers.get("content-type") == "application/json" + else {} + ) + detail = error_data.get("detail", e.response.text) + if e.response.status_code == 404: + print_error(f"Server with ID {server_id} not found", detail) + else: + print_error(f"Failed to delete server: {e.response.status_code}", detail) + sys.exit(1) + except Exception as e: 
+ print_error(f"Failed to delete server: {str(e)}") + sys.exit(1) + + asyncio.run(_delete()) + + +if __name__ == "__main__": + app() diff --git a/docker-compose.test.yml b/docker-compose.test.yml new file mode 100644 index 0000000..17fd2cc --- /dev/null +++ b/docker-compose.test.yml @@ -0,0 +1,62 @@ +# Docker Compose for Testing Environment +# Usage: docker-compose -f docker-compose.test.yml up --build + +services: + db-test: + image: postgres:15-alpine + container_name: server_inventory_db_test + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: server_inventory_test + ports: + - "5433:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - test-network + + api-test: + build: + context: . + dockerfile: docker/Dockerfile + target: testing + container_name: server_inventory_api_test + environment: + environment: TESTING + # Database configuration + DATABASE_DIALECT: postgresql + DATABASE_DRIVER: asyncpg + DATABASE_USERNAME: postgres + DATABASE_PASSWORD: postgres + DATABASE_NAME: server_inventory_test + DATABASE_HOST: db-test + DATABASE_PORT: 5432 + DATABASE_ENGINE_ECHO: "false" + DATABASE_POOL_SIZE: 5 + DATABASE_POOL_MAX_OVERFLOW: 10 + LOG_LEVEL: DEBUG + CORS_ORIGINS: "*" + ports: + - "8001:8000" + depends_on: + db-test: + condition: service_healthy + networks: + - test-network + command: > + sh -c " + echo 'Waiting for database...' && + sleep 5 && + echo 'Running migrations...' && + alembic upgrade head && + echo 'Running tests...' && + pytest tests/ -v --cov=app --cov-report=term-missing + " + +networks: + test-network: + driver: bridge diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..4434106 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,80 @@ +# Docker Compose for Server Inventory Management System +# Author: Ali Khan +# +# Usage: +# Local (with hot reload): DOCKER_TARGET=local docker-compose up -d +# Development: docker-compose up -d (default) +# Staging: DOCKER_TARGET=staging docker-compose up -d +# Production: DOCKER_TARGET=production docker-compose up -d +# +# Or set in .env file: +# DOCKER_TARGET=local +# ENVIRONMENT=LOCAL + +services: + db: + image: postgres:15-alpine + container_name: server_inventory_db + environment: + POSTGRES_USER: ${DATABASE_USERNAME:-postgres} + POSTGRES_PASSWORD: ${DATABASE_PASSWORD:-postgres} + POSTGRES_DB: ${DATABASE_NAME:-server_inventory} + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - app-network + + api: + build: + context: . 
+ dockerfile: docker/Dockerfile + target: ${DOCKER_TARGET:-development} + container_name: server_inventory_api + labels: + - "maintainer=Ali Khan" + - "version=1.0.0" + environment: + environment: ${ENVIRONMENT:-DEVELOPMENT} + # Database configuration + DATABASE_DIALECT: ${DATABASE_DIALECT:-postgresql} + DATABASE_DRIVER: ${DATABASE_DRIVER:-asyncpg} + DATABASE_USERNAME: ${DATABASE_USERNAME:-postgres} + DATABASE_PASSWORD: ${DATABASE_PASSWORD:-postgres} + DATABASE_NAME: ${DATABASE_NAME:-server_inventory} + DATABASE_HOST: ${DATABASE_HOST:-db} + DATABASE_PORT: ${DATABASE_PORT:-5432} + DATABASE_ENGINE_ECHO: ${DATABASE_ENGINE_ECHO:-false} + DATABASE_POOL_SIZE: ${DATABASE_POOL_SIZE:-10} + DATABASE_POOL_MAX_OVERFLOW: ${DATABASE_POOL_MAX_OVERFLOW:-20} + DATABASE_POOL_TIMEOUT: ${DATABASE_POOL_TIMEOUT:-30} + DATABASE_POOL_RECYCLE: ${DATABASE_POOL_RECYCLE:-3600} + LOG_LEVEL: ${LOG_LEVEL:-INFO} + CORS_ORIGINS: ${CORS_ORIGINS:-*} + USE_RAW_SQL: ${USE_RAW_SQL:-false} + ports: + - "8000:8000" + depends_on: + db: + condition: service_healthy + networks: + - app-network + # Mount source code for hot reload (works with DOCKER_TARGET=local) + volumes: + - ./app:/app/app + - ./alembic:/app/alembic + - ./cli:/app/cli + - ./environments:/app/environments:ro + +volumes: + postgres_data: + +networks: + app-network: + driver: bridge diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..fbaa069 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,140 @@ +# Multi-stage Dockerfile for production-ready FastAPI application with Poetry +# Supports multiple environments: local, development, staging, production + +# ============================================================================= +# Stage 1: Builder - Install dependencies +# ============================================================================= +FROM python:3.11-slim AS builder + +WORKDIR /app + +# Install system dependencies and Poetry +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + libpq-dev \ + curl \ + && rm -rf /var/lib/apt/lists/* \ + && pip install --no-cache-dir poetry==1.7.1 + +# Configure Poetry: no virtualenv in container, install to system +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_CREATE=false \ + POETRY_CACHE_DIR=/tmp/poetry_cache + +# Copy Poetry files +COPY pyproject.toml poetry.lock* ./ + +# Install dependencies +RUN poetry install --only=main --no-root && rm -rf /tmp/poetry_cache + +# ============================================================================= +# Stage 2: Application Base - Common runtime setup +# ============================================================================= +FROM python:3.11-slim AS application_base + +WORKDIR /app + +# Install runtime dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + libpq5 \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Create non-root user for security +RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app + +# Copy installed packages from builder +COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages +COPY --from=builder /usr/local/bin /usr/local/bin + +# Copy application code +COPY --chown=appuser:appuser app/ ./app/ +COPY --chown=appuser:appuser alembic/ ./alembic/ +COPY --chown=appuser:appuser alembic.ini ./ +COPY --chown=appuser:appuser cli/ ./cli/ + +# Create environments directory +RUN mkdir -p /app/environments && chown -R appuser:appuser /app/environments + +# Switch to non-root user +USER appuser + +# Health check 
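+# curl is installed in the runtime stage above specifically for this probe.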
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +# ============================================================================= +# Stage 3: Local Development +# ============================================================================= +FROM application_base AS local + +ENV environment=LOCAL +COPY --chown=appuser:appuser environments/local.env ./environments/ + +EXPOSE 8000 + +# Run with reload for development +CMD ["sh", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload"] + +# ============================================================================= +# Stage 4: Development +# ============================================================================= +FROM application_base AS development + +ENV environment=DEVELOPMENT +COPY --chown=appuser:appuser environments/development.env ./environments/ + +EXPOSE 8000 + +# Run with deprecation warnings for debugging +CMD ["sh", "-c", "alembic upgrade head && python -W always::DeprecationWarning -m uvicorn app.main:app --host 0.0.0.0 --port 8000"] + +# ============================================================================= +# Stage 5: Staging +# ============================================================================= +FROM application_base AS staging + +ENV environment=STAGING +COPY --chown=appuser:appuser environments/staging.env ./environments/ + +EXPOSE 8000 + +# Run with deprecation warnings +CMD ["sh", "-c", "alembic upgrade head && python -W always::DeprecationWarning -m uvicorn app.main:app --host 0.0.0.0 --port 8000"] + +# ============================================================================= +# Stage 6: Production +# ============================================================================= +FROM application_base AS production + +ENV environment=PRODUCTION +COPY --chown=appuser:appuser environments/production.env ./environments/ + +EXPOSE 8000 + +# Production optimized run (no reload, no deprecation warnings) +CMD ["sh", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8000 --workers 4"] + +# ============================================================================= +# Stage 7: Testing +# ============================================================================= +FROM application_base AS testing + +ENV environment=TESTING +COPY --chown=appuser:appuser environments/testing.env ./environments/ +COPY --chown=appuser:appuser tests/ ./tests/ +COPY --chown=appuser:appuser pyproject.toml ./ + +# Install dev dependencies for testing +USER root +RUN pip install --no-cache-dir pytest pytest-asyncio pytest-cov httpx +USER appuser + +EXPOSE 8000 + +CMD ["pytest", "-v", "--cov=app", "--cov-report=term-missing"] + +# ============================================================================= +# Default Stage (Production) +# ============================================================================= +FROM production AS default diff --git a/environments/development.env b/environments/development.env new file mode 100644 index 0000000..169bc80 --- /dev/null +++ b/environments/development.env @@ -0,0 +1,37 @@ +# Development Environment Configuration +# Used for development server deployment + +# Environment +ENVIRONMENT=DEVELOPMENT + +# Database +DATABASE_DIALECT=postgresql +DATABASE_DRIVER=asyncpg +DATABASE_USERNAME=postgres +DATABASE_PASSWORD=postgres +DATABASE_NAME=server_inventory +DATABASE_HOST=db +DATABASE_PORT=5432 +DATABASE_ENGINE_ECHO=True +DATABASE_POOL_ECHO=False 
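+# Pre-ping validates pooled connections before handing them out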
+DATABASE_ENGINE_POOL_PRE_PING=True +DATABASE_CHECK_SAME_THREAD=False + +# Connection pool settings +DATABASE_POOL_SIZE=10 +DATABASE_POOL_MAX_OVERFLOW=20 +DATABASE_POOL_TIMEOUT=30 +DATABASE_POOL_RECYCLE=3600 + +# API Server +API_HOST=0.0.0.0 +API_PORT=8000 + +# Logging +LOG_LEVEL=DEBUG + +# CORS +CORS_ORIGINS=* + +# Database Query Mode (True for raw SQL, False for ORM) +USE_RAW_SQL=False diff --git a/environments/local.env b/environments/local.env new file mode 100644 index 0000000..443608a --- /dev/null +++ b/environments/local.env @@ -0,0 +1,37 @@ +# Local Development Environment Configuration +# Used for local development without Docker + +# Environment +ENVIRONMENT=LOCAL + +# Database +DATABASE_DIALECT=postgresql +DATABASE_DRIVER=asyncpg +DATABASE_USERNAME=postgres +DATABASE_PASSWORD=postgres +DATABASE_NAME=server_inventory +DATABASE_HOST=localhost +DATABASE_PORT=5432 +DATABASE_ENGINE_ECHO=True +DATABASE_POOL_ECHO=False +DATABASE_ENGINE_POOL_PRE_PING=True +DATABASE_CHECK_SAME_THREAD=False + +# Connection pool settings +DATABASE_POOL_SIZE=10 +DATABASE_POOL_MAX_OVERFLOW=20 +DATABASE_POOL_TIMEOUT=30 +DATABASE_POOL_RECYCLE=3600 + +# Database Query Mode (True for raw SQL, False for ORM) +USE_RAW_SQL=True + +# API Server +API_HOST=0.0.0.0 +API_PORT=8000 + +# Logging +LOG_LEVEL=DEBUG + +# CORS +CORS_ORIGINS=* diff --git a/environments/production.env b/environments/production.env new file mode 100644 index 0000000..cc1f83c --- /dev/null +++ b/environments/production.env @@ -0,0 +1,37 @@ +# Production Environment Configuration +# Used for production server deployment + +# Environment +ENVIRONMENT=PRODUCTION + +# Database (update with actual production DB credentials) +DATABASE_DIALECT=postgresql +DATABASE_DRIVER=asyncpg +DATABASE_USERNAME=postgres +DATABASE_PASSWORD=postgres +DATABASE_NAME=server_inventory +DATABASE_HOST=db +DATABASE_PORT=5432 +DATABASE_ENGINE_ECHO=False +DATABASE_POOL_ECHO=False +DATABASE_ENGINE_POOL_PRE_PING=True +DATABASE_CHECK_SAME_THREAD=False + +# Connection pool settings +DATABASE_POOL_SIZE=20 +DATABASE_POOL_MAX_OVERFLOW=40 +DATABASE_POOL_TIMEOUT=30 +DATABASE_POOL_RECYCLE=3600 + +# API Server +API_HOST=0.0.0.0 +API_PORT=8000 + +# Logging +LOG_LEVEL=WARNING + +# CORS - Update with actual production domain +CORS_ORIGINS=https://example.com + +# Database Query Mode (True for raw SQL, False for ORM) +USE_RAW_SQL=False diff --git a/environments/staging.env b/environments/staging.env new file mode 100644 index 0000000..76c54c5 --- /dev/null +++ b/environments/staging.env @@ -0,0 +1,37 @@ +# Staging Environment Configuration +# Used for staging/QA server deployment + +# Environment +ENVIRONMENT=STAGING + +# Database (update with actual staging DB credentials) +DATABASE_DIALECT=postgresql +DATABASE_DRIVER=asyncpg +DATABASE_USERNAME=postgres +DATABASE_PASSWORD=postgres +DATABASE_NAME=server_inventory +DATABASE_HOST=db +DATABASE_PORT=5432 +DATABASE_ENGINE_ECHO=False +DATABASE_POOL_ECHO=False +DATABASE_ENGINE_POOL_PRE_PING=True +DATABASE_CHECK_SAME_THREAD=False + +# Connection pool settings +DATABASE_POOL_SIZE=15 +DATABASE_POOL_MAX_OVERFLOW=30 +DATABASE_POOL_TIMEOUT=30 +DATABASE_POOL_RECYCLE=3600 + +# API Server +API_HOST=0.0.0.0 +API_PORT=8000 + +# Logging +LOG_LEVEL=INFO + +# CORS - Update with actual staging domain +CORS_ORIGINS=https://staging.example.com + +# Database Query Mode (True for raw SQL, False for ORM) +USE_RAW_SQL=False diff --git a/environments/testing.env b/environments/testing.env new file mode 100644 index 0000000..6545a5a --- /dev/null +++ 
b/environments/testing.env @@ -0,0 +1,37 @@ +# Testing Environment Configuration +# Used for running tests + +# Environment +ENVIRONMENT=TESTING + +# Database +DATABASE_DIALECT=postgresql +DATABASE_DRIVER=asyncpg +DATABASE_USERNAME=postgres +DATABASE_PASSWORD=postgres +DATABASE_NAME=server_inventory_test +DATABASE_HOST=localhost +DATABASE_PORT=5432 +DATABASE_ENGINE_ECHO=False +DATABASE_POOL_ECHO=False +DATABASE_ENGINE_POOL_PRE_PING=True +DATABASE_CHECK_SAME_THREAD=False + +# Connection pool settings +DATABASE_POOL_SIZE=5 +DATABASE_POOL_MAX_OVERFLOW=10 +DATABASE_POOL_TIMEOUT=30 +DATABASE_POOL_RECYCLE=3600 + +# API Server +API_HOST=0.0.0.0 +API_PORT=8000 + +# Logging +LOG_LEVEL=WARNING + +# CORS +CORS_ORIGINS=* + +# Database Query Mode (True for raw SQL, False for ORM) +USE_RAW_SQL=TRUE diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..b914304 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1827 @@ +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. + +[[package]] +name = "alembic" +version = "1.17.2" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6"}, + {file = "alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" + +[package.extras] +tz = ["tzdata"] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +description = "Document parameters, class attributes, return types, and variables inline, with Annotated." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.12.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb"}, + {file = "anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0"}, +] + +[package.dependencies] +idna = ">=2.8" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +trio = ["trio (>=0.31.0) ; python_version < \"3.10\"", "trio (>=0.32.0) ; python_version >= \"3.10\""] + +[[package]] +name = "asyncpg" +version = "0.31.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "asyncpg-0.31.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:831712dd3cf117eec68575a9b50da711893fd63ebe277fc155ecae1c6c9f0f61"}, + {file = "asyncpg-0.31.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b17c89312c2f4ccea222a3a6571f7df65d4ba2c0e803339bfc7bed46a96d3be"}, + {file = "asyncpg-0.31.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3faa62f997db0c9add34504a68ac2c342cfee4d57a0c3062fcf0d86c7f9cb1e8"}, + {file = "asyncpg-0.31.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8ea599d45c361dfbf398cb67da7fd052affa556a401482d3ff1ee99bd68808a1"}, + {file = "asyncpg-0.31.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:795416369c3d284e1837461909f58418ad22b305f955e625a4b3a2521d80a5f3"}, + {file = "asyncpg-0.31.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a8d758dac9d2e723e173d286ef5e574f0b350ec00e9186fce84d0fc5f6a8e6b8"}, + {file = "asyncpg-0.31.0-cp310-cp310-win32.whl", hash = "sha256:2d076d42eb583601179efa246c5d7ae44614b4144bc1c7a683ad1222814ed095"}, + {file = "asyncpg-0.31.0-cp310-cp310-win_amd64.whl", hash = "sha256:9ea33213ac044171f4cac23740bed9a3805abae10e7025314cfbd725ec670540"}, + {file = "asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d"}, + {file = "asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab"}, + {file = "asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c"}, + {file = "asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109"}, + {file = "asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da"}, + {file = "asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9"}, + {file = "asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24"}, + {file = "asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047"}, + {file = "asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad"}, + {file = "asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d"}, + {file = "asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a"}, + {file = "asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671"}, + {file = "asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec"}, + {file = "asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20"}, + {file = "asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8"}, + {file = "asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186"}, + {file = "asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b"}, + {file = "asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e"}, + {file = "asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403"}, + {file = "asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4"}, + {file = "asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2"}, + {file = "asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602"}, + {file = "asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696"}, + {file = "asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab"}, + {file = "asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44"}, + {file = "asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5"}, + {file = "asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2"}, + {file = "asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2"}, + {file = "asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218"}, + {file = "asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d"}, + {file = "asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b"}, + {file = "asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be"}, + {file = "asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2"}, + {file = "asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31"}, + {file = "asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7"}, + {file = "asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e"}, + {file = "asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c"}, + {file = "asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a"}, + 
{file = "asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d"}, + {file = "asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3"}, + {file = "asyncpg-0.31.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb3cde58321a1f89ce41812be3f2a98dddedc1e76d0838aba1d724f1e4e1a95"}, + {file = "asyncpg-0.31.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6974f36eb9a224d8fb428bcf66bd411aa12cf57c2967463178149e73d4de366"}, + {file = "asyncpg-0.31.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc2b685f400ceae428f79f78b58110470d7b4466929a7f78d455964b17ad1008"}, + {file = "asyncpg-0.31.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb223567dea5f47c45d347f2bde5486be8d9f40339f27217adb3fb1c3be51298"}, + {file = "asyncpg-0.31.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:22be6e02381bab3101cd502d9297ac71e2f966c86e20e78caead9934c98a8af6"}, + {file = "asyncpg-0.31.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:37a58919cfef2448a920df00d1b2f821762d17194d0dbf355d6dde8d952c04f9"}, + {file = "asyncpg-0.31.0-cp39-cp39-win32.whl", hash = "sha256:c1a9c5b71d2371a2290bc93336cd05ba4ec781683cab292adbddc084f89443c6"}, + {file = "asyncpg-0.31.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1e1ab5bc65373d92dd749d7308c5b26fb2dc0fbe5d3bf68a32b676aa3bcd24a"}, + {file = "asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735"}, +] + +[package.extras] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] + +[[package]] +name = "black" +version = "25.12.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "black-25.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f85ba1ad15d446756b4ab5f3044731bf68b777f8f9ac9cdabd2425b97cd9c4e8"}, + {file = "black-25.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546eecfe9a3a6b46f9d69d8a642585a6eaf348bcbbc4d87a19635570e02d9f4a"}, + {file = "black-25.12.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17dcc893da8d73d8f74a596f64b7c98ef5239c2cd2b053c0f25912c4494bf9ea"}, + {file = "black-25.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:09524b0e6af8ba7a3ffabdfc7a9922fb9adef60fed008c7cd2fc01f3048e6e6f"}, + {file = "black-25.12.0-cp310-cp310-win_arm64.whl", hash = "sha256:b162653ed89eb942758efeb29d5e333ca5bb90e5130216f8369857db5955a7da"}, + {file = "black-25.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0cfa263e85caea2cff57d8f917f9f51adae8e20b610e2b23de35b5b11ce691a"}, + {file = "black-25.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a2f578ae20c19c50a382286ba78bfbeafdf788579b053d8e4980afb079ab9be"}, + {file = "black-25.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e1b65634b0e471d07ff86ec338819e2ef860689859ef4501ab7ac290431f9b"}, + {file = "black-25.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3fa71e3b8dd9f7c6ac4d818345237dfb4175ed3bf37cd5a581dbc4c034f1ec5"}, + {file = "black-25.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:51e267458f7e650afed8445dc7edb3187143003d52a1b710c7321aef22aa9655"}, + {file = "black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a"}, + {file = "black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783"}, + {file = "black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59"}, + {file = "black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892"}, + {file = "black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43"}, + {file = "black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5"}, + {file = "black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f"}, + {file = "black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf"}, + {file = "black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d"}, + {file = "black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce"}, + {file = "black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5"}, + {file = "black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f"}, + {file = "black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f"}, + {file = "black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83"}, + {file = "black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b"}, + {file = "black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828"}, + {file = "black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +pytokens = ">=0.3.0" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2025.11.12" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, +] + +[[package]] +name = "cfgv" +version = "3.5.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0"}, + {file = "cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132"}, +] + +[[package]] +name = "click" +version = "8.3.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"}, + {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.13.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "coverage-7.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:02d9fb9eccd48f6843c98a37bd6817462f130b86da8660461e8f5e54d4c06070"}, + {file = "coverage-7.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:367449cf07d33dc216c083f2036bb7d976c6e4903ab31be400ad74ad9f85ce98"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cdb3c9f8fef0a954c632f64328a3935988d33a6604ce4bf67ec3e39670f12ae5"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d10fd186aac2316f9bbb46ef91977f9d394ded67050ad6d84d94ed6ea2e8e54e"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f88ae3e69df2ab62fb0bc5219a597cb890ba5c438190ffa87490b315190bb33"}, + {file = "coverage-7.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4be718e51e86f553bcf515305a158a1cd180d23b72f07ae76d6017c3cc5d791"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a00d3a393207ae12f7c49bb1c113190883b500f48979abb118d8b72b8c95c032"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a7b1cd820e1b6116f92c6128f1188e7afe421c7e1b35fa9836b11444e53ebd9"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:37eee4e552a65866f15dedd917d5e5f3d59805994260720821e2c1b51ac3248f"}, + {file = "coverage-7.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62d7c4f13102148c78d7353c6052af6d899a7f6df66a32bddcc0c0eb7c5326f8"}, + {file = "coverage-7.13.0-cp310-cp310-win32.whl", hash = "sha256:24e4e56304fdb56f96f80eabf840eab043b3afea9348b88be680ec5986780a0f"}, + {file = "coverage-7.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:74c136e4093627cf04b26a35dab8cbfc9b37c647f0502fc313376e11726ba303"}, + {file = "coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820"}, + {file = "coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb"}, + {file = "coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8"}, + {file = "coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753"}, + {file = "coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b"}, + {file = "coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe"}, + {file = "coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7"}, + {file = "coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf"}, + {file = "coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74"}, + {file = "coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b"}, + {file = "coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd"}, + {file = "coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef"}, + {file = "coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae"}, + {file = "coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080"}, + {file = "coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf"}, + {file = "coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b"}, + {file = "coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137"}, + {file = "coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511"}, + {file = "coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1"}, + {file = "coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a"}, + {file = "coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6"}, + {file = "coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a"}, + {file = "coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e"}, + {file = "coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c"}, + {file = "coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e"}, + {file = "coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46"}, + {file = "coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = 
"sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39"}, + {file = "coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e"}, + {file = "coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256"}, + {file = "coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be"}, + {file = "coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9"}, + {file = "coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927"}, + {file = "coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f"}, + {file = "coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc"}, + {file = "coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b"}, + {file = "coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28"}, + {file = "coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3"}, + {file = "coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940"}, + {file = "coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2"}, + {file = "coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7"}, + {file = "coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc"}, + {file = "coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a"}, + {file = "coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904"}, + {file = "coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "distlib" +version = "0.4.0" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"}, + {file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"}, +] + +[[package]] +name = "fastapi" +version = "0.124.4" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.124.4-py3-none-any.whl", hash = "sha256:6d1e703698443ccb89e50abe4893f3c84d9d6689c0cf1ca4fad6d3c15cf69f15"}, + {file = "fastapi-0.124.4.tar.gz", hash = "sha256:0e9422e8d6b797515f33f500309f6e1c98ee4e85563ba0f2debb282df6343763"}, +] + +[package.dependencies] +annotated-doc = ">=0.0.2" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.51.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.20.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "filelock-3.20.1-py3-none-any.whl", hash = "sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a"}, + {file = "filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c"}, +] + +[[package]] +name = "greenlet" +version = "3.3.0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "greenlet-3.3.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f8496d434d5cb2dce025773ba5597f71f5410ae499d5dd9533e0653258cdb3d"}, + {file = "greenlet-3.3.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b96dc7eef78fd404e022e165ec55327f935b9b52ff355b067eb4a0267fc1cffb"}, + {file = "greenlet-3.3.0-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:73631cd5cccbcfe63e3f9492aaa664d278fda0ce5c3d43aeda8e77317e38efbd"}, + {file = "greenlet-3.3.0-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b299a0cb979f5d7197442dccc3aee67fce53500cd88951b7e6c35575701c980b"}, + {file = "greenlet-3.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7dee147740789a4632cace364816046e43310b59ff8fb79833ab043aefa72fd5"}, + {file = "greenlet-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39b28e339fc3c348427560494e28d8a6f3561c8d2bcf7d706e1c624ed8d822b9"}, + {file = "greenlet-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3c374782c2935cc63b2a27ba8708471de4ad1abaa862ffdb1ef45a643ddbb7d"}, + {file = "greenlet-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:b49e7ed51876b459bd645d83db257f0180e345d3f768a35a85437a24d5a49082"}, + {file = "greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e"}, + {file = "greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62"}, + {file = "greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32"}, + {file = "greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45"}, + {file = "greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948"}, + {file = "greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794"}, + {file = "greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5"}, + {file = "greenlet-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:7652ee180d16d447a683c04e4c5f6441bae7ba7b17ffd9f6b3aff4605e9e6f71"}, + {file = "greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb"}, + {file = "greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3"}, + {file = "greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655"}, 
+ {file = "greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7"}, + {file = "greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b"}, + {file = "greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53"}, + {file = "greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614"}, + {file = "greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39"}, + {file = "greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739"}, + {file = "greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808"}, + {file = "greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54"}, + {file = "greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492"}, + {file = "greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527"}, + {file = "greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39"}, + {file = "greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8"}, + {file = "greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38"}, + {file = "greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f"}, + {file = "greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365"}, + {file = "greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3"}, + {file = "greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45"}, + {file = "greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955"}, + {file = "greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55"}, + {file = "greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc"}, + {file = "greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170"}, + {file = "greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931"}, + {file = 
"greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388"}, + {file = "greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3"}, + {file = "greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221"}, + {file = "greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b"}, + {file = "greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd"}, + {file = "greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9"}, + {file = "greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil", "setuptools"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.7.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:11d01b0ff1fe02c4c32d60af61a4d613b74fad069e47e06e9067758c01e9ac78"}, + {file = "httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d86c1e5afdc479a6fdabf570be0d3eb791df0ae727e8dbc0259ed1249998d4"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8c751014e13d88d2be5f5f14fc8b89612fcfa92a9cc480f2bc1598357a23a05"}, + {file = "httptools-0.7.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:654968cb6b6c77e37b832a9be3d3ecabb243bbe7a0b8f65fbc5b6b04c8fcabed"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b580968316348b474b020edf3988eecd5d6eec4634ee6561e72ae3a2a0e00a8a"}, + {file = "httptools-0.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d496e2f5245319da9d764296e86c5bb6fcf0cf7a8806d3d000717a889c8c0b7b"}, + {file = "httptools-0.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:cbf8317bfccf0fed3b5680c559d3459cccf1abe9039bfa159e62e391c7270568"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657"}, + {file = "httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df"}, + {file = "httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274"}, + {file = "httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec"}, + {file = "httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5"}, + {file = "httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03"}, + {file = "httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362"}, + {file = "httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c"}, + {file = "httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3"}, + {file = "httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca"}, + {file = "httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c"}, + {file = "httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346"}, + {file = "httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650"}, + {file = "httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270"}, + {file = "httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1"}, + {file = "httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60"}, + {file = "httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca"}, + {file = "httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ac50afa68945df63ec7a2707c506bd02239272288add34539a2ef527254626a4"}, + {file = "httptools-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de987bb4e7ac95b99b805b99e0aae0ad51ae61df4263459d36e07cf4052d8b3a"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d169162803a24425eb5e4d51d79cbf429fd7a491b9e570a55f495ea55b26f0bf"}, + {file = "httptools-0.7.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49794f9250188a57fa73c706b46cb21a313edb00d337ca4ce1a011fe3c760b28"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aeefa0648362bb97a7d6b5ff770bfb774930a327d7f65f8208394856862de517"}, + {file = "httptools-0.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0d92b10dbf0b3da4823cde6a96d18e6ae358a9daa741c71448975f6a2c339cad"}, + {file = "httptools-0.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:5ddbd045cfcb073db2449563dd479057f2c2b681ebc232380e63ef15edc9c023"}, + {file = "httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9"}, +] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "identify" +version = "2.6.15" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757"}, + {file = "identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "mako" +version = "1.3.10" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, + {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "markdown-it-pyrs", "mistletoe (>=1.0,<2.0)", "mistune (>=3.0,<4.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins (>=0.5.0)"] +profiling = ["gprof2dot"] +rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"] + +[[package]] +name = "markupsafe" +version = "3.0.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = 
"markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = 
"markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.5.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31"}, + {file = "platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda"}, +] + +[package.extras] +docs = ["furo (>=2025.9.25)", "proselint (>=0.14)", "sphinx (>=8.2.3)", "sphinx-autodoc-typehints (>=3.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.4.2)", "pytest-cov (>=7)", "pytest-mock (>=3.15.1)"] +type = ["mypy (>=1.18.2)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "4.5.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77"}, + {file = "pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e"}, + {file = "psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39"}, + {file = 
"psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908"}, + {file = "psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d"}, + {file = "psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1"}, + {file = "psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d"}, + {file = "psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20e7fb94e20b03dcc783f76c0865f9da39559dcc0c28dd1a3fce0d01902a6b9c"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bdab48575b6f870f465b397c38f1b415520e9879fdf10a53ee4f49dcbdf8a21"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9d3a9edcfbe77a3ed4bc72836d466dfce4174beb79eda79ea155cc77237ed9e8"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:44fc5c2b8fa871ce7f0023f619f1349a0aa03a0857f2c96fbc01c657dcbbdb49"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c55460033867b4622cda1b6872edf445809535144152e5d14941ef591980edf"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:691c807d94aecfbc76a14e1408847d59ff5b5906a04a23e12a89007672b9e819"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b81627b691f29c4c30a8f322546ad039c40c328373b11dff7490a3e1b517855"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:41360b01c140c2a03d346cec3280cf8a71aa07d94f3b1509fa0161c366af66b4"}, + {file = "psycopg2_binary-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:875039274f8a2361e5207857899706da840768e2a775bf8c65e82f60b197df02"}, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = 
"pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = 
"sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = 
"pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809"}, + {file = 
"pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" +typing-inspection = ">=0.4.0" + +[package.extras] +aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pygments" +version = "2.19.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.4.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79"}, + {file = "pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-cov" +version = "6.3.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749"}, + {file = "pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pluggy = ">=1.2" +pytest = ">=6.2.5" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytokens" +version = "0.3.0" +description = "A Fast, spec compliant Python 3.14+ tokenizer that runs on older Pythons." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3"}, + {file = "pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"] + +[[package]] +name = "pyyaml" +version = "6.0.3" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = 
"pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = 
"pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, +] + +[[package]] +name = "rich" +version = "14.2.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"}, + {file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.14.9" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.14.9-py3-none-linux_armv6l.whl", hash = "sha256:f1ec5de1ce150ca6e43691f4a9ef5c04574ad9ca35c8b3b0e18877314aba7e75"}, + {file = "ruff-0.14.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ed9d7417a299fc6030b4f26333bf1117ed82a61ea91238558c0268c14e00d0c2"}, + {file = "ruff-0.14.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d5dc3473c3f0e4a1008d0ef1d75cee24a48e254c8bed3a7afdd2b4392657ed2c"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84bf7c698fc8f3cb8278830fb6b5a47f9bcc1ed8cb4f689b9dd02698fa840697"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa733093d1f9d88a5d98988d8834ef5d6f9828d03743bf5e338bf980a19fce27"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a1cfb04eda979b20c8c19550c8b5f498df64ff8da151283311ce3199e8b3648"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1e5cb521e5ccf0008bd74d5595a4580313844a42b9103b7388eca5a12c970743"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd429a8926be6bba4befa8cdcf3f4dd2591c413ea5066b1e99155ed245ae42bb"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab208c1b7a492e37caeaf290b1378148f75e13c2225af5d44628b95fd7834273"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72034534e5b11e8a593f517b2f2f2b273eb68a30978c6a2d40473ad0aaa4cb4a"}, + {file = "ruff-0.14.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:712ff04f44663f1b90a1195f51525836e3413c8a773574a7b7775554269c30ed"}, + {file = "ruff-0.14.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a111fee1db6f1d5d5810245295527cda1d367c5aa8f42e0fca9a78ede9b4498b"}, + {file = "ruff-0.14.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8769efc71558fecc25eb295ddec7d1030d41a51e9dcf127cbd63ec517f22d567"}, + {file = "ruff-0.14.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:347e3bf16197e8a2de17940cd75fd6491e25c0aa7edf7d61aa03f146a1aa885a"}, + {file = "ruff-0.14.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7715d14e5bccf5b660f54516558aa94781d3eb0838f8e706fb60e3ff6eff03a8"}, + {file = "ruff-0.14.9-py3-none-win32.whl", hash = "sha256:df0937f30aaabe83da172adaf8937003ff28172f59ca9f17883b4213783df197"}, + {file = "ruff-0.14.9-py3-none-win_amd64.whl", hash = "sha256:c0b53a10e61df15a42ed711ec0bda0c582039cf6c754c49c020084c55b5b0bc2"}, + {file = "ruff-0.14.9-py3-none-win_arm64.whl", hash = "sha256:8e821c366517a074046d92f0e9213ed1c13dbc5b37a7fc20b07f79b64d62cc84"}, + {file = "ruff-0.14.9.tar.gz", hash = "sha256:35f85b25dd586381c0cc053f48826109384c81c00ad7ef1bd977bfcc28119d5b"}, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.45" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sqlalchemy-2.0.45-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c64772786d9eee72d4d3784c28f0a636af5b0a29f3fe26ff11f55efe90c0bd85"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7ae64ebf7657395824a19bca98ab10eb9a3ecb026bf09524014f1bb81cb598d4"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f02325709d1b1a1489f23a39b318e175a171497374149eae74d612634b234c0"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2c3684fca8a05f0ac1d9a21c1f4a266983a7ea9180efb80ffeb03861ecd01a0"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040f6f0545b3b7da6b9317fc3e922c9a98fc7243b2a1b39f78390fc0942f7826"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-win32.whl", hash = "sha256:830d434d609fe7bfa47c425c445a8b37929f140a7a44cdaf77f6d34df3a7296a"}, + {file = "sqlalchemy-2.0.45-cp310-cp310-win_amd64.whl", hash = "sha256:0209d9753671b0da74da2cfbb9ecf9c02f72a759e4b018b3ab35f244c91842c7"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e90a344c644a4fa871eb01809c32096487928bd2038bf10f3e4515cb688cc56"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8c8b41b97fba5f62349aa285654230296829672fc9939cd7f35aab246d1c08b"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12c694ed6468333a090d2f60950e4250b928f457e4962389553d6ba5fe9951ac"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f7d27a1d977a1cfef38a0e2e1ca86f09c4212666ce34e6ae542f3ed0a33bc606"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d62e47f5d8a50099b17e2bfc1b0c7d7ecd8ba6b46b1507b58cc4f05eefc3bb1c"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-win32.whl", hash = "sha256:3c5f76216e7b85770d5bb5130ddd11ee89f4d52b11783674a662c7dd57018177"}, + {file = "sqlalchemy-2.0.45-cp311-cp311-win_amd64.whl", hash = "sha256:a15b98adb7f277316f2c276c090259129ee4afca783495e212048daf846654b2"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3ee2aac15169fb0d45822983631466d60b762085bc4535cd39e66bea362df5f"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba547ac0b361ab4f1608afbc8432db669bd0819b3e12e29fb5fa9529a8bba81d"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:215f0528b914e5c75ef2559f69dca86878a3beeb0c1be7279d77f18e8d180ed4"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:107029bf4f43d076d4011f1afb74f7c3e2ea029ec82eb23d8527d5e909e97aa6"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-win32.whl", hash = "sha256:0c9f6ada57b58420a2c0277ff853abe40b9e9449f8d7d231763c6bc30f5c4953"}, + {file = "sqlalchemy-2.0.45-cp312-cp312-win_amd64.whl", hash = "sha256:8defe5737c6d2179c7997242d6473587c3beb52e557f5ef0187277009f73e5e1"}, + {file = "sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf"}, + {file = "sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e"}, + {file = 
"sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b"}, + {file = "sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8"}, + {file = "sqlalchemy-2.0.45-cp313-cp313-win32.whl", hash = "sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a"}, + {file = "sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl", hash = "sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee"}, + {file = "sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6"}, + {file = "sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-win32.whl", hash = "sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f"}, + {file = "sqlalchemy-2.0.45-cp314-cp314-win_amd64.whl", hash = "sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177"}, + {file = "sqlalchemy-2.0.45-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b"}, + {file = "sqlalchemy-2.0.45-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5964f832431b7cdfaaa22a660b4c7eb1dfcd6ed41375f67fd3e3440fd95cb3cc"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee580ab50e748208754ae8980cec79ec205983d8cf8b3f7c39067f3d9f2c8e22"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13e27397a7810163440c6bfed6b3fe46f1bfb2486eb540315a819abd2c004128"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ed3635353e55d28e7f4a95c8eda98a5cdc0a0b40b528433fbd41a9ae88f55b3d"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:db6834900338fb13a9123307f0c2cbb1f890a8656fcd5e5448ae3ad5bbe8d312"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-win32.whl", hash = "sha256:1d8b4a7a8c9b537509d56d5cd10ecdcfbb95912d72480c8861524efecc6a3fff"}, + {file = "sqlalchemy-2.0.45-cp38-cp38-win_amd64.whl", hash = "sha256:ebd300afd2b62679203435f596b2601adafe546cb7282d5a0cd3ed99e423720f"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d29b2b99d527dbc66dd87c3c3248a5dd789d974a507f4653c969999fc7c1191b"}, + {file = 
"sqlalchemy-2.0.45-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:59a8b8bd9c6bedf81ad07c8bd5543eedca55fe9b8780b2b628d495ba55f8db1e"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd93c6f5d65f254ceabe97548c709e073d6da9883343adaa51bf1a913ce93f8e"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d0beadc2535157070c9c17ecf25ecec31e13c229a8f69196d7590bde8082bf1"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e057f928ffe9c9b246a55b469c133b98a426297e1772ad24ce9f0c47d123bd5b"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-win32.whl", hash = "sha256:c1c2091b1489435ff85728fafeb990f073e64f6f5e81d5cd53059773e8521eb6"}, + {file = "sqlalchemy-2.0.45-cp39-cp39-win_amd64.whl", hash = "sha256:56ead1f8dfb91a54a28cd1d072c74b3d635bcffbd25e50786533b822d4f2cde2"}, + {file = "sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0"}, + {file = "sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88"}, +] + +[package.dependencies] +greenlet = {version = ">=1", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "starlette" +version = "0.50.0" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca"}, + {file = "starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "typer" +version = "0.20.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a"}, + {file = "typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "uvicorn" +version = "0.38.0" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02"}, + {file = "uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.22.1" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c"}, + {file = "uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac33ed96229b7790eb729702751c0e93ac5bc3bcf52ae9eccbff30da09194b86"}, + {file = "uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a592b043a47ad17911add5fbd087c76716d7c9ccc1d64ec9249ceafd735f03c2"}, + {file = "uvloop-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1489cf791aa7b6e8c8be1c5a080bae3a672791fcb4e9e12249b05862a2ca9cec"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9"}, + {file = "uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21"}, + {file = "uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733"}, + {file = "uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42"}, + {file = "uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370"}, + {file = "uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2"}, + {file = "uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705"}, + {file = "uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d"}, + {file = "uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e"}, + {file = "uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e"}, + {file = 
"uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142"}, + {file = "uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35"}, + {file = "uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6"}, + {file = "uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289"}, + {file = "uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c"}, + {file = "uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88"}, + {file = "uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:80eee091fe128e425177fbd82f8635769e2f32ec9daf6468286ec57ec0313efa"}, + {file = "uvloop-0.22.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:017bd46f9e7b78e81606329d07141d3da446f8798c6baeec124260e22c262772"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3e5c6727a57cb6558592a95019e504f605d1c54eb86463ee9f7a2dbd411c820"}, + {file = "uvloop-0.22.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:57df59d8b48feb0e613d9b1f5e57b7532e97cbaf0d61f7aa9aa32221e84bc4b6"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:55502bc2c653ed2e9692e8c55cb95b397d33f9f2911e929dc97c4d6b26d04242"}, + {file = "uvloop-0.22.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4a968a72422a097b09042d5fa2c5c590251ad484acf910a651b4b620acd7f193"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b45649628d816c030dba3c80f8e2689bab1c89518ed10d426036cdc47874dfc4"}, + {file = "uvloop-0.22.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ea721dd3203b809039fcc2983f14608dae82b212288b346e0bfe46ec2fab0b7c"}, + {file = "uvloop-0.22.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ae676de143db2b2f60a9696d7eca5bb9d0dd6cc3ac3dad59a8ae7e95f9e1b54"}, + {file = 
"uvloop-0.22.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:17d4e97258b0172dfa107b89aa1eeba3016f4b1974ce85ca3ef6a66b35cbf659"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:05e4b5f86e621cf3927631789999e697e58f0d2d32675b67d9ca9eb0bca55743"}, + {file = "uvloop-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:286322a90bea1f9422a470d5d2ad82d38080be0a29c4dd9b3e6384320a4d11e7"}, + {file = "uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx_rtd_theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=6.1,<7.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=25.3.0,<25.4.0)", "pycodestyle (>=2.11.0,<2.12.0)"] + +[[package]] +name = "virtualenv" +version = "20.35.4" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b"}, + {file = "virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "watchfiles" +version = "1.1.1" +description = "Simple, modern and high performance file watching and code reload in python." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c"}, + {file = "watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863"}, + {file = "watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82"}, + {file = "watchfiles-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4"}, + {file = "watchfiles-1.1.1-cp310-cp310-win32.whl", hash = "sha256:3f6d37644155fb5beca5378feb8c1708d5783145f2a0f1c4d5a061a210254844"}, + {file = "watchfiles-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:a36d8efe0f290835fd0f33da35042a1bb5dc0e83cbc092dcf69bce442579e88e"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5"}, + {file = "watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff"}, + {file = "watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701"}, + {file = "watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10"}, + {file = "watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4"}, + {file = "watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d"}, + {file = "watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb"}, + {file = "watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94"}, + {file = "watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43"}, + {file = "watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9"}, + {file = "watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404"}, + {file = "watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18"}, + {file = "watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150"}, + {file = 
"watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae"}, + {file = "watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d"}, + {file = "watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b"}, + {file = "watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374"}, + {file = "watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42"}, + {file = "watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da"}, + {file = "watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04"}, + {file = "watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77"}, + {file = "watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef"}, + {file = "watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5"}, + {file = "watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3"}, + {file = 
"watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510"}, + {file = "watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6"}, + {file = "watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81"}, + {file = "watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b"}, + {file = "watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a"}, + {file = "watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21"}, + {file = "watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc"}, + {file = "watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c"}, + {file = "watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099"}, + {file = "watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c882d69f6903ef6092bedfb7be973d9319940d56b8427ab9187d1ecd73438a70"}, + {file = "watchfiles-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6ff426a7cb54f310d51bfe83fe9f2bbe40d540c741dc974ebc30e6aa238f52e"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79ff6c6eadf2e3fc0d7786331362e6ef1e51125892c75f1004bd6b52155fb956"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c1f5210f1b8fc91ead1283c6fd89f70e76fb07283ec738056cf34d51e9c1d62c"}, + {file = 
"watchfiles-1.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9c4702f29ca48e023ffd9b7ff6b822acdf47cb1ff44cb490a3f1d5ec8987e9c"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb08650863767cbc58bca4813b92df4d6c648459dcaa3d4155681962b2aa2d3"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08af70fd77eee58549cd69c25055dc344f918d992ff626068242259f98d598a2"}, + {file = "watchfiles-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c3631058c37e4a0ec440bf583bc53cdbd13e5661bb6f465bc1d88ee9a0a4d02"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cf57a27fb986c6243d2ee78392c503826056ffe0287e8794503b10fb51b881be"}, + {file = "watchfiles-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d7e7067c98040d646982daa1f37a33d3544138ea155536c2e0e63e07ff8a7e0f"}, + {file = "watchfiles-1.1.1-cp39-cp39-win32.whl", hash = "sha256:6c9c9262f454d1c4d8aaa7050121eb4f3aea197360553699520767daebf2180b"}, + {file = "watchfiles-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:74472234c8370669850e1c312490f6026d132ca2d396abfad8830b4f1c096957"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d"}, + {file = "watchfiles-1.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24"}, + {file = "watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdab464fee731e0884c35ae3588514a9bcf718d0e2c82169c1c4a85cc19c3c7f"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3dbd8cbadd46984f802f6d479b7e3afa86c42d13e8f0f322d669d79722c8ec34"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5524298e3827105b61951a29c3512deb9578586abf3a7c5da4a8069df247cccc"}, + {file = "watchfiles-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b943d3668d61cfa528eb949577479d3b077fd25fb83c641235437bc0b5bc60e"}, + {file = "watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = 
false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + +[metadata] +lock-version = "2.1" +python-versions = "^3.11" +content-hash = "240c1c657849ef5506924e7274a19c6815fb12d4d3d1ec51156527cce66e3e45" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..81ec0cd --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,97 @@ +[tool.poetry] +name = "server-inventory-management" +version = "1.0.0" +description = "Production-grade CRUD application for tracking server inventory" +authors = ["Ali Khan "] +readme = "README.md" +packages = [ + { include = "app" }, + { include = "cli" }, +] + +[tool.poetry.dependencies] +python = "^3.11" +fastapi = "^0.124.0" +uvicorn = { version = "^0.38.0", extras = ["standard"] } +sqlalchemy = "^2.0.0" +asyncpg = "^0.31.0" +psycopg2-binary = "^2.9.9" # Sync driver for Alembic migrations +pydantic = "^2.5.0" +pydantic-settings = "^2.1.0" +httpx = "^0.28.0" +alembic = "^1.12.0" +python-dotenv = "^1.0.0" +typer = "^0.20.0" +rich = "^14.0.0" +greenlet = "^3.3.0" + +[tool.poetry.group.dev.dependencies] +pytest = "^8.0.0" +pytest-asyncio = "^0.25.0" +pytest-cov = "^6.0.0" +pre-commit = "^4.5.1" +ruff = "^0.14.9" + +[tool.poetry.scripts] +server-cli = "cli.main:app" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +testpaths = ["tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +asyncio_mode = "auto" +addopts = "-v --strict-markers --tb=short" + +[tool.coverage.run] +source = ["app", "cli"] +omit = [ + "app/main.py", + "app/database.py", + "app/logging_config.py", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if __name__ == .__main__.:", + "raise NotImplementedError", +] + +[tool.ruff] +line-length = 100 +target-version = "py311" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade +] +ignore = [ + "E501", # line too long (handled by formatter) + "B008", # do not perform function calls in argument defaults + "B904", # raise from (many existing exceptions would need updating) +] + +[tool.ruff.lint.isort] +known-first-party = ["app", "cli"] + +[tool.mypy] +python_version = "3.11" +warn_return_any = true +warn_unused_ignores = true +disallow_untyped_defs = true +plugins = ["pydantic.mypy"] + +[[tool.mypy.overrides]] +module = "tests.*" +disallow_untyped_defs = false diff --git a/start.sh b/start.sh new file mode 100755 index 0000000..16408bd --- /dev/null +++ b/start.sh @@ -0,0 +1,146 @@ +#!/bin/bash +# Startup script for the Server Inventory Management System + +set -e + +# Default environment +ENVIRONMENT=${environment:-LOCAL} + +echo "🚀 Starting Server Inventory Management System..." 
+echo "📍 Environment: $ENVIRONMENT" + +# Check if Poetry is installed (for local development) +check_poetry() { + if command -v poetry &> /dev/null; then + echo "✅ Poetry is installed" + return 0 + else + echo "⚠️ Poetry not found. Install with: curl -sSL https://install.python-poetry.org | python3 -" + return 1 + fi +} + +# Check if Docker is running +check_docker() { + if ! docker info > /dev/null 2>&1; then + echo "❌ Docker is not running. Please start Docker and try again." + return 1 + fi + return 0 +} + +# Start with Docker Compose +start_docker() { + local env_lower=$(echo "$ENVIRONMENT" | tr '[:upper:]' '[:lower:]') + + echo "📦 Starting Docker Compose services..." + + if [ "$env_lower" = "testing" ] || [ "$env_lower" = "test" ]; then + echo " Using: docker-compose.test.yml" + docker-compose -f docker-compose.test.yml up -d + else + echo " Using: DOCKER_TARGET=$env_lower" + DOCKER_TARGET=$env_lower ENVIRONMENT=$ENVIRONMENT docker-compose up -d + fi +} + +# Start locally with Poetry +start_local() { + if ! check_poetry; then + exit 1 + fi + + # Check environment file exists + ENV_FILE="environments/${ENVIRONMENT,,}.env" + if [ ! -f "$ENV_FILE" ]; then + echo "⚠️ Environment file not found: $ENV_FILE" + echo " Available environments: local, development, staging, production, testing" + exit 1 + fi + + echo "📦 Using environment file: $ENV_FILE" + + # Start PostgreSQL with Docker + echo "🐘 Starting PostgreSQL..." + docker-compose up -d db + + echo "⏳ Waiting for PostgreSQL to be ready..." + sleep 5 + + # Run migrations + echo "📊 Running database migrations..." + environment=$ENVIRONMENT poetry run alembic upgrade head + + # Start the API + echo "🚀 Starting API server..." + environment=$ENVIRONMENT poetry run uvicorn app.main:app --reload +} + +# Parse arguments +MODE=${1:-docker} + +case $MODE in + docker) + if check_docker; then + start_docker + else + exit 1 + fi + ;; + local|poetry) + start_local + ;; + *) + echo "Usage: $0 [docker|local] [environment]" + echo "" + echo "Modes:" + echo " docker - Start with Docker Compose (default)" + echo " local - Start locally with Poetry" + echo "" + echo "Environment:" + echo " Set via 'environment' variable:" + echo " environment=LOCAL ./start.sh local" + echo " environment=DEVELOPMENT ./start.sh docker" + echo "" + echo "Available environments: LOCAL, DEVELOPMENT, STAGING, PRODUCTION, TESTING" + exit 1 + ;; +esac + +# Wait and check health +echo "" +echo "⏳ Waiting for services to be ready..." +sleep 5 + +# Check if API is healthy +echo "🏥 Checking API health..." +max_attempts=30 +attempt=0 +while [ $attempt -lt $max_attempts ]; do + if curl -f http://localhost:8000/health > /dev/null 2>&1; then + echo "✅ API is healthy!" + break + fi + attempt=$((attempt + 1)) + echo " Attempt $attempt/$max_attempts..." + sleep 2 +done + +if [ $attempt -eq $max_attempts ]; then + echo "❌ API did not become healthy. Check logs with: docker-compose logs -f api" + exit 1 +fi + +echo "" +echo "✅ Server Inventory Management System is running!" 
+echo "" +echo "📍 Environment: $ENVIRONMENT" +echo "📍 API: http://localhost:8000" +echo "📚 Swagger UI: http://localhost:8000/docs" +echo "📖 ReDoc: http://localhost:8000/redoc" +echo "" +echo "Commands:" +echo " View logs: docker-compose logs -f" +echo " Stop: docker-compose down" +echo " Run tests: environment=TESTING poetry run pytest" +echo "" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..033ce93 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Test suite for server inventory management system.""" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..012ddff --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,97 @@ +"""Pytest configuration and fixtures.""" + +import asyncio +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio +from httpx import ASGITransport, AsyncClient +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine + +from app.database import Base, get_db + +# Test database URL +TEST_DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/server_inventory_test" + + +@pytest.fixture(scope="session") +def event_loop(): + """Create an instance of the default event loop for the test session.""" + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +# Create test engine at module level for reuse +test_engine = create_async_engine( + TEST_DATABASE_URL, + echo=False, + future=True, +) + +# Create test session factory +TestSessionLocal = async_sessionmaker( + test_engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, +) + + +@pytest_asyncio.fixture(scope="function") +async def test_db_session() -> AsyncGenerator[AsyncSession, None]: + """Create a test database session.""" + # Create tables + async with test_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + # Create session + async with TestSessionLocal() as session: + yield session + + # Drop tables after test + async with test_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + + +@pytest_asyncio.fixture +async def async_test_client( + test_db_session: AsyncSession, +) -> AsyncGenerator[AsyncClient, None]: + """Create an async test client with test database override.""" + # Import app here to avoid circular imports and ensure fresh state + from app.main import app + + async def override_get_db() -> AsyncGenerator[AsyncSession, None]: + """Override database dependency for testing.""" + yield test_db_session + + app.dependency_overrides[get_db] = override_get_db + + # Use ASGITransport for proper async handling + transport = ASGITransport(app=app, raise_app_exceptions=False) + async with AsyncClient(transport=transport, base_url="http://test") as client: + yield client + + app.dependency_overrides.clear() + + +@pytest.fixture +def sample_server_data(): + """Sample server data for testing.""" + return { + "hostname": "test-server-01", + "ip_address": "192.168.1.100", + "state": "active", + } + + +@pytest.fixture +def sample_server_update_data(): + """Sample server update data for testing.""" + return { + "hostname": "test-server-01-updated", + "ip_address": "192.168.1.101", + "state": "offline", + } diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..e87b37a --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,456 @@ +"""Tests for CLI tool.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +from 
typer.testing import CliRunner + +from cli.main import app + +runner = CliRunner() + + +def create_mock_response(json_data, status_code=200): + """Create a mock httpx Response.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.json.return_value = json_data + mock_response.status_code = status_code + mock_response.raise_for_status = MagicMock() + mock_response.headers = {"content-type": "application/json"} + return mock_response + + +class TestCLI: + """Test CLI commands.""" + + @patch("cli.main.get_client") + def test_create_server_success(self, mock_get_client, sample_server_data): + """Test successful server creation via CLI.""" + mock_response = create_mock_response( + { + "id": 1, + **sample_server_data, + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + }, + status_code=201, + ) + + # Create mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.post = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke( + app, + [ + "create", + sample_server_data["hostname"], + sample_server_data["ip_address"], + sample_server_data["state"], + ], + ) + assert result.exit_code == 0, f"Output: {result.stdout}" + + @patch("cli.main.get_client") + def test_list_servers_success(self, mock_get_client): + """Test listing servers via CLI.""" + mock_response = create_mock_response( + { + "servers": [ + { + "id": 1, + "hostname": "test-server", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + } + ], + "count": 1, + }, + status_code=200, + ) + + # Create mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["list"]) + assert result.exit_code == 0, f"Output: {result.stdout}" + assert "test-server" in result.stdout + + @patch("cli.main.get_client") + def test_get_server_success(self, mock_get_client): + """Test getting server via CLI.""" + mock_response = create_mock_response( + { + "id": 1, + "hostname": "test-server", + "ip_address": "192.168.1.100", + "state": "active", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + }, + status_code=200, + ) + + # Create mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["get", "1"]) + assert result.exit_code == 0, f"Output: {result.stdout}" + assert "test-server" in result.stdout + + @patch("cli.main.get_client") + def test_update_server_success(self, mock_get_client): + """Test updating server via CLI.""" + mock_response = create_mock_response( + { + "id": 1, + "hostname": "updated-server", + "ip_address": "192.168.1.101", + "state": "offline", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + }, + status_code=200, + ) + + # Create mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.put = 
AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["update", "1", "--hostname=updated-server"]) + assert result.exit_code == 0, f"Output: {result.stdout}" + + @patch("cli.main.get_client") + def test_delete_server_success(self, mock_get_client): + """Test deleting server via CLI.""" + mock_response = create_mock_response({}, status_code=204) + + # Create mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.delete = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["delete", "1"]) + assert result.exit_code == 0, f"Output: {result.stdout}" + + @patch("cli.main.get_client") + def test_create_server_http_error(self, mock_get_client, sample_server_data): + """Test create server with HTTP error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 409 + mock_response.text = "Conflict" + mock_response.headers = {"content-type": "application/json"} + mock_response.json.return_value = {"detail": "Hostname already exists"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.post = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke( + app, + [ + "create", + sample_server_data["hostname"], + sample_server_data["ip_address"], + sample_server_data["state"], + ], + ) + assert result.exit_code == 1 + + @patch("cli.main.get_client") + def test_create_server_general_error(self, mock_get_client, sample_server_data): + """Test create server with general exception.""" + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.post = AsyncMock(side_effect=Exception("Connection failed")) + mock_get_client.return_value = mock_client + + result = runner.invoke( + app, + [ + "create", + sample_server_data["hostname"], + sample_server_data["ip_address"], + sample_server_data["state"], + ], + ) + assert result.exit_code == 1 + assert "Connection failed" in result.stdout + + @patch("cli.main.get_client") + def test_list_servers_http_error(self, mock_get_client): + """Test list servers with HTTP error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.text = "Internal Server Error" + mock_response.headers = {"content-type": "text/plain"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["list"]) + assert result.exit_code == 1 + + @patch("cli.main.get_client") + def test_list_servers_general_error(self, mock_get_client): + """Test list servers with general exception.""" + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = 
AsyncMock(return_value=None) + mock_client.get = AsyncMock(side_effect=Exception("Connection failed")) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["list"]) + assert result.exit_code == 1 + assert "Connection failed" in result.stdout + + @patch("cli.main.get_client") + def test_list_servers_empty(self, mock_get_client): + """Test list servers when empty.""" + mock_response = create_mock_response({"servers": [], "count": 0}, status_code=200) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["list"]) + assert result.exit_code == 0 + assert "No servers found" in result.stdout + + @patch("cli.main.get_client") + def test_get_server_http_error_404(self, mock_get_client): + """Test get server with 404 error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = "Not Found" + mock_response.headers = {"content-type": "application/json"} + mock_response.json.return_value = {"detail": "Server not found"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["get", "99999"]) + assert result.exit_code == 1 + assert "not found" in result.stdout.lower() + + @patch("cli.main.get_client") + def test_get_server_http_error_other(self, mock_get_client): + """Test get server with other HTTP error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.text = "Internal Server Error" + mock_response.headers = {"content-type": "text/plain"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["get", "1"]) + assert result.exit_code == 1 + + @patch("cli.main.get_client") + def test_get_server_general_error(self, mock_get_client): + """Test get server with general exception.""" + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.get = AsyncMock(side_effect=Exception("Connection failed")) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["get", "1"]) + assert result.exit_code == 1 + assert "Connection failed" in result.stdout + + @patch("cli.main.get_client") + def test_update_server_no_fields(self, mock_get_client): + """Test update server with no fields to update.""" + result = runner.invoke(app, ["update", "1"]) + assert result.exit_code == 1 + assert "At least one field must be provided" in result.stdout + + @patch("cli.main.get_client") + def test_update_server_http_error_404(self, mock_get_client): + """Test update server with 404 error.""" + mock_response = 
MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = "Not Found" + mock_response.headers = {"content-type": "application/json"} + mock_response.json.return_value = {"detail": "Server not found"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.put = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["update", "99999", "--hostname=test"]) + assert result.exit_code == 1 + assert "not found" in result.stdout.lower() + + @patch("cli.main.get_client") + def test_update_server_http_error_other(self, mock_get_client): + """Test update server with other HTTP error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 409 + mock_response.text = "Conflict" + mock_response.headers = {"content-type": "application/json"} + mock_response.json.return_value = {"detail": "Duplicate hostname"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.put = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["update", "1", "--hostname=test"]) + assert result.exit_code == 1 + + @patch("cli.main.get_client") + def test_update_server_general_error(self, mock_get_client): + """Test update server with general exception.""" + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.put = AsyncMock(side_effect=Exception("Connection failed")) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["update", "1", "--hostname=test"]) + assert result.exit_code == 1 + assert "Connection failed" in result.stdout + + @patch("cli.main.get_client") + def test_delete_server_http_error_404(self, mock_get_client): + """Test delete server with 404 error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.text = "Not Found" + mock_response.headers = {"content-type": "application/json"} + mock_response.json.return_value = {"detail": "Server not found"} + + error = httpx.HTTPStatusError("Error", request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.delete = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["delete", "99999"]) + assert result.exit_code == 1 + assert "not found" in result.stdout.lower() + + @patch("cli.main.get_client") + def test_delete_server_http_error_other(self, mock_get_client): + """Test delete server with other HTTP error.""" + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.text = "Internal Server Error" + mock_response.headers = {"content-type": "text/plain"} + + error = httpx.HTTPStatusError("Error", 
request=MagicMock(), response=mock_response) + mock_response.raise_for_status = MagicMock(side_effect=error) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.delete = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["delete", "1"]) + assert result.exit_code == 1 + + @patch("cli.main.get_client") + def test_delete_server_general_error(self, mock_get_client): + """Test delete server with general exception.""" + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.delete = AsyncMock(side_effect=Exception("Connection failed")) + mock_get_client.return_value = mock_client + + result = runner.invoke(app, ["delete", "1"]) + assert result.exit_code == 1 + assert "Connection failed" in result.stdout + + @patch("cli.main.get_client") + def test_update_with_all_options(self, mock_get_client): + """Test update with all options (hostname, ip, state).""" + mock_response = create_mock_response( + { + "id": 1, + "hostname": "updated-server", + "ip_address": "10.0.0.1", + "state": "offline", + "created_at": "2024-01-01T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + }, + status_code=200, + ) + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.put = AsyncMock(return_value=mock_response) + mock_get_client.return_value = mock_client + + result = runner.invoke( + app, + [ + "update", + "1", + "--hostname=updated-server", + "--ip=10.0.0.1", + "--state=offline", + ], + ) + assert result.exit_code == 0 diff --git a/tests/test_repositories.py b/tests/test_repositories.py new file mode 100644 index 0000000..a8396c0 --- /dev/null +++ b/tests/test_repositories.py @@ -0,0 +1,1032 @@ +"""Tests for repository layer (database operations).""" + +from unittest.mock import AsyncMock, patch + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import DatabaseError, DuplicateHostnameError +from app.models.server import Server +from app.repositories.base import QueryStrategy +from app.repositories.factory import RepositoryFactory, get_repository +from app.repositories.server_repository import ServerRepository, get_server_repository +from app.repositories.strategies.orm import ORMRepository +from app.repositories.strategies.raw_sql import RawSQLRepository +from app.schemas.server import ServerCreate, ServerUpdate + + +class TestServerRepository: + """Test server repository database operations.""" + + @pytest.mark.asyncio + async def test_create_server(self, test_db_session: AsyncSession, sample_server_data): + """Test creating a server.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + server = await repository.create(server_data) + assert server.hostname == sample_server_data["hostname"] + assert server.ip_address == sample_server_data["ip_address"] + assert server.state == sample_server_data["state"] + assert server.id is not None + + @pytest.mark.asyncio + async def test_create_duplicate_hostname( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test creating server with duplicate hostname.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + await 
repository.create(server_data) + with pytest.raises(DuplicateHostnameError): + await repository.create(server_data) + + @pytest.mark.asyncio + async def test_get_by_id(self, test_db_session: AsyncSession, sample_server_data): + """Test getting server by ID.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + found = await repository.get_by_id(created.id) + assert found is not None + assert found.id == created.id + assert found.hostname == sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_get_by_id_not_found(self, test_db_session: AsyncSession): + """Test getting non-existent server by ID.""" + repository = ServerRepository(test_db_session) + result = await repository.get_by_id(99999) + assert result is None + + @pytest.mark.asyncio + async def test_get_by_hostname(self, test_db_session: AsyncSession, sample_server_data): + """Test getting server by hostname.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + found = await repository.get_by_hostname(sample_server_data["hostname"]) + assert found is not None + assert found.id == created.id + + @pytest.mark.asyncio + async def test_get_by_hostname_not_found(self, test_db_session: AsyncSession): + """Test getting non-existent server by hostname.""" + repository = ServerRepository(test_db_session) + result = await repository.get_by_hostname("non-existent") + assert result is None + + @pytest.mark.asyncio + async def test_list_all(self, test_db_session: AsyncSession, sample_server_data): + """Test listing all servers.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + await repository.create(server_data) + servers = await repository.list_all() + assert len(servers) == 1 + assert servers[0].hostname == sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_update_server( + self, + test_db_session: AsyncSession, + sample_server_data, + sample_server_update_data, + ): + """Test updating server.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + update_data = ServerUpdate(**sample_server_update_data) + updated = await repository.update(created.id, update_data) + assert updated.hostname == sample_server_update_data["hostname"] + assert updated.ip_address == sample_server_update_data["ip_address"] + assert updated.state == sample_server_update_data["state"] + + @pytest.mark.asyncio + async def test_update_server_not_found( + self, test_db_session: AsyncSession, sample_server_update_data + ): + """Test updating non-existent server.""" + repository = ServerRepository(test_db_session) + update_data = ServerUpdate(**sample_server_update_data) + result = await repository.update(99999, update_data) + assert result is None + + @pytest.mark.asyncio + async def test_update_server_duplicate_hostname( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test updating server with duplicate hostname.""" + repository = ServerRepository(test_db_session) + # Create two servers + await repository.create(ServerCreate(**sample_server_data)) + server2_data = {**sample_server_data, "hostname": "server-02"} + server2 = await repository.create(ServerCreate(**server2_data)) + # Try to update server2's hostname to match server1 + update_data = 
ServerUpdate(hostname=sample_server_data["hostname"]) + with pytest.raises(DuplicateHostnameError): + await repository.update(server2.id, update_data) + + @pytest.mark.asyncio + async def test_delete_server(self, test_db_session: AsyncSession, sample_server_data): + """Test deleting server.""" + repository = ServerRepository(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + deleted = await repository.delete(created.id) + assert deleted is True + # Verify deletion + found = await repository.get_by_id(created.id) + assert found is None + + @pytest.mark.asyncio + async def test_delete_server_not_found(self, test_db_session: AsyncSession): + """Test deleting non-existent server.""" + repository = ServerRepository(test_db_session) + deleted = await repository.delete(99999) + assert deleted is False + + @pytest.mark.asyncio + async def test_strategy_property(self, test_db_session: AsyncSession): + """Test getting current strategy.""" + repository = ServerRepository(test_db_session) + strategy = repository.strategy + assert strategy in [QueryStrategy.ORM, QueryStrategy.RAW_SQL] + + @pytest.mark.asyncio + async def test_get_server_repository_factory(self, test_db_session: AsyncSession): + """Test get_server_repository factory function.""" + repository = get_server_repository(test_db_session) + assert isinstance(repository, ServerRepository) + + @pytest.mark.asyncio + async def test_get_server_repository_with_strategy(self, test_db_session: AsyncSession): + """Test get_server_repository with explicit strategy.""" + repository = get_server_repository(test_db_session, strategy=QueryStrategy.ORM) + assert isinstance(repository, ServerRepository) + + +class TestRawSQLRepository: + """Test raw SQL repository strategy.""" + + @pytest.mark.asyncio + async def test_create_server_raw_sql(self, test_db_session: AsyncSession, sample_server_data): + """Test creating a server with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + server = await repository.create(server_data) + assert server.hostname == sample_server_data["hostname"] + assert server.ip_address == sample_server_data["ip_address"] + assert server.id is not None + + @pytest.mark.asyncio + async def test_get_by_id_raw_sql(self, test_db_session: AsyncSession, sample_server_data): + """Test getting server by ID with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + found = await repository.get_by_id(created.id) + assert found is not None + assert found.id == created.id + + @pytest.mark.asyncio + async def test_get_by_id_not_found_raw_sql(self, test_db_session: AsyncSession): + """Test getting non-existent server by ID with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + result = await repository.get_by_id(99999) + assert result is None + + @pytest.mark.asyncio + async def test_get_by_hostname_raw_sql(self, test_db_session: AsyncSession, sample_server_data): + """Test getting server by hostname with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + await repository.create(server_data) + found = await repository.get_by_hostname(sample_server_data["hostname"]) + assert found is not None + assert found.hostname 
== sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_list_all_raw_sql(self, test_db_session: AsyncSession, sample_server_data): + """Test listing all servers with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + await repository.create(server_data) + servers = await repository.list_all() + assert len(servers) == 1 + + @pytest.mark.asyncio + async def test_list_all_empty_raw_sql(self, test_db_session: AsyncSession): + """Test listing servers when empty with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + servers = await repository.list_all() + assert len(servers) == 0 + + @pytest.mark.asyncio + async def test_update_server_raw_sql( + self, + test_db_session: AsyncSession, + sample_server_data, + sample_server_update_data, + ): + """Test updating server with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + update_data = ServerUpdate(**sample_server_update_data) + updated = await repository.update(created.id, update_data) + assert updated.hostname == sample_server_update_data["hostname"] + + @pytest.mark.asyncio + async def test_update_server_partial_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test partial update with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + # Only update hostname + update_data = ServerUpdate(hostname="new-hostname") + updated = await repository.update(created.id, update_data) + assert updated.hostname == "new-hostname" + assert updated.ip_address == sample_server_data["ip_address"] + + @pytest.mark.asyncio + async def test_update_server_not_found_raw_sql( + self, test_db_session: AsyncSession, sample_server_update_data + ): + """Test updating non-existent server with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + update_data = ServerUpdate(**sample_server_update_data) + result = await repository.update(99999, update_data) + assert result is None + + @pytest.mark.asyncio + async def test_update_server_no_changes_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update with no changes returns existing record.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + # Empty update + update_data = ServerUpdate() + updated = await repository.update(created.id, update_data) + assert updated.id == created.id + + @pytest.mark.asyncio + async def test_delete_server_raw_sql(self, test_db_session: AsyncSession, sample_server_data): + """Test deleting server with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + deleted = await repository.delete(created.id) + assert deleted is True + found = await repository.get_by_id(created.id) + assert found is None + + @pytest.mark.asyncio + async def test_delete_server_not_found_raw_sql(self, test_db_session: AsyncSession): + """Test deleting non-existent server with raw SQL.""" + repository = 
ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + deleted = await repository.delete(99999) + assert deleted is False + + @pytest.mark.asyncio + async def test_duplicate_hostname_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test duplicate hostname detection with raw SQL.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server_data = ServerCreate(**sample_server_data) + await repository.create(server_data) + with pytest.raises(DuplicateHostnameError): + await repository.create(server_data) + + +class TestORMRepository: + """Test ORM repository strategy edge cases.""" + + @pytest.mark.asyncio + async def test_create_server_orm(self, test_db_session: AsyncSession, sample_server_data): + """Test creating a server with ORM.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server_data = ServerCreate(**sample_server_data) + server = await repository.create(server_data) + assert server.hostname == sample_server_data["hostname"] + assert server.id is not None + + @pytest.mark.asyncio + async def test_list_all_with_order_orm(self, test_db_session: AsyncSession, sample_server_data): + """Test listing with ordering using ORM.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + # Create multiple servers + for i in range(3): + data = {**sample_server_data, "hostname": f"server-{i:02d}"} + await repository.create(ServerCreate(**data)) + servers = await repository.list_all() + assert len(servers) == 3 + + @pytest.mark.asyncio + async def test_update_partial_orm(self, test_db_session: AsyncSession, sample_server_data): + """Test partial update with ORM.""" + repository = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server_data = ServerCreate(**sample_server_data) + created = await repository.create(server_data) + update_data = ServerUpdate(hostname="updated-hostname") + updated = await repository.update(created.id, update_data) + assert updated.hostname == "updated-hostname" + assert updated.ip_address == sample_server_data["ip_address"] + + +class TestRepositoryFactory: + """Test repository factory.""" + + def test_create_orm_repository(self, test_db_session: AsyncSession): + """Test creating ORM repository.""" + repo = RepositoryFactory.create_orm(test_db_session, Server) + assert isinstance(repo, ORMRepository) + + def test_create_raw_sql_repository(self, test_db_session: AsyncSession): + """Test creating Raw SQL repository.""" + repo = RepositoryFactory.create_raw_sql(test_db_session, Server) + assert isinstance(repo, RawSQLRepository) + + def test_create_with_explicit_orm_strategy(self, test_db_session: AsyncSession): + """Test creating repository with explicit ORM strategy.""" + repo = RepositoryFactory.create(test_db_session, Server, strategy=QueryStrategy.ORM) + assert isinstance(repo, ORMRepository) + + def test_create_with_explicit_raw_sql_strategy(self, test_db_session: AsyncSession): + """Test creating repository with explicit Raw SQL strategy.""" + repo = RepositoryFactory.create(test_db_session, Server, strategy=QueryStrategy.RAW_SQL) + assert isinstance(repo, RawSQLRepository) + + @patch("app.repositories.factory.settings") + def test_create_uses_config_orm(self, mock_settings, test_db_session: AsyncSession): + """Test factory uses config for ORM.""" + mock_settings.use_raw_sql = False + repo = RepositoryFactory.create(test_db_session, Server) + assert isinstance(repo, ORMRepository) + + 
@patch("app.repositories.factory.settings") + def test_create_uses_config_raw_sql(self, mock_settings, test_db_session: AsyncSession): + """Test factory uses config for Raw SQL.""" + mock_settings.use_raw_sql = True + repo = RepositoryFactory.create(test_db_session, Server) + assert isinstance(repo, RawSQLRepository) + + @patch("app.repositories.factory.settings") + def test_get_current_strategy_orm(self, mock_settings): + """Test get_current_strategy returns ORM.""" + mock_settings.use_raw_sql = False + assert RepositoryFactory.get_current_strategy() == QueryStrategy.ORM + + @patch("app.repositories.factory.settings") + def test_get_current_strategy_raw_sql(self, mock_settings): + """Test get_current_strategy returns Raw SQL.""" + mock_settings.use_raw_sql = True + assert RepositoryFactory.get_current_strategy() == QueryStrategy.RAW_SQL + + def test_get_repository_convenience_function(self, test_db_session: AsyncSession): + """Test get_repository convenience function.""" + repo = get_repository(test_db_session, Server) + assert isinstance(repo, (ORMRepository, RawSQLRepository)) + + def test_get_repository_with_strategy(self, test_db_session: AsyncSession): + """Test get_repository with explicit strategy.""" + repo = get_repository(test_db_session, Server, strategy=QueryStrategy.ORM) + assert isinstance(repo, ORMRepository) + + +class TestBaseRepositoryHelpers: + """Test base repository helper methods.""" + + def test_get_model_columns(self, test_db_session: AsyncSession): + """Test _get_model_columns returns column names.""" + repo = ORMRepository(test_db_session, Server) + columns = repo._get_model_columns() + assert "id" in columns + assert "hostname" in columns + assert "ip_address" in columns + assert "state" in columns + + def test_schema_to_dict(self, test_db_session: AsyncSession, sample_server_data): + """Test _schema_to_dict converts schema to dict.""" + repo = ORMRepository(test_db_session, Server) + schema = ServerCreate(**sample_server_data) + result = repo._schema_to_dict(schema) + assert result["hostname"] == sample_server_data["hostname"] + + def test_schema_to_dict_exclude_unset(self, test_db_session: AsyncSession): + """Test _schema_to_dict with exclude_unset.""" + repo = ORMRepository(test_db_session, Server) + schema = ServerUpdate(hostname="new-hostname") + result = repo._schema_to_dict(schema, exclude_unset=True) + assert "hostname" in result + assert "ip_address" not in result + + def test_table_name_property(self, test_db_session: AsyncSession): + """Test table_name property.""" + repo = ORMRepository(test_db_session, Server) + assert repo.table_name == "servers" + + +class TestRawSQLRepositoryHelpers: + """Test raw SQL repository helper methods.""" + + def test_row_to_model_with_none(self, test_db_session: AsyncSession): + """Test _row_to_model returns None for None input.""" + repo = RawSQLRepository(test_db_session, Server) + result = repo._row_to_model(None) + assert result is None + + def test_build_select_columns(self, test_db_session: AsyncSession): + """Test _build_select_columns.""" + repo = RawSQLRepository(test_db_session, Server) + columns = repo._build_select_columns() + assert "id" in columns + assert "hostname" in columns + + +class TestRepositoryErrorHandling: + """Test repository error handling.""" + + @pytest.mark.asyncio + async def test_create_integrity_error_orm( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test create with integrity error for ORM.""" + repo = ORMRepository(test_db_session, Server) + server_data = 
ServerCreate(**sample_server_data) + # Create first server + await repo.create(server_data) + # Try to create duplicate (will trigger integrity error at DB level) + with pytest.raises(DatabaseError) as exc_info: + await repo.create(server_data) + assert "integrity" in str(exc_info.value).lower() or "Database" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_create_integrity_error_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test create with integrity error for Raw SQL.""" + repo = RawSQLRepository(test_db_session, Server) + server_data = ServerCreate(**sample_server_data) + # Create first server + await repo.create(server_data) + # Try to create duplicate + with pytest.raises(DatabaseError) as exc_info: + await repo.create(server_data) + assert "integrity" in str(exc_info.value).lower() or "Database" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_integrity_error_orm( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update with integrity error for ORM.""" + repo = ORMRepository(test_db_session, Server) + # Create two servers + await repo.create(ServerCreate(**sample_server_data)) + server2_data = {**sample_server_data, "hostname": "server-02"} + server2 = await repo.create(ServerCreate(**server2_data)) + # Try to update server2 hostname to match server1 + update_data = ServerUpdate(hostname=sample_server_data["hostname"]) + with pytest.raises(DatabaseError) as exc_info: + await repo.update(server2.id, update_data) + assert "integrity" in str(exc_info.value).lower() or "Database" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_integrity_error_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update with integrity error for Raw SQL.""" + repo = RawSQLRepository(test_db_session, Server) + # Create two servers + await repo.create(ServerCreate(**sample_server_data)) + server2_data = {**sample_server_data, "hostname": "server-02"} + server2 = await repo.create(ServerCreate(**server2_data)) + # Try to update server2 hostname to match server1 + update_data = ServerUpdate(hostname=sample_server_data["hostname"]) + with pytest.raises(DatabaseError) as exc_info: + await repo.update(server2.id, update_data) + assert "integrity" in str(exc_info.value).lower() or "Database" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_invalid_field_orm(self, test_db_session: AsyncSession): + """Test getting by invalid field raises DatabaseError.""" + repo = ORMRepository(test_db_session, Server) + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_field("invalid_field", "value") + assert "Invalid field name" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_invalid_field_raw_sql(self, test_db_session: AsyncSession): + """Test getting by invalid field raises DatabaseError for raw SQL.""" + repo = RawSQLRepository(test_db_session, Server) + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_field("invalid_field", "value") + assert "Invalid field name" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_exists_invalid_field_orm(self, test_db_session: AsyncSession): + """Test exists with invalid field raises DatabaseError.""" + repo = ORMRepository(test_db_session, Server) + with pytest.raises(DatabaseError) as exc_info: + await repo.exists("invalid_field", "value") + assert "Invalid field name" in str(exc_info.value) + + @pytest.mark.asyncio + async def 
test_exists_invalid_field_raw_sql(self, test_db_session: AsyncSession): + """Test exists with invalid field raises DatabaseError for raw SQL.""" + repo = RawSQLRepository(test_db_session, Server) + with pytest.raises(DatabaseError) as exc_info: + await repo.exists("invalid_field", "value") + assert "Invalid field name" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_exists_with_exclude_id_orm( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test exists with exclude_id for ORM.""" + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server = await repo.create(ServerCreate(**sample_server_data)) + # Check existence excluding the created server's ID + exists = await repo._repo.exists( + "hostname", sample_server_data["hostname"], exclude_id=server.id + ) + assert exists is False + # Check existence without excluding + exists = await repo._repo.exists("hostname", sample_server_data["hostname"]) + assert exists is True + + @pytest.mark.asyncio + async def test_exists_with_exclude_id_raw_sql( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test exists with exclude_id for Raw SQL.""" + repo = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server = await repo.create(ServerCreate(**sample_server_data)) + # Check existence excluding the created server's ID + exists = await repo._repo.exists( + "hostname", sample_server_data["hostname"], exclude_id=server.id + ) + assert exists is False + # Check existence without excluding + exists = await repo._repo.exists("hostname", sample_server_data["hostname"]) + assert exists is True + + +class TestServerRepositoryEdgeCases: + """Test server repository edge cases and error handling.""" + + @pytest.mark.asyncio + async def test_create_catches_database_error_with_unique(self, test_db_session: AsyncSession): + """Test create catches DatabaseError with unique constraint.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server_data = ServerCreate(hostname="test", ip_address="1.1.1.1", state="active") + + # Mock _repo.exists to return False, and _repo.create to raise DatabaseError with 'unique' + with patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "create", + new=AsyncMock(side_effect=DatabaseError("unique constraint violation")), + ): + with pytest.raises(DuplicateHostnameError): + await repo.create(server_data) + + @pytest.mark.asyncio + async def test_create_catches_database_error_with_hostname(self, test_db_session: AsyncSession): + """Test create catches DatabaseError with hostname in message.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server_data = ServerCreate(hostname="test", ip_address="1.1.1.1", state="active") + + with patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "create", + new=AsyncMock(side_effect=DatabaseError("duplicate hostname error")), + ): + with pytest.raises(DuplicateHostnameError): + await repo.create(server_data) + + @pytest.mark.asyncio + async def test_create_reraises_other_database_errors(self, test_db_session: AsyncSession): + """Test create re-raises other DatabaseErrors.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server_data = ServerCreate(hostname="test", ip_address="1.1.1.1", state="active") + + with 
patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "create", + new=AsyncMock(side_effect=DatabaseError("connection failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.create(server_data) + assert "connection failed" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_catches_database_error_with_unique( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update catches DatabaseError with unique constraint.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + # Create a server first + server = await repo.create(ServerCreate(**sample_server_data)) + + update_data = ServerUpdate(hostname="new-hostname") + + with patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "update", + new=AsyncMock(side_effect=DatabaseError("unique constraint violation")), + ): + with pytest.raises(DuplicateHostnameError): + await repo.update(server.id, update_data) + + @pytest.mark.asyncio + async def test_update_catches_database_error_with_hostname( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update catches DatabaseError with hostname in message.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server = await repo.create(ServerCreate(**sample_server_data)) + + update_data = ServerUpdate(hostname="new-hostname") + + with patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "update", + new=AsyncMock(side_effect=DatabaseError("duplicate hostname error")), + ): + with pytest.raises(DuplicateHostnameError): + await repo.update(server.id, update_data) + + @pytest.mark.asyncio + async def test_update_reraises_other_database_errors( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update re-raises other DatabaseErrors.""" + from unittest.mock import patch + + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server = await repo.create(ServerCreate(**sample_server_data)) + + update_data = ServerUpdate(hostname="new-hostname") + + with patch.object(repo._repo, "exists", new=AsyncMock(return_value=False)): + with patch.object( + repo._repo, + "update", + new=AsyncMock(side_effect=DatabaseError("connection failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.update(server.id, update_data) + assert "connection failed" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_with_empty_hostname( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update without hostname doesn't check for duplicates.""" + repo = ServerRepository(test_db_session, strategy=QueryStrategy.ORM) + server = await repo.create(ServerCreate(**sample_server_data)) + + # Update only IP, no hostname + update_data = ServerUpdate(ip_address="10.0.0.1") + updated = await repo.update(server.id, update_data) + assert updated.ip_address == "10.0.0.1" + + +class TestORMExceptionPaths: + """Test ORM repository exception paths.""" + + @pytest.mark.asyncio + async def test_create_general_exception(self, test_db_session: AsyncSession): + """Test create with general exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + server_data = ServerCreate(hostname="test", ip_address="1.1.1.1", state="active") + + with patch.object( + test_db_session, + 
"flush", + new=AsyncMock(side_effect=Exception("Connection lost")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.create(server_data) + assert "Failed to create record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_id_exception(self, test_db_session: AsyncSession): + """Test get_by_id with exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_id(1) + assert "Failed to get record by ID" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_field_exception(self, test_db_session: AsyncSession): + """Test get_by_field with exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_field("hostname", "test") + assert "Failed to get record by hostname" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_list_all_exception(self, test_db_session: AsyncSession): + """Test list_all with exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.list_all() + assert "Failed to list records" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_general_exception( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update with general exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + server = await repo.create(ServerCreate(**sample_server_data)) + + update_data = ServerUpdate(hostname="new-hostname") + + with patch.object( + test_db_session, + "flush", + new=AsyncMock(side_effect=Exception("Flush failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.update(server.id, update_data) + assert "Failed to update record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_delete_exception(self, test_db_session: AsyncSession, sample_server_data): + """Test delete with exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + server = await repo.create(ServerCreate(**sample_server_data)) + + with patch.object( + test_db_session, + "delete", + new=AsyncMock(side_effect=Exception("Delete failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.delete(server.id) + assert "Failed to delete record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_exists_exception(self, test_db_session: AsyncSession): + """Test exists with exception.""" + from unittest.mock import patch + + repo = ORMRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.exists("hostname", "test") + assert "Failed to check existence" in str(exc_info.value) + + +class TestRawSQLExceptionPaths: + """Test raw SQL repository exception paths.""" + + @pytest.mark.asyncio + async def test_create_general_exception(self, test_db_session: AsyncSession): + """Test create 
with general exception.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + server_data = ServerCreate(hostname="test", ip_address="1.1.1.1", state="active") + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Connection lost")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.create(server_data) + assert "Failed to create record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_id_exception(self, test_db_session: AsyncSession): + """Test get_by_id with exception.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_id(1) + assert "Failed to get record by ID" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_by_field_exception(self, test_db_session: AsyncSession): + """Test get_by_field with exception.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.get_by_field("hostname", "test") + assert "Failed to get record by hostname" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_list_all_exception(self, test_db_session: AsyncSession): + """Test list_all with exception.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.list_all() + assert "Failed to list records" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_update_general_exception( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test update with general exception after get.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + server = await repo.create(ServerCreate(**sample_server_data)) + + update_data = ServerUpdate(hostname="new-hostname") + original_execute = test_db_session.execute + call_count = [0] + + async def mock_execute(*args, **kwargs): + call_count[0] += 1 + # First call is for get_by_id, let it succeed + if call_count[0] == 1: + return await original_execute(*args, **kwargs) + # Second call is for update, make it fail + raise Exception("Update failed") + + with patch.object(test_db_session, "execute", new=mock_execute): + with pytest.raises(DatabaseError) as exc_info: + await repo.update(server.id, update_data) + assert "Failed to update record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_delete_exception(self, test_db_session: AsyncSession, sample_server_data): + """Test delete with exception after get.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + server = await repo.create(ServerCreate(**sample_server_data)) + + original_execute = test_db_session.execute + call_count = [0] + + async def mock_execute(*args, **kwargs): + call_count[0] += 1 + # First call is for get_by_id, let it succeed + if call_count[0] == 1: + return await original_execute(*args, **kwargs) + # Second call is for delete, make it fail + raise Exception("Delete failed") + + with patch.object(test_db_session, 
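+ # route queries through the counting wrapper above: the first execute() (get_by_id's SELECT) succeeds, the second (the DELETE) raises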
"execute", new=mock_execute): + with pytest.raises(DatabaseError) as exc_info: + await repo.delete(server.id) + assert "Failed to delete record" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_exists_exception(self, test_db_session: AsyncSession): + """Test exists with exception.""" + from unittest.mock import patch + + repo = RawSQLRepository(test_db_session, Server) + + with patch.object( + test_db_session, + "execute", + new=AsyncMock(side_effect=Exception("Query failed")), + ): + with pytest.raises(DatabaseError) as exc_info: + await repo.exists("hostname", "test") + assert "Failed to check existence" in str(exc_info.value) + + +class TestRawSQLRowToModel: + """Test raw SQL row to model conversion.""" + + @pytest.mark.asyncio + async def test_row_to_model_with_mapping( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test _row_to_model handles rows with _mapping attribute.""" + repo = ServerRepository(test_db_session, strategy=QueryStrategy.RAW_SQL) + server = await repo.create(ServerCreate(**sample_server_data)) + # Fetch the server to get a real row + found = await repo.get_by_id(server.id) + assert found is not None + assert found.hostname == sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_list_all_without_order_by( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test list_all with None order_by.""" + repo = RawSQLRepository(test_db_session, Server) + # Create a server + await repo.create(ServerCreate(**sample_server_data)) + # List with None order_by + servers = await repo.list_all(order_by=None) + assert len(servers) == 1 + + @pytest.mark.asyncio + async def test_list_all_with_invalid_order_by( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test list_all with invalid order_by is ignored.""" + repo = RawSQLRepository(test_db_session, Server) + # Create a server + await repo.create(ServerCreate(**sample_server_data)) + # List with invalid order_by (should be ignored) + servers = await repo.list_all(order_by="invalid_column") + assert len(servers) == 1 + + @pytest.mark.asyncio + async def test_list_all_orm_with_invalid_order( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test ORM list_all with invalid order_by is ignored.""" + repo = ORMRepository(test_db_session, Server) + await repo.create(ServerCreate(**sample_server_data)) + # List with invalid order_by + servers = await repo.list_all(order_by="invalid_column") + assert len(servers) == 1 + + @pytest.mark.asyncio + async def test_list_all_orm_without_order( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test ORM list_all with None order_by.""" + repo = ORMRepository(test_db_session, Server) + await repo.create(ServerCreate(**sample_server_data)) + servers = await repo.list_all(order_by=None) + assert len(servers) == 1 diff --git a/tests/test_routes.py b/tests/test_routes.py new file mode 100644 index 0000000..1747388 --- /dev/null +++ b/tests/test_routes.py @@ -0,0 +1,203 @@ +"""Tests for route layer (HTTP endpoints).""" + +import pytest +from fastapi import status + + +class TestServerRoutes: + """Test server CRUD endpoints.""" + + @pytest.mark.asyncio + async def test_create_server_success(self, async_test_client, sample_server_data): + """Test successful server creation.""" + response = await async_test_client.post("/servers", json=sample_server_data) + assert response.status_code == status.HTTP_201_CREATED + data = response.json() + assert data["hostname"] == 
sample_server_data["hostname"] + assert data["ip_address"] == sample_server_data["ip_address"] + assert data["state"] == sample_server_data["state"] + assert "id" in data + assert "created_at" in data + assert "updated_at" in data + # Check Location header (RFC 7231) + assert "Location" in response.headers + assert f"/servers/{data['id']}" in response.headers["Location"] + + @pytest.mark.asyncio + async def test_create_server_duplicate_hostname(self, async_test_client, sample_server_data): + """Test creating server with duplicate hostname.""" + # Create first server + await async_test_client.post("/servers", json=sample_server_data) + # Try to create duplicate + response = await async_test_client.post("/servers", json=sample_server_data) + assert response.status_code == status.HTTP_409_CONFLICT + error = response.json() + # Error is wrapped in "detail" by HTTPException + error_detail = error["detail"] + assert error_detail["status"] == 409 + assert ( + "hostname" in error_detail["detail"].lower() + or "duplicate" in error_detail["detail"].lower() + ) + + @pytest.mark.asyncio + async def test_create_server_invalid_ip(self, async_test_client, sample_server_data): + """Test creating server with invalid IP address.""" + sample_server_data["ip_address"] = "invalid-ip" + response = await async_test_client.post("/servers", json=sample_server_data) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + @pytest.mark.asyncio + async def test_create_server_invalid_state(self, async_test_client, sample_server_data): + """Test creating server with invalid state.""" + sample_server_data["state"] = "invalid-state" + response = await async_test_client.post("/servers", json=sample_server_data) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + @pytest.mark.asyncio + async def test_list_servers_empty(self, async_test_client): + """Test listing servers when none exist.""" + response = await async_test_client.get("/servers") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["count"] == 0 + assert data["servers"] == [] + + @pytest.mark.asyncio + async def test_list_servers_with_data(self, async_test_client, sample_server_data): + """Test listing servers with data.""" + # Create a server + create_response = await async_test_client.post("/servers", json=sample_server_data) + created_server = create_response.json() + + # List servers + response = await async_test_client.get("/servers") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["count"] == 1 + assert len(data["servers"]) == 1 + assert data["servers"][0]["id"] == created_server["id"] + + @pytest.mark.asyncio + async def test_get_server_success(self, async_test_client, sample_server_data): + """Test getting server by ID.""" + # Create a server + create_response = await async_test_client.post("/servers", json=sample_server_data) + created_server = create_response.json() + + # Get server + response = await async_test_client.get(f"/servers/{created_server['id']}") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["id"] == created_server["id"] + assert data["hostname"] == sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_get_server_not_found(self, async_test_client): + """Test getting non-existent server.""" + response = await async_test_client.get("/servers/99999") + assert response.status_code == status.HTTP_404_NOT_FOUND + error = response.json() + # Error is wrapped in 
"detail" by HTTPException + assert error["detail"]["status"] == 404 + + @pytest.mark.asyncio + async def test_update_server_success( + self, async_test_client, sample_server_data, sample_server_update_data + ): + """Test updating server.""" + # Create a server + create_response = await async_test_client.post("/servers", json=sample_server_data) + created_server = create_response.json() + + # Update server + response = await async_test_client.put( + f"/servers/{created_server['id']}", + json=sample_server_update_data, + ) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["hostname"] == sample_server_update_data["hostname"] + assert data["ip_address"] == sample_server_update_data["ip_address"] + assert data["state"] == sample_server_update_data["state"] + + @pytest.mark.asyncio + async def test_update_server_partial(self, async_test_client, sample_server_data): + """Test partial server update.""" + # Create a server + create_response = await async_test_client.post("/servers", json=sample_server_data) + created_server = create_response.json() + + # Update only hostname + response = await async_test_client.put( + f"/servers/{created_server['id']}", + json={"hostname": "updated-hostname"}, + ) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["hostname"] == "updated-hostname" + assert data["ip_address"] == sample_server_data["ip_address"] # Unchanged + + @pytest.mark.asyncio + async def test_update_server_not_found(self, async_test_client, sample_server_update_data): + """Test updating non-existent server.""" + response = await async_test_client.put("/servers/99999", json=sample_server_update_data) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_delete_server_success(self, async_test_client, sample_server_data): + """Test deleting server.""" + # Create a server + create_response = await async_test_client.post("/servers", json=sample_server_data) + created_server = create_response.json() + + # Delete server + response = await async_test_client.delete(f"/servers/{created_server['id']}") + assert response.status_code == status.HTTP_204_NO_CONTENT + + # Verify deletion + get_response = await async_test_client.get(f"/servers/{created_server['id']}") + assert get_response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.asyncio + async def test_delete_server_not_found(self, async_test_client): + """Test deleting non-existent server.""" + response = await async_test_client.delete("/servers/99999") + assert response.status_code == status.HTTP_404_NOT_FOUND + + +class TestHealthRoutes: + """Test health check endpoints.""" + + @pytest.mark.asyncio + async def test_health_check(self, async_test_client): + """Test liveness probe.""" + response = await async_test_client.get("/health") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["status"] == "healthy" + assert "timestamp" in data + + @pytest.mark.asyncio + async def test_readiness_check(self, async_test_client): + """Test readiness probe.""" + response = await async_test_client.get("/health/ready") + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data["status"] == "healthy" + assert "timestamp" in data + + +class TestRootEndpoint: + """Test root endpoint.""" + + @pytest.mark.asyncio + async def test_root_endpoint(self, async_test_client): + """Test root endpoint returns API info.""" + response = await async_test_client.get("/") + 
assert response.status_code == status.HTTP_200_OK + data = response.json() + assert "message" in data + assert "version" in data + assert "docs" in data + assert "health" in data diff --git a/tests/test_services.py b/tests/test_services.py new file mode 100644 index 0000000..7e55c07 --- /dev/null +++ b/tests/test_services.py @@ -0,0 +1,123 @@ +"""Tests for service layer (business logic).""" + +import pytest +from pydantic import ValidationError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import DuplicateHostnameError, ServerNotFoundError +from app.schemas.server import ServerCreate, ServerUpdate +from app.services.server_service import ServerService + + +class TestServerService: + """Test server service business logic.""" + + @pytest.mark.asyncio + async def test_create_server_success(self, test_db_session: AsyncSession, sample_server_data): + """Test successful server creation.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + result = await service.create_server(server_data) + assert result.hostname == sample_server_data["hostname"] + assert result.ip_address == sample_server_data["ip_address"] + assert result.state == sample_server_data["state"] + assert result.id is not None + + @pytest.mark.asyncio + async def test_create_server_duplicate_hostname( + self, test_db_session: AsyncSession, sample_server_data + ): + """Test creating server with duplicate hostname.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + # Create first server + await service.create_server(server_data) + # Try to create duplicate + with pytest.raises(DuplicateHostnameError): + await service.create_server(server_data) + + def test_schema_rejects_invalid_ip(self, sample_server_data): + """Test that Pydantic schema rejects invalid IP address.""" + sample_server_data["ip_address"] = "invalid-ip" + with pytest.raises(ValidationError) as exc_info: + ServerCreate(**sample_server_data) + assert "ip_address" in str(exc_info.value) + + def test_schema_rejects_invalid_state(self, sample_server_data): + """Test that Pydantic schema rejects invalid state.""" + sample_server_data["state"] = "invalid-state" + with pytest.raises(ValidationError) as exc_info: + ServerCreate(**sample_server_data) + assert "state" in str(exc_info.value) + + @pytest.mark.asyncio + async def test_get_server_success(self, test_db_session: AsyncSession, sample_server_data): + """Test getting server by ID.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await service.create_server(server_data) + result = await service.get_server(created.id) + assert result.id == created.id + assert result.hostname == sample_server_data["hostname"] + + @pytest.mark.asyncio + async def test_get_server_not_found(self, test_db_session: AsyncSession): + """Test getting non-existent server.""" + service = ServerService(test_db_session) + with pytest.raises(ServerNotFoundError): + await service.get_server(99999) + + @pytest.mark.asyncio + async def test_list_servers(self, test_db_session: AsyncSession, sample_server_data): + """Test listing servers.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + await service.create_server(server_data) + result = await service.list_servers() + assert result.count == 1 + assert len(result.servers) == 1 + + @pytest.mark.asyncio + async def test_update_server_success( + self, + test_db_session: AsyncSession, + 
sample_server_data, + sample_server_update_data, + ): + """Test updating server.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await service.create_server(server_data) + update_data = ServerUpdate(**sample_server_update_data) + result = await service.update_server(created.id, update_data) + assert result.hostname == sample_server_update_data["hostname"] + assert result.ip_address == sample_server_update_data["ip_address"] + assert result.state == sample_server_update_data["state"] + + @pytest.mark.asyncio + async def test_update_server_not_found( + self, test_db_session: AsyncSession, sample_server_update_data + ): + """Test updating non-existent server.""" + service = ServerService(test_db_session) + update_data = ServerUpdate(**sample_server_update_data) + with pytest.raises(ServerNotFoundError): + await service.update_server(99999, update_data) + + @pytest.mark.asyncio + async def test_delete_server_success(self, test_db_session: AsyncSession, sample_server_data): + """Test deleting server.""" + service = ServerService(test_db_session) + server_data = ServerCreate(**sample_server_data) + created = await service.create_server(server_data) + await service.delete_server(created.id) + # Verify deletion + with pytest.raises(ServerNotFoundError): + await service.get_server(created.id) + + @pytest.mark.asyncio + async def test_delete_server_not_found(self, test_db_session: AsyncSession): + """Test deleting non-existent server.""" + service = ServerService(test_db_session) + with pytest.raises(ServerNotFoundError): + await service.delete_server(99999)