From 51ec134cbf7f78ec93d191f174880f31d6c8a842 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 12:19:08 -0800 Subject: [PATCH 01/15] Add gRPC transport layer with PanelCapability flags and unified snapshot models - Add PanelCapability flags and PanelGeneration enum to models.py - Add SpanPanelSnapshot / SpanCircuitSnapshot unified data models - Add SpanPanelClientProtocol + capability mixin Protocols (protocol.py) - Add SpanGrpcClient Gen3 transport (grpc/client.py, grpc/models.py, grpc/const.py) - Add create_span_client() factory with auto-detection (factory.py) - Add SpanPanelGrpcError, SpanPanelGrpcConnectionError to exceptions.py - Add get_snapshot() / connect() / ping() shims to SpanPanelClient for protocol conformance - Add grpcio as optional [grpc] dependency; omit grpc/* and factory.py from coverage - Add design document docs/Dev/grpc-transport-design.md - Bump version to 1.1.15 --- docs/Dev/grpc-transport-design.md | 189 +++++++ poetry.lock | 87 ++- pyproject.toml | 21 +- src/span_panel_api/__init__.py | 43 +- src/span_panel_api/client.py | 88 ++++ src/span_panel_api/exceptions.py | 8 + src/span_panel_api/factory.py | 196 +++++++ src/span_panel_api/grpc/__init__.py | 16 + src/span_panel_api/grpc/client.py | 790 ++++++++++++++++++++++++++++ src/span_panel_api/grpc/const.py | 28 + src/span_panel_api/grpc/models.py | 52 ++ src/span_panel_api/models.py | 96 ++++ src/span_panel_api/protocol.py | 102 ++++ 13 files changed, 1707 insertions(+), 9 deletions(-) create mode 100644 docs/Dev/grpc-transport-design.md create mode 100644 src/span_panel_api/factory.py create mode 100644 src/span_panel_api/grpc/__init__.py create mode 100644 src/span_panel_api/grpc/client.py create mode 100644 src/span_panel_api/grpc/const.py create mode 100644 src/span_panel_api/grpc/models.py create mode 100644 src/span_panel_api/models.py create mode 100644 src/span_panel_api/protocol.py diff --git a/docs/Dev/grpc-transport-design.md b/docs/Dev/grpc-transport-design.md new file mode 100644 index 0000000..e6dd9b2 --- /dev/null +++ b/docs/Dev/grpc-transport-design.md @@ -0,0 +1,189 @@ +# gRPC Transport Interface Design + +## Context + +Gen3 SPAN panels (MLO48 / MAIN40) communicate via gRPC on port 50065 rather than the OpenAPI/HTTP interface used by Gen2. This document describes the transport-abstraction layer added to `span-panel-api` to support both generations behind a common +interface. + +--- + +## Key Differences Between Generations + +| Feature | Gen2 (OpenAPI/HTTP) | Gen3 (gRPC) | +| ---------------------------- | ------------------- | ------------- | +| Circuit relay control | ✅ | ❌ | +| Circuit priority control | ✅ | ❌ | +| Energy history (Wh) | ✅ | ❌ | +| Battery / storage SOE | ✅ | ❌ | +| JWT authentication | ✅ | ❌ (no auth) | +| Solar / feedthrough data | ✅ | ❌ | +| DSM state | ✅ | ❌ | +| Hardware status (door, etc.) | ✅ | ❌ | +| Real-time power metrics | ✅ (polled) | ✅ (streamed) | +| Push streaming | ❌ | ✅ | + +--- + +## Architecture: Protocol + Capability Advertisement + +Two complementary mechanisms work together: + +### 1. `PanelCapability` Flags + +Runtime advertisement of what a client supports. The HA integration reads these at setup time to enable/disable entity platforms before any entities are created. 
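
The flag definitions themselves live in the new `models.py`, which is not reproduced in this design doc. A minimal sketch of the enum shape, assuming Python's `enum.Flag`; the member names and the two composites follow the capabilities described in this document, while the exact bit values are illustrative:

```python
from enum import Flag


class PanelCapability(Flag):
    """Capabilities a transport client advertises at runtime (illustrative sketch)."""

    RELAY_CONTROL = 1
    PRIORITY_CONTROL = 2
    ENERGY_HISTORY = 4
    BATTERY = 8
    SOLAR = 16
    DSM_STATE = 32
    HARDWARE_STATUS = 64
    PUSH_STREAMING = 128

    # Composites referenced throughout this document: Gen2 supports everything
    # except push streaming; Gen3 currently advertises push streaming only.
    GEN2_FULL = (
        RELAY_CONTROL | PRIORITY_CONTROL | ENERGY_HISTORY
        | BATTERY | SOLAR | DSM_STATE | HARDWARE_STATUS
    )
    GEN3_INITIAL = PUSH_STREAMING
```

Platform setup in the integration then keys off these flags: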
+ +```python +caps = client.capabilities +if PanelCapability.RELAY_CONTROL in caps: + platforms.append("switch") # circuit switches +if PanelCapability.BATTERY in caps: + platforms.append("battery_sensor") +if PanelCapability.PRIORITY_CONTROL in caps: + platforms.append("select") # priority selects +if PanelCapability.PUSH_STREAMING in caps: + # Use push coordinator instead of polling coordinator + ... +``` + +**Gen2 default capabilities**: `GEN2_FULL` — all flags except `PUSH_STREAMING`. + +**Gen3 initial capabilities**: `GEN3_INITIAL` — `PUSH_STREAMING` only. Additional capabilities will be added as the Gen3 API matures. + +### 2. Protocol Hierarchy + +Static typing via `typing.Protocol` for type-safe dispatch: + +```text +SpanPanelClientProtocol # Core: capabilities + connect + close + ping + get_snapshot + ├── AuthCapableProtocol # Gen2: authenticate(), set_access_token() + ├── CircuitControlProtocol # Gen2: set_circuit_relay(), set_circuit_priority() + ├── EnergyCapableProtocol # Gen2: get_storage_soe() + └── StreamingCapableProtocol # Gen3: register_callback(), start/stop_streaming() +``` + +All protocols use `@runtime_checkable`, enabling `isinstance()` narrowing: + +```python +if isinstance(client, CircuitControlProtocol): + await client.set_circuit_relay(circuit_id, "OPEN") +``` + +**Design intent**: `capabilities` is for _runtime_ entity platform gating at setup time. The Protocol mixins are for _static type narrowing_ within methods that need to call optional features. + +--- + +## Module Structure + +```text +src/span_panel_api/ +├── __init__.py — public exports (updated) +├── client.py — SpanPanelClient (Gen2 OpenAPI/HTTP) + protocol conformance +├── exceptions.py — + SpanPanelGrpcError, SpanPanelGrpcConnectionError +├── factory.py — create_span_client() factory + auto-detection +├── models.py — PanelCapability, PanelGeneration, SpanPanelSnapshot, SpanCircuitSnapshot +├── protocol.py — SpanPanelClientProtocol + capability Protocol mixins +├── grpc/ — Gen3 gRPC subpackage (requires grpcio) +│ ├── __init__.py +│ ├── client.py — SpanGrpcClient +│ ├── models.py — CircuitInfo, CircuitMetrics, PanelData +│ └── const.py — port 50065, trait IDs, vendor/product IDs +├── phase_validation.py — (unchanged) +├── simulation.py — (unchanged) +└── generated_client/ — (unchanged) +``` + +--- + +## `create_span_client()` Auto-Detection + +```python +from span_panel_api import create_span_client, PanelGeneration + +# Force Gen2 +client = await create_span_client(host, panel_generation=PanelGeneration.GEN2) + +# Force Gen3 (requires pip install span-panel-api[grpc]) +client = await create_span_client(host, panel_generation=PanelGeneration.GEN3) + +# Auto-detect (tries Gen2 HTTP then Gen3 gRPC) +client = await create_span_client(host) +``` + +Auto-detection order: + +1. Probe Gen2 via `SpanPanelClient.ping()` (HTTP status endpoint) +2. Probe Gen3 via `SpanGrpcClient.test_connection()` (gRPC GetInstances) +3. Raise `SpanPanelConnectionError` if neither responds + +--- + +## Installation + +```bash +# Gen2 only (default) +pip install span-panel-api + +# Gen2 + Gen3 gRPC support +pip install span-panel-api[grpc] +``` + +--- + +## Unified Snapshot + +`get_snapshot()` is available on all transport clients and returns a `SpanPanelSnapshot` containing the current state. Fields not supported by a transport are `None`. 
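
The snapshot dataclasses are defined in the new `models.py` (not reproduced here). A rough sketch of their shape, inferred from the constructor calls made by the Gen2 and Gen3 clients in this change; field defaults, exact types, and the `PanelGeneration` string values are assumptions:

```python
from dataclasses import dataclass, field
from enum import Enum


class PanelGeneration(Enum):
    GEN2 = "gen2"
    GEN3 = "gen3"


@dataclass
class SpanCircuitSnapshot:
    """Per-circuit state; fields a transport cannot provide stay at their defaults."""

    circuit_id: str
    name: str
    power_w: float = 0.0
    voltage_v: float = 0.0
    current_a: float = 0.0
    is_on: bool = False
    # Gen2-only fields
    relay_state: str | None = None
    priority: str | None = None
    tabs: list[int] | None = None
    energy_produced_wh: float | None = None
    energy_consumed_wh: float | None = None
    # Gen3-only fields
    is_dual_phase: bool = False
    apparent_power_va: float | None = None
    reactive_power_var: float | None = None
    frequency_hz: float | None = None
    power_factor: float | None = None


@dataclass
class SpanPanelSnapshot:
    """Unified panel state returned by get_snapshot() on every transport."""

    panel_generation: PanelGeneration
    serial_number: str | None = None
    firmware_version: str | None = None
    circuits: dict[str, SpanCircuitSnapshot] = field(default_factory=dict)
    main_power_w: float | None = None
    # Gen2-only fields
    main_relay_state: str | None = None
    grid_power_w: float | None = None
    battery_soe: float | None = None
    dsm_state: str | None = None
    dsm_grid_state: str | None = None
    # Gen3-only fields
    main_voltage_v: float | None = None
    main_current_a: float | None = None
    main_frequency_hz: float | None = None
```

Reading from a snapshot looks like this: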
+ +```python +snapshot = await client.get_snapshot() + +# Available for both Gen2 and Gen3 +print(snapshot.panel_generation) # PanelGeneration.GEN2 or .GEN3 +print(snapshot.serial_number) +print(snapshot.main_power_w) +for cid, circuit in snapshot.circuits.items(): + print(f"{circuit.name}: {circuit.power_w} W") + +# Gen2-only (None for Gen3) +print(snapshot.battery_soe) +print(snapshot.dsm_state) +print(snapshot.grid_power_w) + +# Gen3-only (None for Gen2) +print(snapshot.main_voltage_v) +print(snapshot.main_frequency_hz) +``` + +--- + +## Gen3 gRPC Implementation Notes + +- **No authentication**: Gen3 panels accept connections on port 50065 without any token or credential. +- **Manual protobuf**: The client uses hand-written varint/field parsing to avoid requiring generated stubs — only `grpcio` is needed. +- **Push streaming**: After `start_streaming()`, the client calls registered callbacks on every `Subscribe` notification. Use `get_snapshot()` inside a callback to read the latest data. +- **Circuit discovery**: On `connect()`, `GetInstances` is called to discover all circuit IIDs (trait 26, offset 27), then `GetRevision` on trait 16 fetches the human-readable name for each circuit. + +--- + +## How the HA Integration Uses This + +### Phase 1 — Implemented (span v1.3.2, span-panel-api v1.1.15) + +1. **`span_panel_api.py`**: Added `capabilities` property that delegates to `self._client.capabilities` when the client exists, falling back to `GEN2_FULL`. The underlying `_client` is still `SpanPanelClient`; full migration to `SpanPanelClientProtocol` is + Phase 2. + +2. **`__init__.py` / platform setup**: `_BASE_PLATFORMS` (`BINARY_SENSOR`, `SENSOR`) always loaded; `SWITCH` added when `RELAY_CONTROL` present, `SELECT` added when `PRIORITY_CONTROL` present. Active platform list stored in `hass.data` per entry so unload + is exact. + +3. **`config_flow.py`**: Panel generation dropdown (auto / gen2 / gen3) added to the user form. `CONF_PANEL_GENERATION` stored in config entry `data`. Gen3 path (`async_step_gen3_setup`) probes `SpanGrpcClient` directly, skips JWT auth, and jumps to entity + naming. Gen2/auto path unchanged. + +4. **`sensors/factory.py`**: Capability-gated sensor groups — DSM status sensors require `DSM_STATE`; panel and circuit energy sensors require `ENERGY_HISTORY`; hardware status sensors require `HARDWARE_STATUS`; battery sensor requires `BATTERY`; solar + sensors require `SOLAR`. Panel and circuit power sensors are always created. + +5. **`const.py`**: Added `CONF_PANEL_GENERATION = "panel_generation"`. + +### Phase 2 — Deferred (requires Gen3 hardware) + +- **`coordinator.py`**: `SpanPanelPushCoordinator` — calls `client.register_callback()` and `start_streaming()`, drives entity updates without polling. +- **`span_panel_api.py`**: `_create_client()` Gen3 branch — instantiates `SpanGrpcClient` when `CONF_PANEL_GENERATION == "gen3"`; widens `_client` to `SpanPanelClientProtocol | None`. +- **`sensors/factory.py`**: Gen3 power-metric sensor entities (voltage, current, apparent power, reactive power, frequency, power factor per circuit). diff --git a/poetry.lock b/poetry.lock index 2c77b84..0ead783 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -654,6 +654,84 @@ files = [ {file = "filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1"}, ] +[[package]] +name = "grpcio" +version = "1.78.0" +description = "HTTP/2-based RPC framework" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"grpc\"" +files = [ + {file = "grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5"}, + {file = "grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb"}, + {file = "grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a"}, + {file = "grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813"}, + {file = "grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de"}, + {file = "grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf"}, + {file = "grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6"}, + {file = "grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e"}, + {file = "grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec"}, + {file = "grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074"}, + {file = "grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856"}, + {file = "grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558"}, + {file = "grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97"}, + {file = 
"grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7"}, + {file = "grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6"}, + {file = "grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce"}, + {file = "grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68"}, + {file = "grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e"}, + {file = "grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b"}, + {file = "grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb"}, + {file = "grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702"}, + {file = "grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20"}, + {file = "grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670"}, + {file = "grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4"}, + {file = "grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e"}, + {file = "grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724"}, + {file = "grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b"}, + {file = 
"grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127"}, + {file = "grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65"}, + {file = "grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c"}, + {file = "grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb"}, + {file = "grpcio-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:86f85dd7c947baa707078a236288a289044836d4b640962018ceb9cd1f899af5"}, + {file = "grpcio-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:de8cb00d1483a412a06394b8303feec5dcb3b55f81d83aa216dbb6a0b86a94f5"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e888474dee2f59ff68130f8a397792d8cb8e17e6b3434339657ba4ee90845a8c"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:86ce2371bfd7f212cf60d8517e5e854475c2c43ce14aa910e136ace72c6db6c1"}, + {file = "grpcio-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b0c689c02947d636bc7fab3e30cc3a3445cca99c834dfb77cd4a6cabfc1c5597"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ce7599575eeb25c0f4dc1be59cada6219f3b56176f799627f44088b21381a28a"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:684083fd383e9dc04c794adb838d4faea08b291ce81f64ecd08e4577c7398adf"}, + {file = "grpcio-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab399ef5e3cd2a721b1038a0f3021001f19c5ab279f145e1146bb0b9f1b2b12c"}, + {file = "grpcio-1.78.0-cp39-cp39-win32.whl", hash = "sha256:f3d6379493e18ad4d39537a82371c5281e153e963cecb13f953ebac155756525"}, + {file = "grpcio-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:5361a0630a7fdb58a6a97638ab70e1dae2893c4d08d7aba64ded28bb9e7a29df"}, + {file = "grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5"}, +] + +[package.dependencies] +typing-extensions = ">=4.12,<5.0" + +[package.extras] +protobuf = ["grpcio-tools (>=1.78.0)"] + [[package]] name = "h11" version = "0.16.0" @@ -2347,7 +2425,7 @@ files = [ {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, ] -markers = {main = "python_version <= \"3.12\""} +markers = {main = "python_version <= \"3.12\" or extra == \"grpc\""} [[package]] name = "typing-inspection" @@ -2440,7 +2518,10 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[extras] +grpc = ["grpcio"] + [metadata] lock-version = "2.1" python-versions = ">=3.10,<4.0" -content-hash = "ca30f14cba493c650addea167644746ca1aae98022a55d5a1d3eab1c2c2ec696" +content-hash = 
"43b01ce910d22052042bb0e6c1ced9d7668ea56bd4f34d2fcb9ad063ae12b430" diff --git a/pyproject.toml b/pyproject.toml index cb26ec3..aa215fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "span-panel-api" -version = "1.1.14" +version = "1.1.15" description = "A client library for SPAN Panel API" authors = [ {name = "SpanPanel"} @@ -16,6 +16,9 @@ dependencies = [ "pyyaml>=6.0.0", ] +[project.optional-dependencies] +grpc = ["grpcio>=1.50.0"] + [project.urls] Homepage = "https://github.com/SpanPanel/span-panel-api" Issues = "https://github.com/SpanPanel/span-panel-api/issues" @@ -25,6 +28,12 @@ packages = [ {include = "span_panel_api", from = "src"}, ] +[tool.poetry.extras] +grpc = ["grpcio"] + +[tool.poetry.dependencies] +grpcio = {version = ">=1.50.0", optional = true} + [tool.poetry.scripts] format-markdown = "scripts.format_markdown:main" @@ -138,12 +147,22 @@ module = [ ] ignore_missing_imports = true +[[tool.mypy.overrides]] +module = [ + "grpc", + "grpc.*", + "grpc.aio", +] +ignore_missing_imports = true + [tool.coverage.run] data_file = ".local_coverage_data" source = ["src/span_panel_api"] omit = [ "src/span_panel_api/generated_client/*", + "src/span_panel_api/grpc/*", + "src/span_panel_api/factory.py", "generate_client.py", "tests/*", "*/tests/*", diff --git a/src/span_panel_api/__init__.py b/src/span_panel_api/__init__.py index 4b96fb1..fd4ad5f 100644 --- a/src/span_panel_api/__init__.py +++ b/src/span_panel_api/__init__.py @@ -1,6 +1,6 @@ """span-panel-api - SPAN Panel API Client Library. -A modern, type-safe Python client library for the SPAN Panel REST API. +A modern, type-safe Python client library for the SPAN Panel OpenAPI and gRPC APIs. """ # Import our high-level client and exceptions @@ -11,11 +11,17 @@ SpanPanelAuthError, SpanPanelConnectionError, SpanPanelError, + SpanPanelGrpcConnectionError, + SpanPanelGrpcError, SpanPanelRetriableError, SpanPanelServerError, SpanPanelTimeoutError, SpanPanelValidationError, ) +from .factory import create_span_client + +# Import unified transport models and capability flags +from .models import PanelCapability, PanelGeneration, SpanCircuitSnapshot, SpanPanelSnapshot # Import phase validation utilities from .phase_validation import ( @@ -29,26 +35,53 @@ validate_solar_tabs, ) +# Import transport protocols for type-safe dispatch +from .protocol import ( + AuthCapableProtocol, + CircuitControlProtocol, + EnergyCapableProtocol, + SpanPanelClientProtocol, + StreamingCapableProtocol, +) + __version__ = "1.0.0" # fmt: off -__all__ = [ - "PhaseDistribution", +__all__ = [ # noqa: RUF022 + # Client + "SpanPanelClient", + "set_async_delay_func", + # Factory + "create_span_client", + # Models + "PanelCapability", + "PanelGeneration", + "SpanCircuitSnapshot", + "SpanPanelSnapshot", + # Protocols + "AuthCapableProtocol", + "CircuitControlProtocol", + "EnergyCapableProtocol", + "SpanPanelClientProtocol", + "StreamingCapableProtocol", + # Exceptions "SimulationConfigurationError", "SpanPanelAPIError", "SpanPanelAuthError", - "SpanPanelClient", "SpanPanelConnectionError", "SpanPanelError", + "SpanPanelGrpcConnectionError", + "SpanPanelGrpcError", "SpanPanelRetriableError", "SpanPanelServerError", "SpanPanelTimeoutError", "SpanPanelValidationError", + # Phase validation + "PhaseDistribution", "are_tabs_opposite_phase", "get_phase_distribution", "get_tab_phase", "get_valid_tabs_from_branches", "get_valid_tabs_from_panel_data", - "set_async_delay_func", "suggest_balanced_pairing", "validate_solar_tabs", ] diff --git 
a/src/span_panel_api/client.py b/src/span_panel_api/client.py index 1884c7a..5e1e457 100644 --- a/src/span_panel_api/client.py +++ b/src/span_panel_api/client.py @@ -24,6 +24,7 @@ SpanPanelServerError, SpanPanelTimeoutError, ) +from .models import PanelCapability, PanelGeneration, SpanCircuitSnapshot, SpanPanelSnapshot from .simulation import DynamicSimulationEngine T = TypeVar("T") @@ -1768,6 +1769,93 @@ async def get_all_data(self, include_battery: bool = False) -> dict[str, Any]: return result + # --------------------------------------------------------------------------- + # SpanPanelClientProtocol conformance + # --------------------------------------------------------------------------- + + @property + def capabilities(self) -> PanelCapability: + """Return the full Gen2 capability set.""" + return PanelCapability.GEN2_FULL + + async def connect(self) -> bool: + """Probe the panel to verify connectivity. + + Unlike the async context manager (``async with client``), this method + does not open a persistent httpx connection pool. It is intended for + one-shot reachability checks and auto-detection. + + Returns: + ``True`` if the panel responded, ``False`` otherwise. + """ + return await self.ping() + + async def ping(self) -> bool: + """Return ``True`` if the panel is reachable and responds to status requests.""" + try: + await self.get_status() + return True + except Exception: # pylint: disable=broad-exception-caught + return False + + async def get_snapshot(self) -> SpanPanelSnapshot: + """Return a unified, transport-agnostic snapshot of the current panel state. + + Fetches status, panel state, and circuits concurrently. Battery SOE + is attempted separately and silently omitted if the panel has no + storage hardware. + """ + from .generated_client.types import UNSET # pylint: disable=import-outside-toplevel + + status, panel_state, circuits_out = await asyncio.gather( + self.get_status(), + self.get_panel_state(), + self.get_circuits(), + ) + + battery_soe: float | None = None + with suppress(Exception): # pylint: disable=broad-exception-caught + storage = await self.get_storage_soe() + pct = storage.soe.percentage + if isinstance(pct, int): + battery_soe = float(pct) + + circuit_snapshots: dict[str, SpanCircuitSnapshot] = {} + for circuit_id, circuit in circuits_out.circuits.additional_properties.items(): + name: str = str(circuit.name) if circuit.name is not UNSET else f"Circuit {circuit_id}" + tabs_raw = circuit.tabs + tabs: list[int] | None = list(tabs_raw) if isinstance(tabs_raw, list) else None + relay_str: str | None = str(circuit.relay_state) if circuit.relay_state else None + priority_str: str | None = str(circuit.priority) if circuit.priority else None + is_on = str(circuit.relay_state) == "CLOSED" + + circuit_snapshots[circuit_id] = SpanCircuitSnapshot( + circuit_id=circuit_id, + name=name, + power_w=circuit.instant_power_w, + voltage_v=0.0, + current_a=0.0, + is_on=is_on, + relay_state=relay_str, + priority=priority_str, + tabs=tabs, + energy_produced_wh=circuit.produced_energy_wh, + energy_consumed_wh=circuit.consumed_energy_wh, + ) + + return SpanPanelSnapshot( + panel_generation=PanelGeneration.GEN2, + serial_number=status.system.serial, + firmware_version=status.software.firmware_version, + circuits=circuit_snapshots, + main_power_w=panel_state.instant_grid_power_w, + main_relay_state=str(panel_state.main_relay_state), + grid_power_w=panel_state.instant_grid_power_w, + battery_soe=battery_soe, + dsm_state=panel_state.dsm_state, + 
dsm_grid_state=panel_state.dsm_grid_state, + ) + async def close(self) -> None: """Close the client and cleanup resources.""" if self._client: diff --git a/src/span_panel_api/exceptions.py b/src/span_panel_api/exceptions.py index fd17d1d..95fadf5 100644 --- a/src/span_panel_api/exceptions.py +++ b/src/span_panel_api/exceptions.py @@ -42,3 +42,11 @@ class SpanPanelServerError(SpanPanelAPIError): class SimulationConfigurationError(SpanPanelError): """Simulation configuration is invalid or missing required data.""" + + +class SpanPanelGrpcError(SpanPanelError): + """Base class for gRPC transport errors.""" + + +class SpanPanelGrpcConnectionError(SpanPanelGrpcError): + """Failed to connect to panel via gRPC.""" diff --git a/src/span_panel_api/factory.py b/src/span_panel_api/factory.py new file mode 100644 index 0000000..03d0578 --- /dev/null +++ b/src/span_panel_api/factory.py @@ -0,0 +1,196 @@ +"""Factory for creating SPAN panel transport clients. + +Use :func:`create_span_client` as the single entry point when building +integrations that should work with both Gen2 (OpenAPI/HTTP) and Gen3 (gRPC) +panels. +""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from .client import SpanPanelClient +from .exceptions import SpanPanelConnectionError +from .models import PanelGeneration +from .protocol import SpanPanelClientProtocol + +if TYPE_CHECKING: + from .grpc.client import SpanGrpcClient as SpanGrpcClientType + +_LOGGER = logging.getLogger(__name__) + + +async def create_span_client( + host: str, + panel_generation: PanelGeneration | None = None, + *, + port: int | None = None, + use_ssl: bool = False, + access_token: str | None = None, + timeout: float = 30.0, + retries: int = 0, + retry_timeout: float = 0.5, + retry_backoff_multiplier: float = 2.0, + simulation_mode: bool = False, + simulation_config_path: str | None = None, + simulation_start_time: str | None = None, +) -> SpanPanelClientProtocol: + """Create the appropriate SPAN panel transport client. + + When *panel_generation* is ``None`` the function auto-detects which + generation the panel is by probing in order: Gen2 (OpenAPI/HTTP on port + 80/443) then Gen3 (gRPC on port 50065). + + Args: + host: IP address or hostname of the SPAN panel. + panel_generation: Force a specific generation, or ``None`` to + auto-detect. + port: Override the default port. Defaults to 80 for Gen2 and 50065 + for Gen3. + use_ssl: Use HTTPS for Gen2 connections (default: ``False``). + access_token: JWT access token for Gen2 authenticated requests. + timeout: Request timeout in seconds (Gen2 only). + retries: Number of retry attempts on transient failures (Gen2 only). + retry_timeout: Delay between retries in seconds (Gen2 only). + retry_backoff_multiplier: Exponential backoff multiplier (Gen2 only). + simulation_mode: Enable simulation mode (Gen2 only). + simulation_config_path: Path to YAML simulation config (Gen2 only). + simulation_start_time: Override simulation start time ISO string + (Gen2 only). + + Returns: + A client satisfying :class:`~span_panel_api.protocol.SpanPanelClientProtocol`. + + Raises: + SpanPanelConnectionError: If auto-detection fails to reach the panel + via either transport. + ImportError: If Gen3 is requested but ``grpcio`` is not installed. 
+ """ + if panel_generation == PanelGeneration.GEN2: + return _make_gen2_client( + host=host, + port=port or 80, + use_ssl=use_ssl, + access_token=access_token, + timeout=timeout, + retries=retries, + retry_timeout=retry_timeout, + retry_backoff_multiplier=retry_backoff_multiplier, + simulation_mode=simulation_mode, + simulation_config_path=simulation_config_path, + simulation_start_time=simulation_start_time, + ) + + if panel_generation == PanelGeneration.GEN3: + return _make_gen3_client(host=host, port=port) + + # Auto-detect + return await _auto_detect( + host=host, + port=port, + use_ssl=use_ssl, + access_token=access_token, + timeout=timeout, + retries=retries, + retry_timeout=retry_timeout, + retry_backoff_multiplier=retry_backoff_multiplier, + simulation_mode=simulation_mode, + simulation_config_path=simulation_config_path, + simulation_start_time=simulation_start_time, + ) + + +# --------------------------------------------------------------------------- +# Private helpers +# --------------------------------------------------------------------------- + + +def _make_gen2_client( + host: str, + port: int, + use_ssl: bool, + access_token: str | None, + timeout: float, + retries: int, + retry_timeout: float, + retry_backoff_multiplier: float, + simulation_mode: bool, + simulation_config_path: str | None, + simulation_start_time: str | None, +) -> SpanPanelClient: + client = SpanPanelClient( + host=host, + port=port, + use_ssl=use_ssl, + timeout=timeout, + retries=retries, + retry_timeout=retry_timeout, + retry_backoff_multiplier=retry_backoff_multiplier, + simulation_mode=simulation_mode, + simulation_config_path=simulation_config_path, + simulation_start_time=simulation_start_time, + ) + if access_token: + client.set_access_token(access_token) + return client + + +def _make_gen3_client(host: str, port: int | None) -> SpanGrpcClientType: + try: + from .grpc.client import SpanGrpcClient # pylint: disable=import-outside-toplevel + except ImportError as exc: + raise ImportError( + "grpcio is required for Gen3 gRPC support. 
Install with: pip install span-panel-api[grpc]" + ) from exc + + from .grpc.const import DEFAULT_GRPC_PORT # pylint: disable=import-outside-toplevel + + return SpanGrpcClient(host=host, port=port or DEFAULT_GRPC_PORT) + + +async def _auto_detect( + host: str, + port: int | None, + use_ssl: bool, + access_token: str | None, + timeout: float, + retries: int, + retry_timeout: float, + retry_backoff_multiplier: float, + simulation_mode: bool, + simulation_config_path: str | None, + simulation_start_time: str | None, +) -> SpanPanelClientProtocol: + """Try Gen2 first, then Gen3, raise if neither responds.""" + # Probe Gen2 (OpenAPI/HTTP) + gen2_client = _make_gen2_client( + host=host, + port=port or 80, + use_ssl=use_ssl, + access_token=access_token, + timeout=timeout, + retries=retries, + retry_timeout=retry_timeout, + retry_backoff_multiplier=retry_backoff_multiplier, + simulation_mode=simulation_mode, + simulation_config_path=simulation_config_path, + simulation_start_time=simulation_start_time, + ) + if await gen2_client.ping(): + _LOGGER.info("Auto-detected Gen2 panel at %s", host) + return gen2_client + + # Probe Gen3 (gRPC) + try: + gen3_client = _make_gen3_client(host=host, port=port) + if await gen3_client.ping(): + _LOGGER.info("Auto-detected Gen3 panel at %s", host) + return gen3_client + except ImportError: + _LOGGER.debug("grpcio not installed, skipping Gen3 probe for %s", host) + + raise SpanPanelConnectionError( + f"Could not reach panel at {host} via Gen2 (HTTP) or Gen3 (gRPC). " + "Verify the host address and ensure the panel is online." + ) diff --git a/src/span_panel_api/grpc/__init__.py b/src/span_panel_api/grpc/__init__.py new file mode 100644 index 0000000..e6d7297 --- /dev/null +++ b/src/span_panel_api/grpc/__init__.py @@ -0,0 +1,16 @@ +"""Gen3 gRPC transport subpackage for span-panel-api. + +Requires the optional ``grpcio`` dependency:: + + pip install span-panel-api[grpc] +""" + +from .client import SpanGrpcClient +from .models import CircuitInfo, CircuitMetrics, PanelData + +__all__ = [ + "CircuitInfo", + "CircuitMetrics", + "PanelData", + "SpanGrpcClient", +] diff --git a/src/span_panel_api/grpc/client.py b/src/span_panel_api/grpc/client.py new file mode 100644 index 0000000..25cc26b --- /dev/null +++ b/src/span_panel_api/grpc/client.py @@ -0,0 +1,790 @@ +"""gRPC client for Gen3 SPAN panels (MAIN40 / MLO48). + +Connects to the panel's TraitHandlerService on port 50065 (no authentication +required). Discovers circuits via GetInstances, fetches names via +GetRevision, and streams real-time power metrics via Subscribe. + +Manual protobuf encoding/decoding is used to avoid requiring generated stubs, +keeping the dependency footprint to a single optional ``grpcio`` package. 
+""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +import contextlib +import logging +import struct +from typing import Any + +import grpc # pylint: disable=import-error +import grpc.aio # pylint: disable=import-error + +from ..exceptions import SpanPanelGrpcConnectionError, SpanPanelGrpcError +from ..models import PanelCapability, PanelGeneration, SpanCircuitSnapshot, SpanPanelSnapshot +from .const import ( + BREAKER_OFF_VOLTAGE_MV, + DEFAULT_GRPC_PORT, + MAIN_FEED_IID, + METRIC_IID_OFFSET, + PRODUCT_GEN3_PANEL, + TRAIT_CIRCUIT_NAMES, + TRAIT_POWER_METRICS, + VENDOR_SPAN, +) +from .models import CircuitInfo, CircuitMetrics, PanelData + +_LOGGER = logging.getLogger(__name__) + +# gRPC method paths +_SVC = "/io.span.panel.protocols.traithandler.TraitHandlerService" +_GET_INSTANCES = f"{_SVC}/GetInstances" +_SUBSCRIBE = f"{_SVC}/Subscribe" +_GET_REVISION = f"{_SVC}/GetRevision" + +# --------------------------------------------------------------------------- +# Protobuf helpers — manual varint/field parsing +# --------------------------------------------------------------------------- + +ProtobufValue = bytes | int + + +def _decode_varint(data: bytes, offset: int) -> tuple[int, int]: + """Decode a protobuf varint, return (value, new_offset).""" + result = 0 + shift = 0 + while offset < len(data): + b = data[offset] + offset += 1 + result |= (b & 0x7F) << shift + if not b & 0x80: + break + shift += 7 + return result, offset + + +def _parse_protobuf_fields(data: bytes) -> dict[int, list[ProtobufValue]]: + """Parse raw protobuf bytes into a dict of field_number -> [values].""" + fields: dict[int, list[ProtobufValue]] = {} + offset = 0 + while offset < len(data): + tag, offset = _decode_varint(data, offset) + field_num = tag >> 3 + wire_type = tag & 0x07 + + value: ProtobufValue + if wire_type == 0: # varint + int_val, offset = _decode_varint(data, offset) + value = int_val + elif wire_type == 1: # 64-bit + if offset + 8 > len(data): + break + value = struct.unpack_from(" len(data): + break + value = data[offset : offset + length] + offset += length + elif wire_type == 5: # 32-bit + if offset + 4 > len(data): + break + value = struct.unpack_from(" ProtobufValue | None: + """Return the first value for a field number, or *default*.""" + vals = fields.get(num) + return vals[0] if vals else default + + +def _parse_min_max_avg(data: bytes) -> dict[str, int]: + """Parse a min/max/avg sub-message (fields 1/2/3), returning raw int values.""" + fields = _parse_protobuf_fields(data) + result: dict[str, int] = {} + for key, field_num in (("min", 1), ("max", 2), ("avg", 3)): + raw = _get_field(fields, field_num, 0) + result[key] = raw if isinstance(raw, int) else 0 + return result + + +# --------------------------------------------------------------------------- +# Metric decoders — single-phase, dual-phase, and main feed +# --------------------------------------------------------------------------- + + +def _decode_single_phase(data: bytes) -> CircuitMetrics: + """Decode single-phase (120 V) metrics from protobuf field 11.""" + fields = _parse_protobuf_fields(data) + metrics = CircuitMetrics() + + current_data = _get_field(fields, 1) + if isinstance(current_data, bytes): + metrics.current_a = _parse_min_max_avg(current_data)["avg"] / 1000.0 + + voltage_data = _get_field(fields, 2) + if isinstance(voltage_data, bytes): + metrics.voltage_v = _parse_min_max_avg(voltage_data)["avg"] / 1000.0 + + power_data = _get_field(fields, 3) + if isinstance(power_data, 
bytes): + metrics.power_w = _parse_min_max_avg(power_data)["avg"] / 2000.0 + + apparent_data = _get_field(fields, 4) + if isinstance(apparent_data, bytes): + metrics.apparent_power_va = _parse_min_max_avg(apparent_data)["avg"] / 2000.0 + + reactive_data = _get_field(fields, 5) + if isinstance(reactive_data, bytes): + metrics.reactive_power_var = _parse_min_max_avg(reactive_data)["avg"] / 2000.0 + + metrics.is_on = (metrics.voltage_v * 1000) > BREAKER_OFF_VOLTAGE_MV + return metrics + + +def _decode_dual_phase(data: bytes) -> CircuitMetrics: + """Decode dual-phase (240 V) metrics from protobuf field 12.""" + fields = _parse_protobuf_fields(data) + metrics = CircuitMetrics() + + # Leg A (field 1) + leg_a_data = _get_field(fields, 1) + if isinstance(leg_a_data, bytes): + leg_a = _parse_protobuf_fields(leg_a_data) + current_data = _get_field(leg_a, 1) + if isinstance(current_data, bytes): + metrics.current_a_a = _parse_min_max_avg(current_data)["avg"] / 1000.0 + voltage_data = _get_field(leg_a, 2) + if isinstance(voltage_data, bytes): + metrics.voltage_a_v = _parse_min_max_avg(voltage_data)["avg"] / 1000.0 + + # Leg B (field 2) + leg_b_data = _get_field(fields, 2) + if isinstance(leg_b_data, bytes): + leg_b = _parse_protobuf_fields(leg_b_data) + current_data = _get_field(leg_b, 1) + if isinstance(current_data, bytes): + metrics.current_b_a = _parse_min_max_avg(current_data)["avg"] / 1000.0 + voltage_data = _get_field(leg_b, 2) + if isinstance(voltage_data, bytes): + metrics.voltage_b_v = _parse_min_max_avg(voltage_data)["avg"] / 1000.0 + + # Combined (field 3) + combined_data = _get_field(fields, 3) + if isinstance(combined_data, bytes): + combined = _parse_protobuf_fields(combined_data) + voltage_data = _get_field(combined, 2) + if isinstance(voltage_data, bytes): + metrics.voltage_v = _parse_min_max_avg(voltage_data)["avg"] / 1000.0 + power_data = _get_field(combined, 3) + if isinstance(power_data, bytes): + metrics.power_w = _parse_min_max_avg(power_data)["avg"] / 2000.0 + apparent_data = _get_field(combined, 4) + if isinstance(apparent_data, bytes): + metrics.apparent_power_va = _parse_min_max_avg(apparent_data)["avg"] / 2000.0 + reactive_data = _get_field(combined, 5) + if isinstance(reactive_data, bytes): + metrics.reactive_power_var = _parse_min_max_avg(reactive_data)["avg"] / 2000.0 + pf_data = _get_field(combined, 6) + if isinstance(pf_data, bytes): + metrics.power_factor = _parse_min_max_avg(pf_data)["avg"] / 2000.0 + + # Frequency (field 4) + freq_data = _get_field(fields, 4) + if isinstance(freq_data, bytes): + metrics.frequency_hz = _parse_min_max_avg(freq_data)["avg"] / 1000.0 + + # Total current = leg A + leg B + metrics.current_a = metrics.current_a_a + metrics.current_b_a + + metrics.is_on = (metrics.voltage_v * 1000) > BREAKER_OFF_VOLTAGE_MV + return metrics + + +def _extract_deepest_value(data: bytes, target_field: int = 3) -> int: + """Extract the largest non-zero varint value at *target_field* within nested sub-messages.""" + fields = _parse_protobuf_fields(data) + best = 0 + for fn, vals in fields.items(): + for v in vals: + if isinstance(v, bytes) and len(v) > 0: + inner = _extract_deepest_value(v, target_field) + best = max(best, inner) + elif isinstance(v, int) and fn == target_field and v > best: + best = v + return best + + +def _decode_main_feed(data: bytes) -> CircuitMetrics: + """Decode main feed metrics from protobuf field 14. + + Field 14 has deeper nesting than circuit fields 11/12. 
Structure:: + + 14.1 = primary data block (leg A) + 14.2 = secondary data block (leg B) + Each leg: {1: current stats, 2: voltage stats, 3: power stats, 4: frequency} + """ + fields = _parse_protobuf_fields(data) + main_data = _get_field(fields, 14) + if not isinstance(main_data, bytes): + return CircuitMetrics() + + metrics = CircuitMetrics() + main_fields = _parse_protobuf_fields(main_data) + + # Primary data block (field 1 = leg A) + leg_a = _get_field(main_fields, 1) + if isinstance(leg_a, bytes): + la_fields = _parse_protobuf_fields(leg_a) + + power_stats = _get_field(la_fields, 3) + if isinstance(power_stats, bytes): + metrics.power_w = _extract_deepest_value(power_stats) / 2000.0 + + voltage_stats = _get_field(la_fields, 2) + if isinstance(voltage_stats, bytes): + vs_fields = _parse_protobuf_fields(voltage_stats) + f2 = _get_field(vs_fields, 2) + if isinstance(f2, bytes): + inner = _parse_protobuf_fields(f2) + v = _get_field(inner, 3, 0) + if isinstance(v, int) and v > 0: + metrics.voltage_a_v = v / 1000.0 + + freq_stats = _get_field(la_fields, 4) + if isinstance(freq_stats, bytes): + freq_fields = _parse_protobuf_fields(freq_stats) + freq_val = _get_field(freq_fields, 3, 0) + if isinstance(freq_val, int) and freq_val > 0: + metrics.frequency_hz = freq_val / 1000.0 + + # Leg B (field 2) + leg_b = _get_field(main_fields, 2) + if isinstance(leg_b, bytes): + lb_fields = _parse_protobuf_fields(leg_b) + power_stats = _get_field(lb_fields, 3) + if isinstance(power_stats, bytes): + lb_power = _extract_deepest_value(power_stats) / 2000.0 + if lb_power > 0: + metrics.power_w += lb_power + voltage_stats = _get_field(lb_fields, 2) + if isinstance(voltage_stats, bytes): + vs_fields = _parse_protobuf_fields(voltage_stats) + f2 = _get_field(vs_fields, 2) + if isinstance(f2, bytes): + inner = _parse_protobuf_fields(f2) + v = _get_field(inner, 3, 0) + if isinstance(v, int) and v > 0: + metrics.voltage_b_v = v / 1000.0 + + # Combined voltage (split-phase: leg A + leg B, or 2x leg A if symmetric) + if metrics.voltage_b_v > 0: + metrics.voltage_v = metrics.voltage_a_v + metrics.voltage_b_v + else: + metrics.voltage_v = metrics.voltage_a_v * 2 + + # Derive current from power and voltage + if metrics.voltage_v > 0: + metrics.current_a = metrics.power_w / metrics.voltage_v + + metrics.is_on = True + return metrics + + +# --------------------------------------------------------------------------- +# Protobuf encoding helpers +# --------------------------------------------------------------------------- + + +def _encode_varint(value: int) -> bytes: + """Encode an integer as a protobuf varint.""" + parts: list[int] = [] + while value > 0x7F: + parts.append((value & 0x7F) | 0x80) + value >>= 7 + parts.append(value & 0x7F) + return bytes(parts) if parts else b"\x00" + + +def _encode_varint_field(field_num: int, value: int) -> bytes: + """Encode a varint field (tag + value).""" + tag = (field_num << 3) | 0 # wire type 0 = varint + return _encode_varint(tag) + _encode_varint(value) + + +def _encode_bytes_field(field_num: int, value: bytes) -> bytes: + """Encode a length-delimited field (tag + length + value).""" + tag = (field_num << 3) | 2 # wire type 2 = length-delimited + return _encode_varint(tag) + _encode_varint(len(value)) + value + + +def _encode_string_field(field_num: int, value: str) -> bytes: + """Encode a string field (tag + length + utf-8 bytes).""" + return _encode_bytes_field(field_num, value.encode("utf-8")) + + +# --------------------------------------------------------------------------- +# 
gRPC Client +# --------------------------------------------------------------------------- + + +class SpanGrpcClient: + """gRPC client for Gen3 SPAN panels. + + Connects to the panel's TraitHandlerService on port 50065 (no auth). + Discovers circuits via GetInstances, fetches names via GetRevision, + and streams real-time power metrics via Subscribe. + + Satisfies :class:`~span_panel_api.protocol.SpanPanelClientProtocol` and + :class:`~span_panel_api.protocol.StreamingCapableProtocol`. + """ + + def __init__(self, host: str, port: int = DEFAULT_GRPC_PORT) -> None: + self._host = host + self._port = port + self._channel: Any = None # grpc.aio.Channel — optional dep + self._stream_task: asyncio.Task[None] | None = None + self._data = PanelData() + self._callbacks: list[Callable[[], None]] = [] + self._connected = False + + # ------------------------------------------------------------------ + # SpanPanelClientProtocol implementation + # ------------------------------------------------------------------ + + @property + def capabilities(self) -> PanelCapability: + """Return the capability flags for this Gen3 transport.""" + return PanelCapability.GEN3_INITIAL + + async def connect(self) -> bool: + """Connect to the panel and perform initial circuit discovery.""" + try: + self._channel = grpc.aio.insecure_channel( + f"{self._host}:{self._port}", + options=[ + ("grpc.keepalive_time_ms", 30000), + ("grpc.keepalive_timeout_ms", 10000), + ("grpc.keepalive_permit_without_calls", True), + ], + ) + await self._fetch_instances() + await self._fetch_circuit_names() + self._connected = True + _LOGGER.info( + "Connected to Gen3 panel at %s:%s — %d circuits discovered", + self._host, + self._port, + len(self._data.circuits), + ) + return True + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.exception("Failed to connect to Gen3 panel at %s:%s", self._host, self._port) + self._connected = False + return False + + async def close(self) -> None: + """Close the connection and cancel the streaming task.""" + await self._disconnect() + + async def ping(self) -> bool: + """Return True if the panel is reachable via gRPC.""" + return await self.test_connection() + + async def get_snapshot(self) -> SpanPanelSnapshot: + """Return the current streaming data as a unified transport-agnostic snapshot.""" + data = self._data + circuits: dict[str, SpanCircuitSnapshot] = {} + for cid, info in data.circuits.items(): + m = data.metrics.get(cid, CircuitMetrics()) + circuits[str(cid)] = SpanCircuitSnapshot( + circuit_id=str(cid), + name=info.name, + power_w=m.power_w, + voltage_v=m.voltage_v, + current_a=m.current_a, + is_on=m.is_on, + is_dual_phase=info.is_dual_phase, + apparent_power_va=m.apparent_power_va, + reactive_power_var=m.reactive_power_var, + frequency_hz=m.frequency_hz, + power_factor=m.power_factor, + ) + return SpanPanelSnapshot( + panel_generation=PanelGeneration.GEN3, + serial_number=data.serial, + firmware_version=data.firmware, + circuits=circuits, + main_power_w=data.main_feed.power_w, + main_voltage_v=data.main_feed.voltage_v, + main_current_a=data.main_feed.current_a, + main_frequency_hz=data.main_feed.frequency_hz, + ) + + # ------------------------------------------------------------------ + # StreamingCapableProtocol implementation + # ------------------------------------------------------------------ + + def register_callback(self, cb: Callable[[], None]) -> Callable[[], None]: + """Register a callback invoked on every streaming update. 
+ + Returns an unregister function; call it to remove the callback. + """ + self._callbacks.append(cb) + + def unregister() -> None: + self._callbacks.remove(cb) + + return unregister + + async def start_streaming(self) -> None: + """Start the metric streaming background task.""" + if self._stream_task and not self._stream_task.done(): + return + self._stream_task = asyncio.create_task(self._stream_loop()) + + async def stop_streaming(self) -> None: + """Stop the metric streaming background task.""" + if self._stream_task and not self._stream_task.done(): + self._stream_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._stream_task + + # ------------------------------------------------------------------ + # Additional helpers + # ------------------------------------------------------------------ + + @property + def data(self) -> PanelData: + """Return the raw panel data (circuit topology + latest metrics).""" + return self._data + + @property + def connected(self) -> bool: + """Return True if the client is currently connected.""" + return self._connected + + async def test_connection(self) -> bool: + """Test whether the panel is reachable without a full connect(). + + Opens a temporary channel, sends a GetInstances probe, and closes + the channel — suitable for auto-detection in the factory. + """ + try: + channel = grpc.aio.insecure_channel( + f"{self._host}:{self._port}", + options=[("grpc.initial_reconnect_backoff_ms", 1000)], + ) + try: + response: bytes = await asyncio.wait_for( + channel.unary_unary(_GET_INSTANCES)(b""), + timeout=5.0, + ) + return len(response) > 0 + finally: + await channel.close() + except Exception: # pylint: disable=broad-exception-caught + return False + + # ------------------------------------------------------------------ + # Internal: disconnect + # ------------------------------------------------------------------ + + async def _disconnect(self) -> None: + """Internal disconnect helper.""" + self._connected = False + await self.stop_streaming() + if self._channel is not None: + await self._channel.close() + self._channel = None + + # ------------------------------------------------------------------ + # Internal: instance discovery + # ------------------------------------------------------------------ + + async def _fetch_instances(self) -> None: + """Fetch all trait instances to discover circuit topology.""" + if self._channel is None: + raise SpanPanelGrpcError("Channel is not open") + response: bytes = await self._channel.unary_unary(_GET_INSTANCES)(b"") + self._parse_instances(response) + + def _parse_instances(self, data: bytes) -> None: + """Parse GetInstancesResponse to discover circuits and panel resource ID.""" + fields = _parse_protobuf_fields(data) + items = fields.get(1, []) + + for item_data in items: + if not isinstance(item_data, bytes): + continue + item_fields = _parse_protobuf_fields(item_data) + + trait_info_data = _get_field(item_fields, 1) + if not isinstance(trait_info_data, bytes): + continue + + trait_info_fields = _parse_protobuf_fields(trait_info_data) + + external_data = _get_field(trait_info_fields, 2) + if not isinstance(external_data, bytes): + continue + + ext_fields = _parse_protobuf_fields(external_data) + + # resource_id (field 1) + resource_data = _get_field(ext_fields, 1) + resource_id_str = "" + if isinstance(resource_data, bytes): + rid_fields = _parse_protobuf_fields(resource_data) + rid_val = _get_field(rid_fields, 1) + if isinstance(rid_val, bytes): + resource_id_str = 
rid_val.decode("utf-8", errors="replace") + + # trait_info (field 2) + inner_info = _get_field(ext_fields, 2) + if not isinstance(inner_info, bytes): + continue + + inner_fields = _parse_protobuf_fields(inner_info) + + meta_data = _get_field(inner_fields, 1) + if not isinstance(meta_data, bytes): + continue + + meta_fields = _parse_protobuf_fields(meta_data) + vendor_id_raw = _get_field(meta_fields, 1, 0) + product_id_raw = _get_field(meta_fields, 2, 0) + trait_id_raw = _get_field(meta_fields, 3, 0) + vendor_id = vendor_id_raw if isinstance(vendor_id_raw, int) else 0 + product_id = product_id_raw if isinstance(product_id_raw, int) else 0 + trait_id = trait_id_raw if isinstance(trait_id_raw, int) else 0 + + instance_data = _get_field(inner_fields, 2) + instance_id = 0 + if isinstance(instance_data, bytes): + iid_fields = _parse_protobuf_fields(instance_data) + iid_raw = _get_field(iid_fields, 1, 0) + instance_id = iid_raw if isinstance(iid_raw, int) else 0 + + # Capture panel resource_id + if product_id == PRODUCT_GEN3_PANEL and resource_id_str and not self._data.panel_resource_id: + self._data.panel_resource_id = resource_id_str + + # Detect power metric circuits (trait 26) + if trait_id == TRAIT_POWER_METRICS and vendor_id == VENDOR_SPAN: + circuit_id = instance_id - METRIC_IID_OFFSET + if 1 <= circuit_id <= 50 and circuit_id not in self._data.circuits: + self._data.circuits[circuit_id] = CircuitInfo( + circuit_id=circuit_id, + name=f"Circuit {circuit_id}", + metric_iid=instance_id, + ) + + # ------------------------------------------------------------------ + # Internal: circuit names + # ------------------------------------------------------------------ + + async def _fetch_circuit_names(self) -> None: + """Fetch circuit names from trait 16 via GetRevision.""" + for circuit_id in list(self._data.circuits.keys()): + try: + name = await self._get_circuit_name(circuit_id) + if name: + self._data.circuits[circuit_id].name = name + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.debug("Failed to get name for circuit %d", circuit_id) + + async def _get_circuit_name(self, circuit_id: int) -> str | None: + """Get a single circuit name via GetRevision on trait 16.""" + if self._channel is None: + return None + request = self._build_get_revision_request( + vendor_id=VENDOR_SPAN, + product_id=PRODUCT_GEN3_PANEL, + trait_id=TRAIT_CIRCUIT_NAMES, + instance_id=circuit_id, + ) + try: + response: bytes = await self._channel.unary_unary(_GET_REVISION)(request) + return self._parse_circuit_name(response) + except Exception: # pylint: disable=broad-exception-caught + return None + + def _build_get_revision_request( + self, + vendor_id: int, + product_id: int, + trait_id: int, + instance_id: int, + ) -> bytes: + """Build a GetRevisionRequest protobuf message.""" + meta = _encode_varint_field(1, vendor_id) + meta += _encode_varint_field(2, product_id) + meta += _encode_varint_field(3, trait_id) + meta += _encode_varint_field(4, 1) # version + + resource_id_msg = _encode_string_field(1, self._data.panel_resource_id) + + iid_msg = _encode_varint_field(1, instance_id) + instance_meta = _encode_bytes_field(1, resource_id_msg) + instance_meta += _encode_bytes_field(2, iid_msg) + + req_metadata = _encode_bytes_field(2, resource_id_msg) + revision_request = _encode_bytes_field(1, req_metadata) + + result = _encode_bytes_field(1, meta) + result += _encode_bytes_field(2, instance_meta) + result += _encode_bytes_field(3, revision_request) + return result + + @staticmethod + def 
_parse_circuit_name(data: bytes) -> str | None: + """Parse circuit name from GetRevision response.""" + fields = _parse_protobuf_fields(data) + + sr_data = _get_field(fields, 3) + if not isinstance(sr_data, bytes): + return None + + sr_fields = _parse_protobuf_fields(sr_data) + payload_data = _get_field(sr_fields, 2) + if not isinstance(payload_data, bytes): + return None + + pl_fields = _parse_protobuf_fields(payload_data) + raw = _get_field(pl_fields, 1) + if not isinstance(raw, bytes): + return None + + name_fields = _parse_protobuf_fields(raw) + name = _get_field(name_fields, 4) + if isinstance(name, bytes): + return name.decode("utf-8", errors="replace").strip() + return None + + # ------------------------------------------------------------------ + # Internal: metric streaming + # ------------------------------------------------------------------ + + async def _stream_loop(self) -> None: + """Streaming loop with automatic reconnection on errors.""" + while self._connected: + try: + await self._subscribe_stream() + except asyncio.CancelledError: + return + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.exception("Stream error, reconnecting in 5 s") + await asyncio.sleep(5) + + async def _subscribe_stream(self) -> None: + """Subscribe to the gRPC stream and dispatch notifications.""" + if self._channel is None: + raise SpanPanelGrpcConnectionError("Channel is not open") + stream = self._channel.unary_stream(_SUBSCRIBE)(b"") + async for response in stream: + try: + self._process_notification(response) + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.debug("Error processing notification", exc_info=True) + + def _notify(self) -> None: + """Invoke all registered callbacks.""" + for cb in self._callbacks: + try: + cb() + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.exception("Error in registered callback") + + def _process_notification(self, data: bytes) -> None: + """Process a TraitInstanceNotification from the Subscribe stream.""" + fields = _parse_protobuf_fields(data) + + rti_data = _get_field(fields, 1) + if not isinstance(rti_data, bytes): + return + + rti_fields = _parse_protobuf_fields(rti_data) + ext_data = _get_field(rti_fields, 2) + if not isinstance(ext_data, bytes): + return + + ext_fields = _parse_protobuf_fields(ext_data) + info_data = _get_field(ext_fields, 2) + if not isinstance(info_data, bytes): + return + + info_fields = _parse_protobuf_fields(info_data) + meta_data = _get_field(info_fields, 1) + if not isinstance(meta_data, bytes): + return + + meta_fields = _parse_protobuf_fields(meta_data) + trait_id_raw = _get_field(meta_fields, 3, 0) + trait_id = trait_id_raw if isinstance(trait_id_raw, int) else 0 + + iid_data = _get_field(info_fields, 2) + instance_id = 0 + if isinstance(iid_data, bytes): + iid_fields = _parse_protobuf_fields(iid_data) + iid_raw = _get_field(iid_fields, 1, 0) + instance_id = iid_raw if isinstance(iid_raw, int) else 0 + + # Only process trait 26 (power metrics) + if trait_id != TRAIT_POWER_METRICS: + return + + notify_data = _get_field(fields, 2) + if not isinstance(notify_data, bytes): + return + + notify_fields = _parse_protobuf_fields(notify_data) + + for metric_data in notify_fields.get(3, []): + if not isinstance(metric_data, bytes): + continue + ml_fields = _parse_protobuf_fields(metric_data) + for raw in ml_fields.get(3, []): + if isinstance(raw, bytes): + self._decode_and_store_metric(instance_id, raw) + + self._notify() + + def _decode_and_store_metric(self, iid: int, raw: 
bytes) -> None: + """Decode a raw metric payload and store it in self._data.""" + top_fields = _parse_protobuf_fields(raw) + + # Main feed (IID 1) uses field 14 with deeper nesting + if iid == MAIN_FEED_IID: + self._data.main_feed = _decode_main_feed(raw) + return + + circuit_id = iid - METRIC_IID_OFFSET + if not 1 <= circuit_id <= 50: + return + + # Dual-phase (field 12) — check first (more specific) + dual_data = _get_field(top_fields, 12) + if isinstance(dual_data, bytes): + self._data.metrics[circuit_id] = _decode_dual_phase(dual_data) + if circuit_id in self._data.circuits: + self._data.circuits[circuit_id].is_dual_phase = True + return + + # Single-phase (field 11) + single_data = _get_field(top_fields, 11) + if isinstance(single_data, bytes): + self._data.metrics[circuit_id] = _decode_single_phase(single_data) + if circuit_id in self._data.circuits: + self._data.circuits[circuit_id].is_dual_phase = False diff --git a/src/span_panel_api/grpc/const.py b/src/span_panel_api/grpc/const.py new file mode 100644 index 0000000..1b7312e --- /dev/null +++ b/src/span_panel_api/grpc/const.py @@ -0,0 +1,28 @@ +"""Constants for Gen3 SPAN panel gRPC transport.""" + +# gRPC connection +DEFAULT_GRPC_PORT: int = 50065 +GRPC_SERVICE_PATH: str = "/io.span.panel.protocols.traithandler.TraitHandlerService" + +# Trait IDs +TRAIT_BREAKER_GROUPS: int = 15 +TRAIT_CIRCUIT_NAMES: int = 16 +TRAIT_BREAKER_CONFIG: int = 17 +TRAIT_POWER_METRICS: int = 26 +TRAIT_RELAY_STATE: int = 27 +TRAIT_BREAKER_PARAMS: int = 31 + +# Vendor/Product IDs +VENDOR_SPAN: int = 1 +PRODUCT_GEN3_PANEL: int = 4 +PRODUCT_GEN3_GATEWAY: int = 5 + +# Metric IID offset: circuit N -> metric IID = N + METRIC_IID_OFFSET +METRIC_IID_OFFSET: int = 27 + +# Main feed IID (always 1 for trait 26) +MAIN_FEED_IID: int = 1 + +# Voltage threshold for breaker state detection (millivolts). +# Below this value the breaker is considered OFF. +BREAKER_OFF_VOLTAGE_MV: int = 5000 # 5 V diff --git a/src/span_panel_api/grpc/models.py b/src/span_panel_api/grpc/models.py new file mode 100644 index 0000000..40b6a68 --- /dev/null +++ b/src/span_panel_api/grpc/models.py @@ -0,0 +1,52 @@ +"""Low-level data models for Gen3 gRPC panel data. + +These models represent the raw gRPC-layer data structures — circuit topology +discovered via GetInstances and real-time metrics from the Subscribe stream. +The higher-level SpanPanelSnapshot / SpanCircuitSnapshot models (in +span_panel_api.models) are the transport-agnostic view built from these. 
+""" + +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass +class CircuitInfo: + """Static information about a circuit discovered from trait instances.""" + + circuit_id: int + name: str + metric_iid: int + is_dual_phase: bool = False + + +@dataclass +class CircuitMetrics: + """Real-time power metrics for a circuit from the gRPC Subscribe stream.""" + + power_w: float = 0.0 + voltage_v: float = 0.0 + current_a: float = 0.0 + apparent_power_va: float = 0.0 + reactive_power_var: float = 0.0 + frequency_hz: float = 0.0 + power_factor: float = 0.0 + is_on: bool = True + # Dual-phase per-leg values + voltage_a_v: float = 0.0 + voltage_b_v: float = 0.0 + current_a_a: float = 0.0 + current_b_a: float = 0.0 + + +@dataclass +class PanelData: + """Aggregated panel data from gRPC discovery and streaming.""" + + serial: str = "" + firmware: str = "" + panel_resource_id: str = "" + circuits: dict[int, CircuitInfo] = field(default_factory=dict) + metrics: dict[int, CircuitMetrics] = field(default_factory=dict) + main_feed: CircuitMetrics = field(default_factory=CircuitMetrics) diff --git a/src/span_panel_api/models.py b/src/span_panel_api/models.py new file mode 100644 index 0000000..f980390 --- /dev/null +++ b/src/span_panel_api/models.py @@ -0,0 +1,96 @@ +"""Unified data models for SPAN Panel transports. + +These models provide a transport-agnostic view of panel state, satisfiable +by both Gen2 (OpenAPI/HTTP) and Gen3 (gRPC) clients. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Flag, StrEnum, auto + + +class PanelGeneration(StrEnum): + """Identifies which panel hardware generation a client connects to.""" + + GEN2 = "gen2" + GEN3 = "gen3" + + +class PanelCapability(Flag): + """Bitmask of features a panel transport implementation supports. 
+ + Use these flags at setup time to enable/disable entity platforms: + + caps = client.capabilities + if PanelCapability.RELAY_CONTROL in caps: + platforms.append("switch") + if PanelCapability.BATTERY in caps: + platforms.append("battery_sensor") + """ + + NONE = 0 + RELAY_CONTROL = auto() # Can open/close circuit relays (switch entities) + PRIORITY_CONTROL = auto() # Can set circuit load priorities (select entities) + ENERGY_HISTORY = auto() # Reports Wh accumulation data + BATTERY = auto() # Exposes battery/storage state of energy + AUTHENTICATION = auto() # Supports/requires JWT auth + SOLAR = auto() # Has solar inverter / feedthrough tab data + DSM_STATE = auto() # Demand-side management state + HARDWARE_STATUS = auto() # Door state, detailed hardware info + PUSH_STREAMING = auto() # Delivers push updates via callback + + # Convenience composites + GEN2_FULL = ( + RELAY_CONTROL | PRIORITY_CONTROL | ENERGY_HISTORY | BATTERY | AUTHENTICATION | SOLAR | DSM_STATE | HARDWARE_STATUS + ) + GEN3_INITIAL = PUSH_STREAMING # Expand as Gen3 API matures + + +@dataclass +class SpanCircuitSnapshot: + """Transport-agnostic snapshot of a single circuit's state and metrics.""" + + circuit_id: str + name: str + power_w: float + voltage_v: float + current_a: float + is_on: bool + # Gen2-only (None for Gen3) + relay_state: str | None = None + priority: str | None = None + tabs: list[int] | None = None + energy_produced_wh: float | None = None + energy_consumed_wh: float | None = None + # Gen3-only + apparent_power_va: float | None = None + reactive_power_var: float | None = None + frequency_hz: float | None = None + power_factor: float | None = None + is_dual_phase: bool = False + + +@dataclass +class SpanPanelSnapshot: + """Transport-agnostic snapshot of the full panel state. + + Fields that are None were not reported by the transport (e.g. Gen3 does + not report energy history, battery SOE, or DSM state). + """ + + panel_generation: PanelGeneration + serial_number: str = "" + firmware_version: str = "" + circuits: dict[str, SpanCircuitSnapshot] = field(default_factory=dict) + main_power_w: float = 0.0 + # Gen2-only + main_relay_state: str | None = None + grid_power_w: float | None = None + battery_soe: float | None = None + dsm_state: str | None = None + dsm_grid_state: str | None = None + # Gen3-only + main_voltage_v: float | None = None + main_current_a: float | None = None + main_frequency_hz: float | None = None diff --git a/src/span_panel_api/protocol.py b/src/span_panel_api/protocol.py new file mode 100644 index 0000000..37a4886 --- /dev/null +++ b/src/span_panel_api/protocol.py @@ -0,0 +1,102 @@ +"""Protocol definitions for SPAN Panel transport clients. + +Two complementary mechanisms provide transport-agnostic access: + +1. **PanelCapability flags** (in models.py) — runtime advertisement of what a + client supports. Read at setup time to enable/disable entity platforms. + +2. **Protocol classes** (this module) — static type narrowing. The core + SpanPanelClientProtocol is required by every transport. The capability + Protocols are optional mixins that allow type-safe dispatch to optional + methods without ``# type: ignore``. 
+ +Usage pattern: + + caps = client.capabilities + # Runtime gating — decide which platforms to load + if PanelCapability.RELAY_CONTROL in caps: + platforms.append("switch") + + # Static narrowing — type-safe optional method dispatch + if isinstance(client, CircuitControlProtocol): + await client.set_circuit_relay(circuit_id, "OPEN") +""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import Protocol, runtime_checkable + +from .models import PanelCapability, SpanPanelSnapshot + + +@runtime_checkable +class SpanPanelClientProtocol(Protocol): + """Core protocol all SPAN panel transport clients must satisfy.""" + + @property + def capabilities(self) -> PanelCapability: ... + + async def connect(self) -> bool: ... + + async def close(self) -> None: ... + + async def ping(self) -> bool: ... + + async def get_snapshot(self) -> SpanPanelSnapshot: ... + + +@runtime_checkable +class AuthCapableProtocol(Protocol): + """Mixin: panels that require JWT authentication (Gen2). + + Check: ``PanelCapability.AUTHENTICATION in client.capabilities`` + """ + + async def authenticate( + self, + name: str, + description: str = "", + otp: str | None = None, + ) -> object: ... + + def set_access_token(self, token: str) -> None: ... + + +@runtime_checkable +class CircuitControlProtocol(Protocol): + """Mixin: panels that support circuit relay and priority writes (Gen2). + + Check: ``PanelCapability.RELAY_CONTROL in client.capabilities`` + """ + + async def set_circuit_relay(self, circuit_id: str, state: str) -> object: ... + + async def set_circuit_priority(self, circuit_id: str, priority: str) -> object: ... + + +@runtime_checkable +class EnergyCapableProtocol(Protocol): + """Mixin: panels that expose energy history and battery SOE (Gen2). + + Check: ``PanelCapability.BATTERY in client.capabilities`` + + Battery SOE percentage is also available via + ``SpanPanelSnapshot.battery_soe`` returned from ``get_snapshot()``. + """ + + async def get_storage_soe(self) -> object: ... + + +@runtime_checkable +class StreamingCapableProtocol(Protocol): + """Mixin: panels using push-streaming (Gen3 gRPC). + + Check: ``PanelCapability.PUSH_STREAMING in client.capabilities`` + """ + + def register_callback(self, cb: Callable[[], None]) -> Callable[[], None]: ... + + async def start_streaming(self) -> None: ... + + async def stop_streaming(self) -> None: ... From 47b910e9de2d2fcf1f10dd09c250dabf04fc28cc Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 13:18:55 -0800 Subject: [PATCH 02/15] Phase 2a: extend SpanPanelSnapshot with Gen2 panel and hardware status fields Add 17 optional Gen2-specific fields to SpanPanelSnapshot so that the HA integration can derive all domain objects (SpanPanelHardwareStatus, SpanPanelData, SpanPanelCircuit, SpanPanelStorageBattery) from a single snapshot. Gen3 clients leave these fields None; Gen2-only entities are capability-gated in the integration. Fields added to SpanPanelSnapshot: - feedthrough_power_w, feedthrough_energy_produced/consumed_wh - main_meter_energy_produced/consumed_wh - current_run_config - hardware_door_state, hardware_uptime, hardware_proximity_proven - hardware_is_ethernet/wifi/cellular_connected - hardware_update_status, hardware_env, hardware_manufacturer, hardware_model Updated SpanPanelClient.get_snapshot() to populate all new fields from the existing get_status(), get_panel_state(), get_circuits() calls. 
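For illustration, here is a minimal sketch of how a consumer could fold the new Gen2-only fields into a hardware-status view, treating `None` as "not reported by this transport". Only the snapshot field names come from this change; the helper function itself is hypothetical:

```python
from span_panel_api.models import SpanPanelSnapshot


def derive_hardware_status(snapshot: SpanPanelSnapshot) -> dict[str, object] | None:
    """Collect the Gen2-only hardware fields, or return None for Gen3 snapshots."""
    if snapshot.hardware_model is None:
        # Gen3 clients leave all hardware_* fields as None, so the caller can
        # skip creating hardware-status entities entirely.
        return None
    return {
        "door_state": snapshot.hardware_door_state,
        "uptime": snapshot.hardware_uptime,
        "ethernet_connected": snapshot.hardware_is_ethernet_connected,
        "wifi_connected": snapshot.hardware_is_wifi_connected,
        "cellular_connected": snapshot.hardware_is_cellular_connected,
        "manufacturer": snapshot.hardware_manufacturer,
        "model": snapshot.hardware_model,
        "firmware_version": snapshot.firmware_version,
    }
```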
--- docs/Dev/grpc-transport-design.md | 46 ++++++++++++++++++++++++++++--- src/span_panel_api/client.py | 21 ++++++++++++++ src/span_panel_api/models.py | 19 ++++++++++++- 3 files changed, 81 insertions(+), 5 deletions(-) diff --git a/docs/Dev/grpc-transport-design.md b/docs/Dev/grpc-transport-design.md index e6dd9b2..48d9903 100644 --- a/docs/Dev/grpc-transport-design.md +++ b/docs/Dev/grpc-transport-design.md @@ -131,7 +131,9 @@ pip install span-panel-api[grpc] ## Unified Snapshot -`get_snapshot()` is available on all transport clients and returns a `SpanPanelSnapshot` containing the current state. Fields not supported by a transport are `None`. +`get_snapshot()` is the **primary interface** between the library and the HA integration. It is available on all transport clients and returns a `SpanPanelSnapshot` containing the current state. Fields not supported by a transport are `None`. + +The integration should call `get_snapshot()` exclusively and never use generation-specific client methods (OpenAPI calls, gRPC trait calls) directly. This keeps the integration insulated from both transport implementations. ```python snapshot = await client.get_snapshot() @@ -153,6 +155,13 @@ print(snapshot.main_voltage_v) print(snapshot.main_frequency_hz) ``` +### What `get_snapshot()` does per transport + +| Transport | Implementation | +| ------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `SpanPanelClient` (Gen2) | Fires `get_status()`, `get_panel_state()`, `get_circuits()`, `get_storage_soe()` concurrently; maps OpenAPI types to `SpanPanelSnapshot`. Individual methods are internal — callers should not invoke them directly. | +| `SpanGrpcClient` (Gen3) | Reads the in-memory `PanelData` cache the streaming loop maintains. No I/O — safe and cheap to call from a push-update callback. | + --- ## Gen3 gRPC Implementation Notes @@ -164,6 +173,24 @@ print(snapshot.main_frequency_hz) --- +## Hardware Validation Required + +The following items are implemented but **untested against real Gen3 hardware** (MLO48 / MAIN40). They were derived from PR #169 (`Griswoldlabs:gen3-grpc-support`) which demonstrated connectivity but whose transport code was not merged. 
+ +| Item | File | What to validate | +| ------------------------------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------ | +| `connect()` + circuit discovery | `grpc/client.py` | `GetInstances` response parses correctly; circuits populated with correct IIDs | +| Streaming loop | `grpc/client.py` | `Subscribe` stream delivers notifications; callbacks fire on metric updates | +| Protobuf field IDs | `grpc/const.py` | Trait IDs 15/16/17/26/27/31, `VENDOR_SPAN`, `PRODUCT_GEN3_PANEL`, `MAIN_FEED_IID`, `METRIC_IID_OFFSET` are correct for production firmware | +| `_decode_main_feed()` | `grpc/client.py` | Field 14 in `Subscribe` notification contains main feed metrics; power/voltage/current parse correctly | +| `_decode_circuit_metrics()` | `grpc/client.py` | Per-circuit metrics (power, voltage A/B, dual-phase detection) decode correctly | +| `get_snapshot()` conversion | `grpc/client.py` | `SpanCircuitSnapshot` fields populated with correct values from live data | +| Auto-detection | `factory.py` | Gen2 HTTP probe completes before Gen3 gRPC probe when both fail; Gen3 detected on port 50065 when panel is present | + +If any field IDs or message structure differs from production firmware, `grpc/const.py` and the decode functions in `grpc/client.py` are the only files that need updating — no protocol or model changes required. + +--- + ## How the HA Integration Uses This ### Phase 1 — Implemented (span v1.3.2, span-panel-api v1.1.15) @@ -182,8 +209,19 @@ print(snapshot.main_frequency_hz) 5. **`const.py`**: Added `CONF_PANEL_GENERATION = "panel_generation"`. -### Phase 2 — Deferred (requires Gen3 hardware) +### Phase 2a — Snapshot migration (Gen2 hardware sufficient) + +The integration currently populates its domain objects from four individual API calls. This phase migrates to `get_snapshot()` as the single data-fetch path, removing all OpenAPI type dependencies above the library boundary: + +- **`span_panel_api.py`**: `update()` calls `client.get_snapshot()` instead of individual methods. +- **`span_panel.py`**: Populated from `SpanPanelSnapshot` fields rather than OpenAPI response objects. +- **`span_panel_circuit.py`**: Wraps `SpanCircuitSnapshot` instead of the OpenAPI `Circuit` type. Entity classes need no changes. + +After this phase, entities already read from `SpanCircuitSnapshot`-backed properties, so overlapping Gen3 metrics (power) require no additional entity work. + +### Phase 2b — Gen3 runtime wiring (requires Gen3 hardware, depends on 2a) -- **`coordinator.py`**: `SpanPanelPushCoordinator` — calls `client.register_callback()` and `start_streaming()`, drives entity updates without polling. - **`span_panel_api.py`**: `_create_client()` Gen3 branch — instantiates `SpanGrpcClient` when `CONF_PANEL_GENERATION == "gen3"`; widens `_client` to `SpanPanelClientProtocol | None`. -- **`sensors/factory.py`**: Gen3 power-metric sensor entities (voltage, current, apparent power, reactive power, frequency, power factor per circuit). +- **`coordinator.py`**: `SpanPanelPushCoordinator` — calls `client.register_callback()` and `start_streaming()`, drives entity updates without polling. `get_snapshot()` in the callback is a cheap in-memory read. +- **`__init__.py`**: Coordinator selected at setup time based on `PUSH_STREAMING` capability. +- **`sensors/factory.py`**: Gen3-only sensor entities — voltage, current, apparent power, reactive power, frequency, power factor per circuit. 
These have no Gen2 equivalent and are created only when the field is non-`None` in the first snapshot. diff --git a/src/span_panel_api/client.py b/src/span_panel_api/client.py index 5e1e457..a09c79c 100644 --- a/src/span_panel_api/client.py +++ b/src/span_panel_api/client.py @@ -1843,6 +1843,9 @@ async def get_snapshot(self) -> SpanPanelSnapshot: energy_consumed_wh=circuit.consumed_energy_wh, ) + main_meter_energy = panel_state.main_meter_energy + feedthrough_energy = panel_state.feedthrough_energy + return SpanPanelSnapshot( panel_generation=PanelGeneration.GEN2, serial_number=status.system.serial, @@ -1854,6 +1857,24 @@ async def get_snapshot(self) -> SpanPanelSnapshot: battery_soe=battery_soe, dsm_state=panel_state.dsm_state, dsm_grid_state=panel_state.dsm_grid_state, + # Panel data + feedthrough_power_w=panel_state.feedthrough_power_w, + main_meter_energy_produced_wh=main_meter_energy.produced_energy_wh, + main_meter_energy_consumed_wh=main_meter_energy.consumed_energy_wh, + feedthrough_energy_produced_wh=feedthrough_energy.produced_energy_wh, + feedthrough_energy_consumed_wh=feedthrough_energy.consumed_energy_wh, + current_run_config=panel_state.current_run_config, + # Hardware status + hardware_door_state=str(status.system.door_state), + hardware_uptime=status.system.uptime, + hardware_is_ethernet_connected=status.network.eth_0_link, + hardware_is_wifi_connected=status.network.wlan_link, + hardware_is_cellular_connected=status.network.wwan_link, + hardware_update_status=status.software.update_status, + hardware_env=status.software.env, + hardware_manufacturer=status.system.manufacturer, + hardware_model=status.system.model, + hardware_proximity_proven=getattr(status.system, "proximity_proven", None), ) async def close(self) -> None: diff --git a/src/span_panel_api/models.py b/src/span_panel_api/models.py index f980390..f9880a8 100644 --- a/src/span_panel_api/models.py +++ b/src/span_panel_api/models.py @@ -84,12 +84,29 @@ class SpanPanelSnapshot: firmware_version: str = "" circuits: dict[str, SpanCircuitSnapshot] = field(default_factory=dict) main_power_w: float = 0.0 - # Gen2-only + # Gen2-only panel fields main_relay_state: str | None = None grid_power_w: float | None = None battery_soe: float | None = None dsm_state: str | None = None dsm_grid_state: str | None = None + feedthrough_power_w: float | None = None + feedthrough_energy_produced_wh: float | None = None + feedthrough_energy_consumed_wh: float | None = None + main_meter_energy_produced_wh: float | None = None + main_meter_energy_consumed_wh: float | None = None + current_run_config: str | None = None + # Gen2-only hardware status fields + hardware_door_state: str | None = None + hardware_uptime: int | None = None + hardware_is_ethernet_connected: bool | None = None + hardware_is_wifi_connected: bool | None = None + hardware_is_cellular_connected: bool | None = None + hardware_update_status: str | None = None + hardware_env: str | None = None + hardware_manufacturer: str | None = None + hardware_model: str | None = None + hardware_proximity_proven: bool | None = None # Gen3-only main_voltage_v: float | None = None main_current_a: float | None = None From e75eb52a415259c875d3f2f8523ff2bc7171cad2 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 15:32:24 -0800 Subject: [PATCH 03/15] fix(grpc): replace fixed IID offset with positional circuit pairing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The original _parse_instances() computed circuit_id as instance_id - 
METRIC_IID_OFFSET (hardcoded 27), reverse-engineered from one MAIN40 where trait 26 IIDs happened to be 28-52. On the MLO48, trait 26 IIDs are [2, 35, 36, ...] — the offset differs, so most computed circuit_ids were out of range and silently discarded, leaving the panel with no circuits discovered. Reported in PR #169. Two bugs fixed: 1. Offset-based circuit_id: replaced with positional pairing. Trait 16 and trait 26 IIDs are now collected independently, sorted, deduplicated, and paired by position (circuit_id = idx + 1). Works correctly regardless of actual IID values or panel model. 2. GetRevision instance_id: _get_circuit_name() was passing the positional circuit_id as the trait 16 instance ID. On the MAIN40 this accidentally worked (IIDs 1-25 match positions); on the MLO48 trait 16 IIDs are non-contiguous so names were fetched from wrong instances. CircuitInfo now stores name_iid (the actual trait 16 IID) and _fetch_circuit_names() uses it directly. Also adds _metric_iid_to_circuit reverse map built at connect time for O(1) streaming lookup, replacing the broken IID-offset arithmetic in _decode_and_store_metric(). Removes METRIC_IID_OFFSET from grpc/const.py — the constant embodied the incorrect assumption. Updates grpc-transport-design.md with root cause analysis and fix. --- docs/Dev/grpc-transport-design.md | 82 +++++++++++++++++++++------- src/span_panel_api/grpc/client.py | 91 ++++++++++++++++++++++--------- src/span_panel_api/grpc/const.py | 3 - src/span_panel_api/grpc/models.py | 3 +- 4 files changed, 130 insertions(+), 49 deletions(-) diff --git a/docs/Dev/grpc-transport-design.md b/docs/Dev/grpc-transport-design.md index 48d9903..fa2f798 100644 --- a/docs/Dev/grpc-transport-design.md +++ b/docs/Dev/grpc-transport-design.md @@ -169,7 +169,29 @@ print(snapshot.main_frequency_hz) - **No authentication**: Gen3 panels accept connections on port 50065 without any token or credential. - **Manual protobuf**: The client uses hand-written varint/field parsing to avoid requiring generated stubs — only `grpcio` is needed. - **Push streaming**: After `start_streaming()`, the client calls registered callbacks on every `Subscribe` notification. Use `get_snapshot()` inside a callback to read the latest data. -- **Circuit discovery**: On `connect()`, `GetInstances` is called to discover all circuit IIDs (trait 26, offset 27), then `GetRevision` on trait 16 fetches the human-readable name for each circuit. +- **Circuit discovery**: On `connect()`, `GetInstances` is called to collect trait 16 IIDs (circuit names) and trait 26 IIDs (power metrics) independently. Both lists are sorted and deduplicated, then paired by position to build the circuit map. + `GetRevision` on trait 16 is then called for each circuit using its discovered trait 16 IID. See _Circuit IID Mapping Bug_ below. + +--- + +## Circuit IID Mapping Bug — Fixed + +**Reported**: PR #169 comment, MLO48 user (`cecilkootz`). Circuit names were paired with the wrong power readings. + +**Root cause — offset assumption**: The original `_parse_instances()` computed circuit position as `circuit_id = instance_id - METRIC_IID_OFFSET` where `METRIC_IID_OFFSET = 27`. This was reverse-engineered from one MAIN40 where trait 26 IIDs happened to be +28–52 (offset exactly 27). On the MLO48, trait 26 IIDs were `[2, 35, 36, 37, …]` — the offset varies, so most computed `circuit_id` values were negative or > 50 and were silently discarded. Result: no circuits discovered on the MLO48. 
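As a quick illustration of the failure (IID values taken from the report above; the snippet itself is not part of the library):

```python
# The removed assumption: metric IID == circuit number + 27
METRIC_IID_OFFSET = 27

# MAIN40 where the offset was reverse-engineered: trait 26 IIDs 28-52
main40_iids = list(range(28, 53))
print([iid - METRIC_IID_OFFSET for iid in main40_iids][:3])  # [1, 2, 3]; happens to line up

# MLO48 from the report: trait 26 IIDs start [2, 35, 36, 37, ...]
mlo48_iids = [2, 35, 36, 37]
print([iid - METRIC_IID_OFFSET for iid in mlo48_iids])  # [-25, 8, 9, 10]
# These no longer correspond to circuit positions, and anything outside 1..50
# was silently dropped by the old range check.
```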
+ +**Root cause — name IID assumption**: `_get_circuit_name(circuit_id)` passed the positional `circuit_id` as the GetRevision `instance_id`. On the MAIN40 this accidentally worked because trait 16 IIDs happened to equal circuit positions (1, 2, 3, …). The +MLO48 has non-contiguous trait 16 IIDs (skipping positions 20, 22, 33), so names were fetched from wrong or nonexistent instances. + +**Fix** (`grpc/client.py`, `grpc/models.py`, `grpc/const.py`): + +- `_parse_instances()` now collects trait 16 IIDs and trait 26 IIDs into two separate lists during a single `GetInstances` pass. Both lists are sorted and deduplicated, then **paired by position**: `circuit_id = idx + 1` regardless of actual IID values. +- `CircuitInfo` gains a `name_iid` field (the trait 16 instance ID). `_fetch_circuit_names()` uses `info.name_iid` for each GetRevision call instead of the positional circuit_id. +- `_metric_iid_to_circuit: dict[int, int]` is built at connect time as a reverse map from trait 26 IID → circuit_id. `_decode_and_store_metric()` uses O(1) dict lookup instead of the broken `iid - METRIC_IID_OFFSET` arithmetic. +- `METRIC_IID_OFFSET` removed from `grpc/const.py` — the constant embodied the wrong assumption. + +This fix is panel-model-agnostic: MAIN40, MLO48, and any future Gen3 variant are handled correctly regardless of how their firmware assigns IID values. --- @@ -177,15 +199,15 @@ print(snapshot.main_frequency_hz) The following items are implemented but **untested against real Gen3 hardware** (MLO48 / MAIN40). They were derived from PR #169 (`Griswoldlabs:gen3-grpc-support`) which demonstrated connectivity but whose transport code was not merged. -| Item | File | What to validate | -| ------------------------------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------------------------ | -| `connect()` + circuit discovery | `grpc/client.py` | `GetInstances` response parses correctly; circuits populated with correct IIDs | -| Streaming loop | `grpc/client.py` | `Subscribe` stream delivers notifications; callbacks fire on metric updates | -| Protobuf field IDs | `grpc/const.py` | Trait IDs 15/16/17/26/27/31, `VENDOR_SPAN`, `PRODUCT_GEN3_PANEL`, `MAIN_FEED_IID`, `METRIC_IID_OFFSET` are correct for production firmware | -| `_decode_main_feed()` | `grpc/client.py` | Field 14 in `Subscribe` notification contains main feed metrics; power/voltage/current parse correctly | -| `_decode_circuit_metrics()` | `grpc/client.py` | Per-circuit metrics (power, voltage A/B, dual-phase detection) decode correctly | -| `get_snapshot()` conversion | `grpc/client.py` | `SpanCircuitSnapshot` fields populated with correct values from live data | -| Auto-detection | `factory.py` | Gen2 HTTP probe completes before Gen3 gRPC probe when both fail; Gen3 detected on port 50065 when panel is present | +| Item | File | What to validate | +| ------------------------------- | ---------------- | ------------------------------------------------------------------------------------------------------------------------ | +| `connect()` + circuit discovery | `grpc/client.py` | `GetInstances` response parses correctly; circuits populated via positional pairing; `name_iid` and `metric_iid` correct | +| Streaming loop | `grpc/client.py` | `Subscribe` stream delivers notifications; `_metric_iid_to_circuit` lookup resolves correctly; callbacks fire on updates | +| Protobuf field IDs | `grpc/const.py` | Trait IDs 15/16/17/26/27/31, `VENDOR_SPAN`, 
`PRODUCT_GEN3_PANEL`, `MAIN_FEED_IID` are correct for production firmware | +| `_decode_main_feed()` | `grpc/client.py` | Field 14 in `Subscribe` notification contains main feed metrics; power/voltage/current parse correctly | +| `_decode_circuit_metrics()` | `grpc/client.py` | Per-circuit metrics (power, voltage A/B, dual-phase detection) decode correctly | +| `get_snapshot()` conversion | `grpc/client.py` | `SpanCircuitSnapshot` fields populated with correct values from live data | +| Auto-detection | `factory.py` | Gen2 HTTP probe completes before Gen3 gRPC probe when both fail; Gen3 detected on port 50065 when panel is present | If any field IDs or message structure differs from production firmware, `grpc/const.py` and the decode functions in `grpc/client.py` are the only files that need updating — no protocol or model changes required. @@ -209,19 +231,39 @@ If any field IDs or message structure differs from production firmware, `grpc/co 5. **`const.py`**: Added `CONF_PANEL_GENERATION = "panel_generation"`. -### Phase 2a — Snapshot migration (Gen2 hardware sufficient) +### Phase 2a — Snapshot migration — **Complete** -The integration currently populates its domain objects from four individual API calls. This phase migrates to `get_snapshot()` as the single data-fetch path, removing all OpenAPI type dependencies above the library boundary: +The integration's domain objects are now populated exclusively from `get_snapshot()`, removing all OpenAPI type dependencies above the library boundary: -- **`span_panel_api.py`**: `update()` calls `client.get_snapshot()` instead of individual methods. +- **`span_panel_api.py`**: `update()` calls `client.get_snapshot()` and maps the returned `SpanPanelSnapshot` into the integration's domain objects. - **`span_panel.py`**: Populated from `SpanPanelSnapshot` fields rather than OpenAPI response objects. -- **`span_panel_circuit.py`**: Wraps `SpanCircuitSnapshot` instead of the OpenAPI `Circuit` type. Entity classes need no changes. +- **`span_panel_circuit.py`**: Wraps `SpanCircuitSnapshot` instead of the OpenAPI `Circuit` type. Entity classes required no changes. + +Entities read from `SpanCircuitSnapshot`-backed properties, so overlapping Gen3 metrics (power) required no additional entity work. + +### Phase 2b — Gen3 runtime wiring — **Complete** + +Push-streaming was folded into the existing `SpanPanelCoordinator` rather than a separate subclass. Key changes: + +- **`span_panel_api.py`**: + + - `_create_client()` Gen3 branch instantiates `SpanGrpcClient` when `CONF_PANEL_GENERATION == "gen3"`; `_client` is typed as `SpanPanelClientProtocol | None`. + - Added `register_push_callback(cb)` — delegates to `client.register_callback()` when the client satisfies `StreamingCapableProtocol`; returns `None` otherwise. This keeps callers from accessing `_client` directly. + +- **`coordinator.py`** (`SpanPanelCoordinator`): + + - Detects `PanelCapability.PUSH_STREAMING in span_panel.api.capabilities` at `__init__` time. + - Gen3: passes `update_interval=None` to `DataUpdateCoordinator` (disables the polling timer), then calls `_register_push_callback()`. + - Gen2: passes `update_interval=timedelta(seconds=scan_interval_seconds)` as before. + - `_on_push_data()` — sync callback invoked by the gRPC stream; guards against stacking concurrent async tasks with a `_push_update_pending` flag. + - `_async_push_update()` — async task that calls `span_panel.update()` then `async_set_updated_data(span_panel)`, driving entity refreshes without a polling cycle. 
+ - `async_shutdown()` — calls the push unregister callable before delegating to `super().async_shutdown()`. + +- **`__init__.py`**: A single `SpanPanelCoordinator` is created for both Gen2 and Gen3; no coordinator selection logic needed because the constructor self-configures based on capabilities. -After this phase, entities already read from `SpanCircuitSnapshot`-backed properties, so overlapping Gen3 metrics (power) require no additional entity work. +- **`span_panel_api.py`**: `SpanPanelApi.__init__` normalises `_panel_generation` to `"gen2"` whenever `simulation_mode=True`. `SpanGrpcClient` has no simulation infrastructure — simulation is Gen2 `SpanPanelClient`-only. This means the generation dropdown + in the config flow has no effect when simulation is checked; the correct transport is selected automatically. -### Phase 2b — Gen3 runtime wiring (requires Gen3 hardware, depends on 2a) +- **Config entry migration (v1 → v2)**: The v1→v2 migration now stamps `CONF_PANEL_GENERATION: "gen2"` onto existing entries that lack the field. All v1 entries pre-date Gen3 support and are definitively Gen2. -- **`span_panel_api.py`**: `_create_client()` Gen3 branch — instantiates `SpanGrpcClient` when `CONF_PANEL_GENERATION == "gen3"`; widens `_client` to `SpanPanelClientProtocol | None`. -- **`coordinator.py`**: `SpanPanelPushCoordinator` — calls `client.register_callback()` and `start_streaming()`, drives entity updates without polling. `get_snapshot()` in the callback is a cheap in-memory read. -- **`__init__.py`**: Coordinator selected at setup time based on `PUSH_STREAMING` capability. -- **`sensors/factory.py`**: Gen3-only sensor entities — voltage, current, apparent power, reactive power, frequency, power factor per circuit. These have no Gen2 equivalent and are created only when the field is non-`None` in the first snapshot. +- **`sensors/factory.py`**: Gen3-only sensor entities — voltage, current, apparent power, reactive power, frequency, power factor per circuit — are created only when the corresponding `SpanCircuitSnapshot` field is non-`None` in the first snapshot. diff --git a/src/span_panel_api/grpc/client.py b/src/span_panel_api/grpc/client.py index 25cc26b..06c4120 100644 --- a/src/span_panel_api/grpc/client.py +++ b/src/span_panel_api/grpc/client.py @@ -26,7 +26,6 @@ BREAKER_OFF_VOLTAGE_MV, DEFAULT_GRPC_PORT, MAIN_FEED_IID, - METRIC_IID_OFFSET, PRODUCT_GEN3_PANEL, TRAIT_CIRCUIT_NAMES, TRAIT_POWER_METRICS, @@ -358,6 +357,8 @@ def __init__(self, host: str, port: int = DEFAULT_GRPC_PORT) -> None: self._data = PanelData() self._callbacks: list[Callable[[], None]] = [] self._connected = False + # Reverse map built at connect time: metric IID -> positional circuit_id + self._metric_iid_to_circuit: dict[int, int] = {} # ------------------------------------------------------------------ # SpanPanelClientProtocol implementation @@ -521,10 +522,19 @@ async def _fetch_instances(self) -> None: self._parse_instances(response) def _parse_instances(self, data: bytes) -> None: - """Parse GetInstancesResponse to discover circuits and panel resource ID.""" + """Parse GetInstancesResponse to discover circuit topology via positional pairing. + + Trait 16 IIDs (circuit names) and trait 26 IIDs (power metrics) are collected + independently, sorted, deduplicated, and paired by position. This avoids any + fixed IID offset assumption — the offset varies between panel models and firmware + versions (e.g. MAIN40 uses offset ~27, MLO48 uses different offsets). 
+ """ fields = _parse_protobuf_fields(data) items = fields.get(1, []) + raw_name_iids: list[int] = [] + raw_metric_iids: list[int] = [] + for item_data in items: if not isinstance(item_data, bytes): continue @@ -581,15 +591,46 @@ def _parse_instances(self, data: bytes) -> None: if product_id == PRODUCT_GEN3_PANEL and resource_id_str and not self._data.panel_resource_id: self._data.panel_resource_id = resource_id_str - # Detect power metric circuits (trait 26) - if trait_id == TRAIT_POWER_METRICS and vendor_id == VENDOR_SPAN: - circuit_id = instance_id - METRIC_IID_OFFSET - if 1 <= circuit_id <= 50 and circuit_id not in self._data.circuits: - self._data.circuits[circuit_id] = CircuitInfo( - circuit_id=circuit_id, - name=f"Circuit {circuit_id}", - metric_iid=instance_id, - ) + if vendor_id != VENDOR_SPAN or instance_id <= 0: + continue + + if trait_id == TRAIT_CIRCUIT_NAMES: + raw_name_iids.append(instance_id) + elif trait_id == TRAIT_POWER_METRICS and instance_id != MAIN_FEED_IID: + raw_metric_iids.append(instance_id) + + # Deduplicate and sort both IID lists before pairing + name_iids = sorted(set(raw_name_iids)) + metric_iids = sorted(set(raw_metric_iids)) + + _LOGGER.debug( + "Discovered %d name instances (trait 16) and %d metric instances (trait 26, excl main feed). " + "Name IIDs: %s, Metric IIDs: %s", + len(name_iids), + len(metric_iids), + name_iids[:10], + metric_iids[:10], + ) + if len(name_iids) != len(metric_iids): + _LOGGER.warning( + "Trait 16 has %d instances but trait 26 has %d — pairing by position (some circuits may be unnamed)", + len(name_iids), + len(metric_iids), + ) + + # Pair by position: circuit_id is a stable 1-based positional index + for idx, metric_iid in enumerate(metric_iids): + circuit_id = idx + 1 + name_iid = name_iids[idx] if idx < len(name_iids) else 0 + self._data.circuits[circuit_id] = CircuitInfo( + circuit_id=circuit_id, + name=f"Circuit {circuit_id}", + metric_iid=metric_iid, + name_iid=name_iid, + ) + + # Reverse map for O(1) lookup during streaming + self._metric_iid_to_circuit = {info.metric_iid: cid for cid, info in self._data.circuits.items()} # ------------------------------------------------------------------ # Internal: circuit names @@ -597,23 +638,25 @@ def _parse_instances(self, data: bytes) -> None: async def _fetch_circuit_names(self) -> None: """Fetch circuit names from trait 16 via GetRevision.""" - for circuit_id in list(self._data.circuits.keys()): + for circuit_id, info in list(self._data.circuits.items()): + if info.name_iid == 0: + continue try: - name = await self._get_circuit_name(circuit_id) + name = await self._get_circuit_name_by_iid(info.name_iid) if name: self._data.circuits[circuit_id].name = name except Exception: # pylint: disable=broad-exception-caught - _LOGGER.debug("Failed to get name for circuit %d", circuit_id) + _LOGGER.debug("Failed to get name for circuit %d (name_iid=%d)", circuit_id, info.name_iid) - async def _get_circuit_name(self, circuit_id: int) -> str | None: - """Get a single circuit name via GetRevision on trait 16.""" + async def _get_circuit_name_by_iid(self, name_iid: int) -> str | None: + """Get a single circuit name via GetRevision on trait 16 using the trait instance ID.""" if self._channel is None: return None request = self._build_get_revision_request( vendor_id=VENDOR_SPAN, product_id=PRODUCT_GEN3_PANEL, trait_id=TRAIT_CIRCUIT_NAMES, - instance_id=circuit_id, + instance_id=name_iid, ) try: response: bytes = await self._channel.unary_unary(_GET_REVISION)(request) @@ -763,28 +806,26 @@ def 
_process_notification(self, data: bytes) -> None: def _decode_and_store_metric(self, iid: int, raw: bytes) -> None: """Decode a raw metric payload and store it in self._data.""" - top_fields = _parse_protobuf_fields(raw) - # Main feed (IID 1) uses field 14 with deeper nesting if iid == MAIN_FEED_IID: self._data.main_feed = _decode_main_feed(raw) return - circuit_id = iid - METRIC_IID_OFFSET - if not 1 <= circuit_id <= 50: + circuit_id = self._metric_iid_to_circuit.get(iid) + if circuit_id is None: return + top_fields = _parse_protobuf_fields(raw) + # Dual-phase (field 12) — check first (more specific) dual_data = _get_field(top_fields, 12) if isinstance(dual_data, bytes): self._data.metrics[circuit_id] = _decode_dual_phase(dual_data) - if circuit_id in self._data.circuits: - self._data.circuits[circuit_id].is_dual_phase = True + self._data.circuits[circuit_id].is_dual_phase = True return # Single-phase (field 11) single_data = _get_field(top_fields, 11) if isinstance(single_data, bytes): self._data.metrics[circuit_id] = _decode_single_phase(single_data) - if circuit_id in self._data.circuits: - self._data.circuits[circuit_id].is_dual_phase = False + self._data.circuits[circuit_id].is_dual_phase = False diff --git a/src/span_panel_api/grpc/const.py b/src/span_panel_api/grpc/const.py index 1b7312e..64b4e7d 100644 --- a/src/span_panel_api/grpc/const.py +++ b/src/span_panel_api/grpc/const.py @@ -17,9 +17,6 @@ PRODUCT_GEN3_PANEL: int = 4 PRODUCT_GEN3_GATEWAY: int = 5 -# Metric IID offset: circuit N -> metric IID = N + METRIC_IID_OFFSET -METRIC_IID_OFFSET: int = 27 - # Main feed IID (always 1 for trait 26) MAIN_FEED_IID: int = 1 diff --git a/src/span_panel_api/grpc/models.py b/src/span_panel_api/grpc/models.py index 40b6a68..85e2ef3 100644 --- a/src/span_panel_api/grpc/models.py +++ b/src/span_panel_api/grpc/models.py @@ -17,7 +17,8 @@ class CircuitInfo: circuit_id: int name: str - metric_iid: int + metric_iid: int # trait 26 IID — used to match Subscribe stream notifications + name_iid: int = 0 # trait 16 IID — used for GetRevision name lookups is_dual_phase: bool = False From 08b67e92221ddfb9437f49a7a9413108ddc7727d Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 15:47:02 -0800 Subject: [PATCH 04/15] docs: add Developer Setup for Hardware Testing section Covers editable install workflow for both local HA core and Docker container deployments, debug logging config, diagnostic symptom table, and iteration workflow for protobuf decoder fixes. --- docs/Dev/grpc-transport-design.md | 106 ++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) diff --git a/docs/Dev/grpc-transport-design.md b/docs/Dev/grpc-transport-design.md index fa2f798..6ac240c 100644 --- a/docs/Dev/grpc-transport-design.md +++ b/docs/Dev/grpc-transport-design.md @@ -213,6 +213,112 @@ If any field IDs or message structure differs from production firmware, `grpc/co --- +## Developer Setup for Hardware Testing + +The gRPC protobuf decoders must be validated against a live Gen3 panel. Publishing the library between every decode fix is impractical — use an **editable install** so changes to `grpc/client.py` or `grpc/const.py` are picked up on the next integration +reload without reinstalling anything. + +### Prerequisites + +- Gen3 panel (MLO48 or MAIN40) reachable on port 50065 +- Python 3.12+, `git` +- Both repos cloned side-by-side: `span-panel-api/` (this library) and `span/` (HA integration) + +### Option A — Local HA Core (fastest iteration) + +```bash +# 1. 
Create a dedicated HA environment (once) +python -m venv ha-venv +source ha-venv/bin/activate +pip install homeassistant + +# 2. Install the library in editable mode (once; survives HA restarts) +pip install -e /path/to/span-panel-api[grpc] + +# 3. Confirm editable install — Location must be a file path, not site-packages +pip show span-panel-api + +# 4. Link the integration into HA config +mkdir -p ~/ha-config/custom_components +ln -s /path/to/span/custom_components/span_panel ~/ha-config/custom_components/span_panel + +# 5. Run HA +hass -c ~/ha-config +``` + +After the editable install, any edit to `src/span_panel_api/grpc/client.py` or `grpc/const.py` is live on the next integration reload — no `pip install` needed. + +### Option B — HA in Docker (Home Assistant Container) + +```bash +# 1. Start HA with both repos volume-mounted +docker run -d \ + --name homeassistant \ + -v /path/to/span-panel-api:/span-panel-api \ + -v /path/to/span/custom_components/span_panel:/config/custom_components/span_panel \ + -v ~/ha-config:/config \ + --network host \ + ghcr.io/home-assistant/home-assistant:stable + +# 2. Install the library in editable mode inside the container +docker exec homeassistant pip install -e /span-panel-api[grpc] + +# 3. Confirm +docker exec homeassistant pip show span-panel-api + +# 4. Restart to pick up the new library +docker restart homeassistant +``` + +The editable install persists across container restarts. If the container is **removed and recreated** (`docker rm`), re-run step 2. + +### Enable Debug Logging + +Add to `~/ha-config/configuration.yaml` before starting HA: + +```yaml +logger: + default: warning + logs: + custom_components.span_panel: debug +``` + +Key log messages to watch for: + +| Log message | Meaning | +| ------------------------------------------------------- | --------------------------------------------- | +| `Span Panel coordinator: Gen3 push-streaming mode` | Capability detection succeeded | +| `Registered Gen3 push-streaming coordinator callback` | Streaming wired up correctly | +| `Gen3 push update failed: …` | Push callback raised — check the error detail | +| `SPAN Panel update cycle completed` in rapid succession | Push-driven updates are flowing | + +### Iteration Workflow + +1. **Edit** `src/span_panel_api/grpc/client.py` or `grpc/const.py` +2. **Reload** the integration: HA UI → Settings → Devices & Services → SPAN Panel → ⋮ → Reload +3. **Check logs** — no HA restart required for most decode changes +4. 
Commit only after the log output confirms correct circuit count and live power readings + +### Diagnostic Symptom Table + +| Symptom | Where to look | +| --------------------------------- | ------------------------------------------------------------------ | +| No circuits discovered | `_parse_instances()` — check `GetInstances` trait filtering | +| Circuits found but power stays 0 | `_decode_and_store_metric()` — check field indices | +| Circuit names wrong or swapped | `_get_circuit_name_by_iid()`, `CircuitInfo.name_iid` | +| No push updates (entities frozen) | `_streaming_loop()` — check `Subscribe` stream delivery | +| Connection refused on port 50065 | `grpc/const.py` — verify `VENDOR_SPAN`, `PRODUCT_GEN3_PANEL`, port | +| Wrong circuit count | `_parse_instances()` — count of trait 26 IIDs vs physical circuits | + +### What a Working Integration Looks Like + +- Circuit count matches panel model (MAIN40 → 40, MLO48 → 48) +- Power readings update within seconds of real load changes +- `main_power_w` approximately equals the sum of active circuit powers +- Log shows `SPAN Panel update cycle completed` on each push notification (not on a fixed polling cadence) + +--- + ## How the HA Integration Uses This ### Phase 1 — Implemented (span v1.3.2, span-panel-api v1.1.15) From 3e854f3702aaca364644d58bed19a0b65520930e Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 16:11:48 -0800 Subject: [PATCH 05/15] docs: add v1.1.15 changelog entry for Gen3 gRPC support --- CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 79bd27f..cf261ee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.1.15] - Unreleased + +### Added + +- **Gen3 gRPC transport** (`grpc/` subpackage): `SpanGrpcClient` connects to Gen3 panels (MAIN40 / MLO48) on port 50065 via manual protobuf encoding. Supports push-streaming via `Subscribe` RPC with registered callbacks. No authentication required. Thanks + to @Griswoldlabs for the Gen3 implementation (PR #169 in `SpanPanel/span`). +- **Protocol abstraction**: `SpanPanelClientProtocol` and capability-mixin protocols (`AuthCapableProtocol`, `CircuitControlProtocol`, `StreamingCapableProtocol`, etc.) provide static type-safe dispatch across transports. +- **`PanelCapability` flags**: Runtime advertisement of transport features. Gen2 advertises `GEN2_FULL`; Gen3 advertises `GEN3_INITIAL` (`PUSH_STREAMING` only). +- **Unified snapshot model**: `SpanPanelSnapshot` and `SpanCircuitSnapshot` are returned by `get_snapshot()` on both transports. Gen2- and Gen3-only fields are `None` where not applicable. +- **`create_span_client()` factory** (`factory.py`): Creates the appropriate client by generation or auto-detects by probing Gen2 HTTP then Gen3 gRPC. +- **Circuit IID mapping fix**: `_parse_instances()` now collects trait-16 and trait-26 IIDs independently, deduplicates and sorts both lists, and pairs them by position. A `_metric_iid_to_circuit` reverse map enables O(1) streaming lookup. Replaces the + hardcoded `METRIC_IID_OFFSET` assumption that failed on MLO48 panels. +- **gRPC exception classes**: `SpanPanelGrpcError`, `SpanPanelGrpcConnectionError`. +- **`grpcio` optional dependency**: Install with `span-panel-api[grpc]` for Gen3 support. 
+ ## [1.1.14] - 12/2025 ### Fixed in v1.1.14 From d8f918f99bc804634f8787bb32675e511725cd33 Mon Sep 17 00:00:00 2001 From: Claude Date: Tue, 17 Feb 2026 22:33:12 -0500 Subject: [PATCH 06/15] fix(grpc): use Trait 15 Breaker Groups for authoritative circuit mapping MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace positional pairing with Breaker Group (BG) based mapping. Each BG IID equals its corresponding trait 26 metric IID and contains an explicit reference to the trait 16 name IID, eliminating fragile positional assumptions. Changes: - Add _fetch_breaker_groups() using trait 15 as authoritative source - Add _query_breaker_group() to parse single/dual-phase BG instances - Add _extract_trait_ref_iid() helper for protobuf ref extraction - Add breaker_position field to CircuitInfo (physical slot 1-48) - Detect dual-phase circuits (field 11=single, field 13=dual) - Build _metric_iid_to_circuit reverse map for O(1) stream lookup - Filter orphan metric IIDs (e.g. 2, 401, 402) automatically - Fall back to positional pairing if no BG instances available Validated on MAIN 40 (25 circuits, 4 dual-phase) and MLO 48 (31 circuits, 10 dual-phase) — all correct. Co-Authored-By: Claude Opus 4.6 --- src/span_panel_api/grpc/client.py | 217 ++++++++++++++++++++++++++---- src/span_panel_api/grpc/models.py | 1 + 2 files changed, 191 insertions(+), 27 deletions(-) diff --git a/src/span_panel_api/grpc/client.py b/src/span_panel_api/grpc/client.py index 06c4120..d6c1a71 100644 --- a/src/span_panel_api/grpc/client.py +++ b/src/span_panel_api/grpc/client.py @@ -27,6 +27,7 @@ DEFAULT_GRPC_PORT, MAIN_FEED_IID, PRODUCT_GEN3_PANEL, + TRAIT_BREAKER_GROUPS, TRAIT_CIRCUIT_NAMES, TRAIT_POWER_METRICS, VENDOR_SPAN, @@ -381,6 +382,7 @@ async def connect(self) -> bool: ], ) await self._fetch_instances() + await self._fetch_breaker_groups() await self._fetch_circuit_names() self._connected = True _LOGGER.info( @@ -522,18 +524,18 @@ async def _fetch_instances(self) -> None: self._parse_instances(response) def _parse_instances(self, data: bytes) -> None: - """Parse GetInstancesResponse to discover circuit topology via positional pairing. + """Parse GetInstancesResponse to discover trait instance IIDs. - Trait 16 IIDs (circuit names) and trait 26 IIDs (power metrics) are collected - independently, sorted, deduplicated, and paired by position. This avoids any - fixed IID offset assumption — the offset varies between panel models and firmware - versions (e.g. MAIN40 uses offset ~27, MLO48 uses different offsets). + Collects IIDs for traits 15 (breaker groups), 16 (names), and 26 (metrics). + The actual circuit mapping is deferred to _fetch_breaker_groups() which uses + trait 15 as the authoritative source for metric→name IID mapping. 
""" fields = _parse_protobuf_fields(data) items = fields.get(1, []) - raw_name_iids: list[int] = [] - raw_metric_iids: list[int] = [] + self._raw_bg_iids: list[int] = [] + self._raw_name_iids: list[int] = [] + self._raw_metric_iids: list[int] = [] for item_data in items: if not isinstance(item_data, bytes): @@ -594,31 +596,191 @@ def _parse_instances(self, data: bytes) -> None: if vendor_id != VENDOR_SPAN or instance_id <= 0: continue - if trait_id == TRAIT_CIRCUIT_NAMES: - raw_name_iids.append(instance_id) + if trait_id == TRAIT_BREAKER_GROUPS: + self._raw_bg_iids.append(instance_id) + elif trait_id == TRAIT_CIRCUIT_NAMES: + self._raw_name_iids.append(instance_id) elif trait_id == TRAIT_POWER_METRICS and instance_id != MAIN_FEED_IID: - raw_metric_iids.append(instance_id) - - # Deduplicate and sort both IID lists before pairing - name_iids = sorted(set(raw_name_iids)) - metric_iids = sorted(set(raw_metric_iids)) + self._raw_metric_iids.append(instance_id) _LOGGER.debug( - "Discovered %d name instances (trait 16) and %d metric instances (trait 26, excl main feed). " - "Name IIDs: %s, Metric IIDs: %s", + "Discovered %d BG instances (trait 15), %d name instances (trait 16), " + "%d metric instances (trait 26, excl main feed)", + len(set(self._raw_bg_iids)), + len(set(self._raw_name_iids)), + len(set(self._raw_metric_iids)), + ) + + # ------------------------------------------------------------------ + # Internal: breaker group mapping (authoritative) + # ------------------------------------------------------------------ + + async def _fetch_breaker_groups(self) -> None: + """Use trait 15 (Breaker Groups) to build the authoritative metric→name mapping. + + Each BG instance shares the same IID as its corresponding trait 26 metric IID. + The BG data contains an explicit reference to the trait 16 name IID, eliminating + the need for positional pairing which fails when phantom metric IIDs exist + (e.g. IID 2, 401, 402 on MAIN40) or when name/metric counts differ (MLO48). + + Single-phase BGs use field 11; dual-phase use field 13. The name reference is + nested at different depths depending on the phase type. + + Falls back to positional pairing if no BG instances are available. 
+ """ + bg_iids = sorted(set(self._raw_bg_iids)) + + if not bg_iids: + _LOGGER.warning("No trait 15 (Breaker Groups) found — falling back to positional pairing") + self._build_circuits_positional() + return + + # Query each BG to extract name_iid, breaker position, and phase type + bg_map: dict[int, tuple[int, int, bool]] = {} # bg_iid -> (name_iid, brk_pos, is_dual) + + for bg_iid in bg_iids: + try: + name_iid, brk_pos, is_dual = await self._query_breaker_group(bg_iid) + bg_map[bg_iid] = (name_iid, brk_pos, is_dual) + except Exception: # pylint: disable=broad-exception-caught + _LOGGER.debug("Failed to query BG IID %d", bg_iid) + + if not bg_map: + _LOGGER.warning("All BG queries failed — falling back to positional pairing") + self._build_circuits_positional() + return + + # Build circuits from BG mapping + for idx, bg_iid in enumerate(sorted(bg_map.keys())): + name_iid, brk_pos, is_dual = bg_map[bg_iid] + circuit_id = idx + 1 + self._data.circuits[circuit_id] = CircuitInfo( + circuit_id=circuit_id, + name=f"Circuit {circuit_id}", + metric_iid=bg_iid, # BG IID == metric IID + name_iid=name_iid, + is_dual_phase=is_dual, + breaker_position=brk_pos, + ) + + # Reverse map for O(1) lookup during streaming + self._metric_iid_to_circuit = { + info.metric_iid: cid for cid, info in self._data.circuits.items() + } + + _LOGGER.info( + "Built %d circuits from BG mapping (%d dual-phase). " + "Excluded %d non-circuit metric IIDs", + len(self._data.circuits), + sum(1 for _, _, d in bg_map.values() if d), + len(set(self._raw_metric_iids)) - len(bg_map), + ) + + async def _query_breaker_group(self, bg_iid: int) -> tuple[int, int, bool]: + """Query a single BG instance to extract its mapping data. + + Returns (name_iid, breaker_position, is_dual_phase). + """ + if self._channel is None: + return (0, 0, False) + + request = self._build_get_revision_request( + vendor_id=VENDOR_SPAN, + product_id=PRODUCT_GEN3_PANEL, + trait_id=TRAIT_BREAKER_GROUPS, + instance_id=bg_iid, + ) + response: bytes = await self._channel.unary_unary(_GET_REVISION)(request) + return self._parse_breaker_group(response) + + @staticmethod + def _extract_trait_ref_iid(ref_data: bytes) -> int: + """Extract an IID from a trait reference sub-message. + + Trait references use: field 2 → field 1 = iid (varint). + Returns 0 if the data cannot be parsed. + """ + if not ref_data or not isinstance(ref_data, bytes): + return 0 + ref_fields = _parse_protobuf_fields(ref_data) + iid_data = _get_field(ref_fields, 2) + if isinstance(iid_data, bytes): + iid_fields = _parse_protobuf_fields(iid_data) + return _get_field(iid_fields, 1, 0) + return 0 + + @staticmethod + def _parse_breaker_group(data: bytes) -> tuple[int, int, bool]: + """Parse a BG GetRevision response. + + Returns (name_iid, breaker_position, is_dual_phase). 
+ + Single-pole (field 11): + f11.f1 → CircuitNames ref (f2.f1 = name_iid) + f11.f2 → BreakerConfig ref (f2.f1 = breaker position) + Dual-pole (field 13): + f13.f1.f1 → CircuitNames ref (f2.f1 = name_iid) + f13.f4 → BreakerConfig leg A ref (f2.f1 = breaker position) + """ + fields = _parse_protobuf_fields(data) + sr_data = _get_field(fields, 3) + if not isinstance(sr_data, bytes): + return (0, 0, False) + + sr_fields = _parse_protobuf_fields(sr_data) + payload = _get_field(sr_fields, 2) + if not isinstance(payload, bytes): + return (0, 0, False) + + pl_fields = _parse_protobuf_fields(payload) + f1 = _get_field(pl_fields, 1) + if not isinstance(f1, bytes): + return (0, 0, False) + + group_fields = _parse_protobuf_fields(f1) + + # Single-pole (field 11) + refs_data = _get_field(group_fields, 11) + if isinstance(refs_data, bytes): + refs = _parse_protobuf_fields(refs_data) + name_ref = _get_field(refs, 1) + config_ref = _get_field(refs, 2) + name_iid = SpanGrpcClient._extract_trait_ref_iid(name_ref or b"") + brk_pos = SpanGrpcClient._extract_trait_ref_iid(config_ref or b"") + return (name_iid, brk_pos, False) + + # Dual-pole (field 13) + dual_data = _get_field(group_fields, 13) + if isinstance(dual_data, bytes): + dual_fields = _parse_protobuf_fields(dual_data) + name_iid = 0 + name_wrapper = _get_field(dual_fields, 1) + if isinstance(name_wrapper, bytes): + wf = _parse_protobuf_fields(name_wrapper) + name_ref = _get_field(wf, 1) + if isinstance(name_ref, bytes): + name_iid = SpanGrpcClient._extract_trait_ref_iid(name_ref) + leg_a_ref = _get_field(dual_fields, 4) + brk_pos = SpanGrpcClient._extract_trait_ref_iid(leg_a_ref or b"") + return (name_iid, brk_pos, True) + + return (0, 0, False) + + def _build_circuits_positional(self) -> None: + """Fallback: build circuits via positional pairing of name and metric IIDs. + + Used only when trait 15 (Breaker Groups) is not available. This approach + can produce incorrect mappings when phantom metric IIDs exist. 
+ """ + name_iids = sorted(set(self._raw_name_iids)) + metric_iids = sorted(set(self._raw_metric_iids)) + + _LOGGER.warning( + "Positional pairing: %d name IIDs, %d metric IIDs", len(name_iids), len(metric_iids), - name_iids[:10], - metric_iids[:10], ) - if len(name_iids) != len(metric_iids): - _LOGGER.warning( - "Trait 16 has %d instances but trait 26 has %d — pairing by position (some circuits may be unnamed)", - len(name_iids), - len(metric_iids), - ) - # Pair by position: circuit_id is a stable 1-based positional index for idx, metric_iid in enumerate(metric_iids): circuit_id = idx + 1 name_iid = name_iids[idx] if idx < len(name_iids) else 0 @@ -629,8 +791,9 @@ def _parse_instances(self, data: bytes) -> None: name_iid=name_iid, ) - # Reverse map for O(1) lookup during streaming - self._metric_iid_to_circuit = {info.metric_iid: cid for cid, info in self._data.circuits.items()} + self._metric_iid_to_circuit = { + info.metric_iid: cid for cid, info in self._data.circuits.items() + } # ------------------------------------------------------------------ # Internal: circuit names diff --git a/src/span_panel_api/grpc/models.py b/src/span_panel_api/grpc/models.py index 85e2ef3..7dfc8f2 100644 --- a/src/span_panel_api/grpc/models.py +++ b/src/span_panel_api/grpc/models.py @@ -20,6 +20,7 @@ class CircuitInfo: metric_iid: int # trait 26 IID — used to match Subscribe stream notifications name_iid: int = 0 # trait 16 IID — used for GetRevision name lookups is_dual_phase: bool = False + breaker_position: int = 0 # physical slot number (1-48) in the panel @dataclass From 2e946dd01e9262ef65da59000028f6c1a9adb4d0 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Tue, 17 Feb 2026 21:26:14 -0800 Subject: [PATCH 07/15] chore: regenerate poetry.lock --- poetry.lock | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 546780e..49d74d6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -181,7 +181,7 @@ description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "platform_python_implementation != \"PyPy\"" +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\" and platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -529,6 +529,7 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.8" groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\"" files = [ {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"}, {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"}, @@ -1698,7 +1699,7 @@ description = "C parser in Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\" and platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, From 5f8277fafb8ee39551b90187c0d25e0f681bdb07 Mon Sep 17 00:00:00 2001 From: Claude Date: Wed, 18 Feb 2026 23:47:41 -0500 Subject: [PATCH 08/15] fix: use panel_resource_id as serial number fallback for Gen3 Gen3 gRPC does not expose serial/firmware via a dedicated trait yet. The panel_resource_id (captured during instance discovery) serves as a unique, stable panel identifier that can be used for entity unique_id generation in Home Assistant. Without this fix, serial_number is empty and HA sensors cannot be registered in the entity registry (no unique_id = no persistent entity). Co-Authored-By: Claude Opus 4.6 --- src/span_panel_api/grpc/client.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/span_panel_api/grpc/client.py b/src/span_panel_api/grpc/client.py index d6c1a71..1898b86 100644 --- a/src/span_panel_api/grpc/client.py +++ b/src/span_panel_api/grpc/client.py @@ -384,12 +384,19 @@ async def connect(self) -> bool: await self._fetch_instances() await self._fetch_breaker_groups() await self._fetch_circuit_names() + + # Use panel_resource_id as serial number fallback. + # Gen3 gRPC does not expose serial/firmware via a dedicated trait yet. 
+ if not self._data.serial and self._data.panel_resource_id: + self._data.serial = self._data.panel_resource_id + self._connected = True _LOGGER.info( - "Connected to Gen3 panel at %s:%s — %d circuits discovered", + "Connected to Gen3 panel at %s:%s — %d circuits discovered (serial=%s)", self._host, self._port, len(self._data.circuits), + self._data.serial, ) return True except Exception: # pylint: disable=broad-exception-caught From 05141e34e1b43ce715e1c2374085b5d10f0d40e1 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 15:04:14 -0800 Subject: [PATCH 09/15] fix: detect stale .deps-installed marker when venv is recreated Add a check before the deps-installed condition to detect when the virtual environment has been recreated (e.g. for a new Python version) but the .deps-installed marker still reflects the old install. Uses pre_commit importability as the sentinel since it is always a dev dependency. Also add exit code checking for `poetry run pre-commit install` so failures are surfaced rather than silently swallowed. --- setup-hooks.sh | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/setup-hooks.sh b/setup-hooks.sh index 457e3bd..ddbe331 100755 --- a/setup-hooks.sh +++ b/setup-hooks.sh @@ -6,6 +6,13 @@ if [[ "$1" == "--update" ]]; then FORCE_UPDATE=true fi +# Detect a stale .deps-installed marker (e.g. venv was recreated after deps were last installed) +VENV_PYTHON="$(poetry env info --path 2>/dev/null)/bin/python" +if [[ -f ".deps-installed" ]] && ! "$VENV_PYTHON" -c "import pre_commit" 2>/dev/null; then + echo "Virtual environment is missing installed packages; reinstalling..." + rm -f .deps-installed +fi + # Ensure dependencies are installed first if [[ ! -f ".deps-installed" ]] || [[ "pyproject.toml" -nt ".deps-installed" ]] || [[ "$FORCE_UPDATE" == "true" ]]; then echo "Installing/updating dependencies..." @@ -25,6 +32,9 @@ if [[ ! -f ".deps-installed" ]] || [[ "pyproject.toml" -nt ".deps-installed" ]] fi # Install pre-commit hooks -poetry run pre-commit install +if ! poetry run pre-commit install; then + echo "Failed to install pre-commit hooks. Please check the output above." >&2 + exit 1 +fi echo "Git hooks installed successfully!" From 8929af9695b442bb6473fd85370445203975a75d Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 15:08:30 -0800 Subject: [PATCH 10/15] chore: update mypy and black target versions for Python 3.14 Home Assistant now requires Python >=3.14.2. Update mypy python_version from 3.13 to 3.14 so type checking reflects the actual runtime. Update black target-version from py312 to py313 (py314 not yet supported by black 25.1.0). 
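As a quick illustration of the drift this commit corrects, a minimal stdlib-only sketch (assuming a pyproject.toml in the working directory; the file name and key paths here are illustrative, not part of this change) can compare the configured tool targets against the running interpreter:

    import sys
    import tomllib  # standard library since Python 3.11

    with open("pyproject.toml", "rb") as f:
        config = tomllib.load(f)

    # Target versions as configured for the type checker and formatter
    mypy_target = config.get("tool", {}).get("mypy", {}).get("python_version", "")
    black_targets = config.get("tool", {}).get("black", {}).get("target-version", [])

    runtime = f"{sys.version_info.major}.{sys.version_info.minor}"
    if mypy_target != runtime:
        print(f"mypy python_version ({mypy_target}) lags the runtime ({runtime})")
    if f"py{runtime.replace('.', '')}" not in black_targets:
        print(f"black target-version {black_targets} does not cover the runtime ({runtime})")

With black 25.1.0 the second check is expected to report py314 as uncovered, which is why target-version stops at py313 for now.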
--- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index aa215fb..b0ca9a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,7 +113,7 @@ combine-as-imports = true split-on-trailing-comma = false [tool.mypy] -python_version = "3.13" +python_version = "3.14" strict = true warn_return_any = true warn_unused_configs = true @@ -287,7 +287,7 @@ max-line-length = 125 [tool.black] line-length = 125 -target-version = ["py312"] +target-version = ["py313"] skip-string-normalization = true # Allow magic trailing comma for better line length handling skip-magic-trailing-comma = false From 75454755af336c594140a9c7395de95cf93a2674 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 15:18:46 -0800 Subject: [PATCH 11/15] refactor: reduce cyclomatic complexity in grpc/client.py Extract _decode_main_feed_leg() from _decode_main_feed() and _parse_instance_item() from _parse_instances() to bring both methods below the complexity threshold flagged by CodeFactor. - _decode_main_feed: CC 20 -> 7 (D -> B) - _parse_instances: CC 23 -> 13 (D -> C) Also fix pre-existing type and attribute issues surfaced by mypy/pylint when the file was first staged: - _extract_trait_ref_iid: broaden parameter to ProtobufValue | None and fix the return type to always produce int - _parse_breaker_group: remove unnecessary "or b""" coercions now that _extract_trait_ref_iid handles None directly - Initialize _raw_bg_iids/_raw_name_iids/_raw_metric_iids in __init__ to satisfy pylint attribute-defined-outside-init --- src/span_panel_api/grpc/client.py | 221 ++++++++++++++++-------------- 1 file changed, 120 insertions(+), 101 deletions(-) diff --git a/src/span_panel_api/grpc/client.py b/src/span_panel_api/grpc/client.py index 1898b86..e1bede3 100644 --- a/src/span_panel_api/grpc/client.py +++ b/src/span_panel_api/grpc/client.py @@ -226,6 +226,41 @@ def _extract_deepest_value(data: bytes, target_field: int = 3) -> int: return best +def _decode_main_feed_leg(leg_data: bytes) -> tuple[float, float, float]: + """Decode a single leg from main feed data. + + Returns ``(power_w, voltage_v, frequency_hz)``. Any undecodable field is + returned as ``0.0``. + """ + leg_fields = _parse_protobuf_fields(leg_data) + + power_w = 0.0 + power_stats = _get_field(leg_fields, 3) + if isinstance(power_stats, bytes): + power_w = _extract_deepest_value(power_stats) / 2000.0 + + voltage_v = 0.0 + voltage_stats = _get_field(leg_fields, 2) + if isinstance(voltage_stats, bytes): + vs_fields = _parse_protobuf_fields(voltage_stats) + f2 = _get_field(vs_fields, 2) + if isinstance(f2, bytes): + inner = _parse_protobuf_fields(f2) + v = _get_field(inner, 3, 0) + if isinstance(v, int) and v > 0: + voltage_v = v / 1000.0 + + frequency_hz = 0.0 + freq_stats = _get_field(leg_fields, 4) + if isinstance(freq_stats, bytes): + freq_fields = _parse_protobuf_fields(freq_stats) + freq_val = _get_field(freq_fields, 3, 0) + if isinstance(freq_val, int) and freq_val > 0: + frequency_hz = freq_val / 1000.0 + + return power_w, voltage_v, frequency_hz + + def _decode_main_feed(data: bytes) -> CircuitMetrics: """Decode main feed metrics from protobuf field 14. 
@@ -246,47 +281,14 @@ def _decode_main_feed(data: bytes) -> CircuitMetrics: # Primary data block (field 1 = leg A) leg_a = _get_field(main_fields, 1) if isinstance(leg_a, bytes): - la_fields = _parse_protobuf_fields(leg_a) - - power_stats = _get_field(la_fields, 3) - if isinstance(power_stats, bytes): - metrics.power_w = _extract_deepest_value(power_stats) / 2000.0 - - voltage_stats = _get_field(la_fields, 2) - if isinstance(voltage_stats, bytes): - vs_fields = _parse_protobuf_fields(voltage_stats) - f2 = _get_field(vs_fields, 2) - if isinstance(f2, bytes): - inner = _parse_protobuf_fields(f2) - v = _get_field(inner, 3, 0) - if isinstance(v, int) and v > 0: - metrics.voltage_a_v = v / 1000.0 - - freq_stats = _get_field(la_fields, 4) - if isinstance(freq_stats, bytes): - freq_fields = _parse_protobuf_fields(freq_stats) - freq_val = _get_field(freq_fields, 3, 0) - if isinstance(freq_val, int) and freq_val > 0: - metrics.frequency_hz = freq_val / 1000.0 + metrics.power_w, metrics.voltage_a_v, metrics.frequency_hz = _decode_main_feed_leg(leg_a) # Leg B (field 2) leg_b = _get_field(main_fields, 2) if isinstance(leg_b, bytes): - lb_fields = _parse_protobuf_fields(leg_b) - power_stats = _get_field(lb_fields, 3) - if isinstance(power_stats, bytes): - lb_power = _extract_deepest_value(power_stats) / 2000.0 - if lb_power > 0: - metrics.power_w += lb_power - voltage_stats = _get_field(lb_fields, 2) - if isinstance(voltage_stats, bytes): - vs_fields = _parse_protobuf_fields(voltage_stats) - f2 = _get_field(vs_fields, 2) - if isinstance(f2, bytes): - inner = _parse_protobuf_fields(f2) - v = _get_field(inner, 3, 0) - if isinstance(v, int) and v > 0: - metrics.voltage_b_v = v / 1000.0 + lb_power, metrics.voltage_b_v, _ = _decode_main_feed_leg(leg_b) + if lb_power > 0: + metrics.power_w += lb_power # Combined voltage (split-phase: leg A + leg B, or 2x leg A if symmetric) if metrics.voltage_b_v > 0: @@ -360,6 +362,10 @@ def __init__(self, host: str, port: int = DEFAULT_GRPC_PORT) -> None: self._connected = False # Reverse map built at connect time: metric IID -> positional circuit_id self._metric_iid_to_circuit: dict[int, int] = {} + # Populated by _parse_instances during connect + self._raw_bg_iids: list[int] = [] + self._raw_name_iids: list[int] = [] + self._raw_metric_iids: list[int] = [] # ------------------------------------------------------------------ # SpanPanelClientProtocol implementation @@ -530,6 +536,67 @@ async def _fetch_instances(self) -> None: response: bytes = await self._channel.unary_unary(_GET_INSTANCES)(b"") self._parse_instances(response) + @staticmethod + def _parse_instance_item( + item_data: bytes, + ) -> tuple[int, int, int, int, str] | None: + """Parse a single GetInstances item. + + Returns ``(vendor_id, product_id, trait_id, instance_id, resource_id_str)`` + or ``None`` if the item is malformed / missing required fields. 
+ """ + item_fields = _parse_protobuf_fields(item_data) + + trait_info_data = _get_field(item_fields, 1) + if not isinstance(trait_info_data, bytes): + return None + + trait_info_fields = _parse_protobuf_fields(trait_info_data) + external_data = _get_field(trait_info_fields, 2) + if not isinstance(external_data, bytes): + return None + + ext_fields = _parse_protobuf_fields(external_data) + + # resource_id (field 1) + resource_id_str = "" + resource_data = _get_field(ext_fields, 1) + if isinstance(resource_data, bytes): + rid_fields = _parse_protobuf_fields(resource_data) + rid_val = _get_field(rid_fields, 1) + if isinstance(rid_val, bytes): + resource_id_str = rid_val.decode("utf-8", errors="replace") + + # trait_info (field 2) + inner_info = _get_field(ext_fields, 2) + if not isinstance(inner_info, bytes): + return None + + inner_fields = _parse_protobuf_fields(inner_info) + meta_data = _get_field(inner_fields, 1) + if not isinstance(meta_data, bytes): + return None + + meta_fields = _parse_protobuf_fields(meta_data) + vendor_id = _get_field(meta_fields, 1, 0) + product_id = _get_field(meta_fields, 2, 0) + trait_id = _get_field(meta_fields, 3, 0) + + instance_id = 0 + instance_data = _get_field(inner_fields, 2) + if isinstance(instance_data, bytes): + iid_fields = _parse_protobuf_fields(instance_data) + iid_raw = _get_field(iid_fields, 1, 0) + instance_id = iid_raw if isinstance(iid_raw, int) else 0 + + return ( + vendor_id if isinstance(vendor_id, int) else 0, + product_id if isinstance(product_id, int) else 0, + trait_id if isinstance(trait_id, int) else 0, + instance_id, + resource_id_str, + ) + def _parse_instances(self, data: bytes) -> None: """Parse GetInstancesResponse to discover trait instance IIDs. @@ -540,61 +607,17 @@ def _parse_instances(self, data: bytes) -> None: fields = _parse_protobuf_fields(data) items = fields.get(1, []) - self._raw_bg_iids: list[int] = [] - self._raw_name_iids: list[int] = [] - self._raw_metric_iids: list[int] = [] + self._raw_bg_iids = [] + self._raw_name_iids = [] + self._raw_metric_iids = [] for item_data in items: if not isinstance(item_data, bytes): continue - item_fields = _parse_protobuf_fields(item_data) - - trait_info_data = _get_field(item_fields, 1) - if not isinstance(trait_info_data, bytes): - continue - - trait_info_fields = _parse_protobuf_fields(trait_info_data) - - external_data = _get_field(trait_info_fields, 2) - if not isinstance(external_data, bytes): + result = self._parse_instance_item(item_data) + if result is None: continue - - ext_fields = _parse_protobuf_fields(external_data) - - # resource_id (field 1) - resource_data = _get_field(ext_fields, 1) - resource_id_str = "" - if isinstance(resource_data, bytes): - rid_fields = _parse_protobuf_fields(resource_data) - rid_val = _get_field(rid_fields, 1) - if isinstance(rid_val, bytes): - resource_id_str = rid_val.decode("utf-8", errors="replace") - - # trait_info (field 2) - inner_info = _get_field(ext_fields, 2) - if not isinstance(inner_info, bytes): - continue - - inner_fields = _parse_protobuf_fields(inner_info) - - meta_data = _get_field(inner_fields, 1) - if not isinstance(meta_data, bytes): - continue - - meta_fields = _parse_protobuf_fields(meta_data) - vendor_id_raw = _get_field(meta_fields, 1, 0) - product_id_raw = _get_field(meta_fields, 2, 0) - trait_id_raw = _get_field(meta_fields, 3, 0) - vendor_id = vendor_id_raw if isinstance(vendor_id_raw, int) else 0 - product_id = product_id_raw if isinstance(product_id_raw, int) else 0 - trait_id = trait_id_raw if 
isinstance(trait_id_raw, int) else 0 - - instance_data = _get_field(inner_fields, 2) - instance_id = 0 - if isinstance(instance_data, bytes): - iid_fields = _parse_protobuf_fields(instance_data) - iid_raw = _get_field(iid_fields, 1, 0) - instance_id = iid_raw if isinstance(iid_raw, int) else 0 + vendor_id, product_id, trait_id, instance_id, resource_id_str = result # Capture panel resource_id if product_id == PRODUCT_GEN3_PANEL and resource_id_str and not self._data.panel_resource_id: @@ -671,13 +694,10 @@ async def _fetch_breaker_groups(self) -> None: ) # Reverse map for O(1) lookup during streaming - self._metric_iid_to_circuit = { - info.metric_iid: cid for cid, info in self._data.circuits.items() - } + self._metric_iid_to_circuit = {info.metric_iid: cid for cid, info in self._data.circuits.items()} _LOGGER.info( - "Built %d circuits from BG mapping (%d dual-phase). " - "Excluded %d non-circuit metric IIDs", + "Built %d circuits from BG mapping (%d dual-phase). Excluded %d non-circuit metric IIDs", len(self._data.circuits), sum(1 for _, _, d in bg_map.values() if d), len(set(self._raw_metric_iids)) - len(bg_map), @@ -701,19 +721,20 @@ async def _query_breaker_group(self, bg_iid: int) -> tuple[int, int, bool]: return self._parse_breaker_group(response) @staticmethod - def _extract_trait_ref_iid(ref_data: bytes) -> int: + def _extract_trait_ref_iid(ref_data: ProtobufValue | None) -> int: """Extract an IID from a trait reference sub-message. Trait references use: field 2 → field 1 = iid (varint). Returns 0 if the data cannot be parsed. """ - if not ref_data or not isinstance(ref_data, bytes): + if not isinstance(ref_data, bytes): return 0 ref_fields = _parse_protobuf_fields(ref_data) iid_data = _get_field(ref_fields, 2) if isinstance(iid_data, bytes): iid_fields = _parse_protobuf_fields(iid_data) - return _get_field(iid_fields, 1, 0) + raw = _get_field(iid_fields, 1, 0) + return raw if isinstance(raw, int) else 0 return 0 @staticmethod @@ -752,8 +773,8 @@ def _parse_breaker_group(data: bytes) -> tuple[int, int, bool]: refs = _parse_protobuf_fields(refs_data) name_ref = _get_field(refs, 1) config_ref = _get_field(refs, 2) - name_iid = SpanGrpcClient._extract_trait_ref_iid(name_ref or b"") - brk_pos = SpanGrpcClient._extract_trait_ref_iid(config_ref or b"") + name_iid = SpanGrpcClient._extract_trait_ref_iid(name_ref) + brk_pos = SpanGrpcClient._extract_trait_ref_iid(config_ref) return (name_iid, brk_pos, False) # Dual-pole (field 13) @@ -768,7 +789,7 @@ def _parse_breaker_group(data: bytes) -> tuple[int, int, bool]: if isinstance(name_ref, bytes): name_iid = SpanGrpcClient._extract_trait_ref_iid(name_ref) leg_a_ref = _get_field(dual_fields, 4) - brk_pos = SpanGrpcClient._extract_trait_ref_iid(leg_a_ref or b"") + brk_pos = SpanGrpcClient._extract_trait_ref_iid(leg_a_ref) return (name_iid, brk_pos, True) return (0, 0, False) @@ -798,9 +819,7 @@ def _build_circuits_positional(self) -> None: name_iid=name_iid, ) - self._metric_iid_to_circuit = { - info.metric_iid: cid for cid, info in self._data.circuits.items() - } + self._metric_iid_to_circuit = {info.metric_iid: cid for cid, info in self._data.circuits.items()} # ------------------------------------------------------------------ # Internal: circuit names From e3db1b00bc2890aa816fa41c8b6edc93c727e944 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 15:19:20 -0800 Subject: [PATCH 12/15] chore: regenerate poetry.lock for Python 3.14 compatibility --- poetry.lock | 242 ++++++++++++++++++++++++++++------------------------ 1 file 
changed, 132 insertions(+), 110 deletions(-) diff --git a/poetry.lock b/poetry.lock index 49d74d6..62aeed0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1707,21 +1707,21 @@ files = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.12.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["generate"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" -typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1729,115 +1729,137 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" groups = ["generate"] files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = 
"pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", 
hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash 
= "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = 
"pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = 
"pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = 
"pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = 
"pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + 
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, ] [package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +typing-extensions = ">=4.14.1" [[package]] name = "pygments" @@ -2470,14 +2492,14 @@ markers = {main = "python_version <= \"3.12\" or extra == \"grpc\""} [[package]] name = "typing-inspection" -version = "0.4.1" +version = "0.4.2" description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" groups = ["generate"] files = [ - {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, - {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, ] [package.dependencies] From ccf2a7e7188bd4dfeb5dfedda971078937dac0b1 Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 15:22:53 -0800 Subject: [PATCH 13/15] ci: replace Python 3.12 with 3.14 to match HA minimum requirement --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4d7417d..1f44ed7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.12", "3.13"] + python-version: ["3.13", "3.14"] steps: - name: Checkout code @@ -62,7 +62,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v6 with: - python-version: "3.12" + python-version: "3.14" - name: Install Poetry uses: snok/install-poetry@v1 @@ -96,7 +96,7 @@ 
jobs: - name: Set up Python uses: actions/setup-python@v6 with: - python-version: "3.12" + python-version: "3.14" - name: Install Poetry uses: snok/install-poetry@v1 From cb0ea43f9ebda73338aecef959be8141a4f9e64f Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 16:04:27 -0800 Subject: [PATCH 14/15] docs: restructure README for Gen2/Gen3 dual-transport support Split the monolithic README into a concise top-level overview with dedicated detail pages: - README.md: high-level introduction, quick start via create_span_client, Gen2 vs Gen3 capability table, documentation table of contents - docs/gen2-client.md: connection patterns, auth, full API reference, timeout/retry/caching, Home Assistant integration, simulation mode - docs/gen3-client.md: gRPC usage, streaming callbacks, snapshot model, low-level PanelData access, error handling - docs/error-handling.md: exception hierarchy, HTTP to exception mapping, retry configuration, Gen3 gRPC errors - docs/development.md: setup, test/lint commands, project structure, OpenAPI client regeneration, Gen3 internals, contributing guide --- README.md | 561 ++++------------------------------------- docs/development.md | 155 ++++++++++++ docs/error-handling.md | 142 +++++++++++ docs/gen2-client.md | 273 ++++++++++++++++++++ docs/gen3-client.md | 197 +++++++++++++++ 5 files changed, 815 insertions(+), 513 deletions(-) create mode 100644 docs/development.md create mode 100644 docs/error-handling.md create mode 100644 docs/gen2-client.md create mode 100644 docs/gen3-client.md diff --git a/README.md b/README.md index be51055..65c48c7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# SPAN Panel OpenAPI Client +# SPAN Panel API Client [![GitHub Release](https://img.shields.io/github/v/release/SpanPanel/span-panel-api?style=flat-square)](https://github.com/SpanPanel/span-panel-api/releases) [![PyPI Version](https://img.shields.io/pypi/v/span-panel-api?style=flat-square)](https://pypi.org/project/span-panel-api/) @@ -17,554 +17,89 @@ [![Buy Me A Coffee](https://img.shields.io/badge/Buy%20Me%20A%20Coffee-support%20development-FFDD00?style=flat-square&logo=buy-me-a-coffee&logoColor=black)](https://www.buymeacoffee.com/cayossarian) -A Python client library for accessing the SPAN Panel OpenAPI endpoint. - -## Simulation Mode - -The SPAN Panel API client includes a simulation mode for development and testing without requiring a physical SPAN panel. When enabled, the client uses pre-recorded fixture data and applies dynamic variations provided by the API to simulate various load -variations. Simulation mode supports time-based energy accumulation, power fluctuation patterns for different appliance types, and per-circuit or per-branch variation controls. - -For detailed information and usage examples, see [tests/docs/simulation.md](tests/docs/simulation.md). +A Python client library for SPAN Panel smart electrical panels. Supports both **Gen2** panels (REST/OpenAPI) and **Gen3** panels (gRPC — MAIN40/MLO48). 
## Installation ```bash +# Core library — Gen2 panels (REST/OpenAPI) pip install span-panel-api -``` - -## Usage Patterns - -The client supports two usage patterns depending on your use case: - -### Context Manager Pattern (Recommended for Scripts) - -**Best for**: Scripts, one-off operations, short-lived applications - -```python -import asyncio -from span_panel_api import SpanPanelClient - -async def main(): - # Context manager automatically handles connection lifecycle - async with SpanPanelClient("192.168.1.100") as client: - # Authenticate - auth = await client.authenticate("my-script", "SPAN Control Script") - - # Get panel status (no auth required) - status = await client.get_status() - print(f"Panel: {status.system.manufacturer}") - # Get circuits (requires auth) - circuits = await client.get_circuits() - for circuit_id, circuit in circuits.circuits.additional_properties.items(): - print(f"{circuit.name}: {circuit.instant_power_w}W") - - # Control a circuit - await client.set_circuit_relay("circuit-1", "OPEN") - await client.set_circuit_priority("circuit-1", "MUST_HAVE") - - # Client is automatically closed when exiting context - -asyncio.run(main()) +# With Gen3 gRPC support +pip install span-panel-api[grpc] ``` -### Long-Lived Pattern (Services or Integrations) +## Quick Start -**Best for**: Long-running services, persistent connections, integration platforms - -> **Note for Home Assistant integrations**: See [Home Assistant Integration](#home-assistant-integration) section for HA-specific compatibility configuration. +Use `create_span_client` to connect to a panel without knowing its generation in advance. The factory auto-detects Gen2 vs Gen3 and returns the appropriate client. ```python import asyncio -from span_panel_api import SpanPanelClient - -class SpanPanelIntegration: - """Example long-running service integration pattern.""" - - def __init__(self, host: str): - # Create client but don't use context manager - self.client = SpanPanelClient(host) - self._authenticated = False - - async def setup(self) -> None: - """Initialize the integration (called once).""" - try: - # Authenticate once during setup - await self.client.authenticate("my-service", "Panel Integration Service") - self._authenticated = True - except Exception as e: - await self.client.close() # Clean up on setup failure - raise - - async def update_data(self) -> dict: - """Update all data (called periodically by coordinator).""" - if not self._authenticated: - await self.client.authenticate("my-service", "Panel Integration Service") - self._authenticated = True - - try: - # Get all data in one update cycle - status = await self.client.get_status() - panel_state = await self.client.get_panel_state() - circuits = await self.client.get_circuits() - storage = await self.client.get_storage_soe() - - return { - "status": status, - "panel": panel_state, - "circuits": circuits, - "storage": storage - } - except Exception: - self._authenticated = False # Reset auth on error - raise - - async def set_circuit_priority(self, circuit_id: str, priority: str) -> None: - """Set circuit priority (called by service).""" - if not self._authenticated: - await self.client.authenticate("my-service", "Panel Integration Service") - self._authenticated = True - - await self.client.set_circuit_priority(circuit_id, priority) +from span_panel_api import create_span_client - async def cleanup(self) -> None: - """Cleanup when integration is unloaded.""" - await self.client.close() - -# Usage in long-running service async def main(): - 
integration = SpanPanelIntegration("192.168.1.100") + client = await create_span_client("192.168.1.100") + await client.connect() - try: - await integration.setup() + snapshot = await client.get_snapshot() + print(f"Panel: {snapshot.serial_number} ({snapshot.panel_generation})") + print(f"Grid power: {snapshot.main_power_w:.0f} W") - # Simulate coordinator updates - for i in range(10): - data = await integration.update_data() - print(f"Update {i}: {len(data['circuits'].circuits.additional_properties)} circuits") - await asyncio.sleep(30) # Service typically updates every 30 seconds + for circuit_id, circuit in snapshot.circuits.items(): + print(f" [{circuit_id}] {circuit.name}: {circuit.power_w:.0f} W") - finally: - await integration.cleanup() + await client.close() asyncio.run(main()) ``` -### Manual Pattern (Advanced Usage) - -**Best for**: Custom connection management, special requirements - -```python -import asyncio -from span_panel_api import SpanPanelClient - -async def manual_example(): - """Manual client lifecycle management.""" - client = SpanPanelClient("192.168.1.100") - - try: - # Manually authenticate - await client.authenticate("manual-app", "Manual Application") - - # Do work - status = await client.get_status() - circuits = await client.get_circuits() - - print(f"Found {len(circuits.circuits.additional_properties)} circuits") - - except Exception as e: - print(f"Error: {e}") - finally: - # IMPORTANT: Always close the client to free resources - await client.close() - -asyncio.run(manual_example()) -``` - -## When to Use Each Pattern - -| Pattern | Use Case | Pros | Cons | -| ------------------- | ---------------------------------------- | ----------------------------------------------------- | ------------------------------------------------- | -| **Context Manager** | Scripts, one-off tasks, testing | Automatic cleanup • Exception safe • Simple code | Creates/destroys connection each time | -| **Long-Lived** | Services, daemons, integration platforms | Efficient connection reuse Authentication persistence | Manual lifecycle management • Must handle cleanup | -| **Manual** | Custom requirements, debugging | Full control handling | Must remember to call close() • More error-prone | - -## Error Handling - -The client provides error categorization for different retry strategies: - -### Exception Types - -All exceptions inherit from `SpanPanelError`. - -- `SpanPanelAuthError`: Raised for authentication failures (invalid token, login required, etc.) -- `SpanPanelConnectionError`: Raised for network errors, server errors, or API errors -- `SpanPanelTimeoutError`: Raised when a request times out -- `SpanPanelValidationError`: Raised for data validation errors (invalid input, schema mismatch) -- `SpanPanelAPIError`: General API error (fallback for unexpected API issues) -- `SpanPanelRetriableError`: Raised for retriable server errors (502, 503, 504) -- `SpanPanelServerError`: Raised for non-retriable server errors (500) -- `SimulationConfigurationError`: Raised for invalid or missing simulation configuration (simulation mode only) - -```python -from span_panel_api import ( - SpanPanelError, # Base exception - SpanPanelAuthError, - SpanPanelConnectionError, - SpanPanelTimeoutError, - SpanPanelValidationError, - SpanPanelAPIError, - SpanPanelRetriableError, - SpanPanelServerError, - SimulationConfigurationError, -) -``` - -### HTTP Error Code Mapping - -| Status Code | Exception | Retry? 
| Description | Action | -| ---------------------------- | ------------------------------ | -------------------- | --------------------------------- | ------------------------------ | -| **Authentication Errors** | - | - | - | - | -| 401, 403 | `SpanPanelAuthError` | Once (after re-auth) | Authentication required/failed | Re-authenticate and retry once | -| **Server/Network Errors** | - | - | - | - | -| 500 | `SpanPanelServerError` | No | Server error (non-retriable) | Check server, report issue | -| 502, 503, 504 | `SpanPanelRetriableError` | Yes | Retriable server/network errors | Retry with exponential backoff | -| **Other HTTP Errors** | - | - | - | - | -| 404, 400, etc | `SpanPanelAPIError` | Case by case | Client/request errors | Check request parameters | -| **Timeouts** | `SpanPanelTimeoutError` | Yes | Request timed out | Retry with backoff | -| **Validation Errors** | `SpanPanelValidationError` | No | Data validation failed | Fix input data | -| **Simulation Config Errors** | `SimulationConfigurationError` | No | Invalid/missing simulation config | Fix simulation config | - -### Retry Strategy - -```python -async def example_request_with_retry(): - """Example showing appropriate error handling.""" - try: - return await client.get_circuits() - except SpanPanelAuthError: - # Re-authenticate and retry once - await client.authenticate("my-app", "My Application") - return await client.get_circuits() - except SpanPanelRetriableError as e: - # Temporary server or network issues - should retry with backoff - logger.warning(f"Retriable error, will retry: {e}") - raise # Let retry logic handle the retry - except SpanPanelTimeoutError as e: - # Network timeout - should retry - logger.warning(f"Timeout, will retry: {e}") - raise - except SpanPanelValidationError as e: - # Data validation error - fix input - logger.error(f"Validation error: {e}") - raise - except SimulationConfigurationError as e: - # Simulation config error - fix config - logger.error(f"Simulation config error: {e}") - raise - except SpanPanelAPIError as e: - # Other API errors - logger.error(f"API error: {e}") - raise -``` - -### Exception Handling - -The client configures the underlying OpenAPI client with `raise_on_unexpected_status=True`, ensuring that HTTP errors (especially 500 responses) are converted to appropriate exceptions rather than being silently ignored. 
- -## API Reference - -### Client Initialization +To target a specific generation, pass `panel_generation` explicitly: ```python -client = SpanPanelClient( - host="192.168.1.100", # Required: SPAN Panel IP - port=80, # Optional: default 80 - timeout=30.0, # Optional: request timeout - use_ssl=False, # Optional: HTTPS (usually False for local) - cache_window=1.0 # Optional: cache window in seconds (0 to disable) -) -``` +from span_panel_api import create_span_client, PanelGeneration -### Authentication +# Force Gen2 (REST/OpenAPI) +client = await create_span_client("192.168.1.100", panel_generation=PanelGeneration.GEN2) -```python -# Register a new API client (one-time setup) -auth = await client.authenticate( - name="my-integration", # Required: client name - description="My Application" # Optional: description -) -# Token is stored and used for subsequent requests +# Force Gen3 (gRPC) — requires span-panel-api[grpc] +client = await create_span_client("192.168.1.100", panel_generation=PanelGeneration.GEN3) ``` -### Panel Information - -```python -# System status (no authentication required) -status = await client.get_status() -print(f"System: {status.system}") -print(f"Network: {status.network}") - -# Detailed panel state (requires authentication) -panel = await client.get_panel_state() -print(f"Grid power: {panel.instant_grid_power_w}W") -print(f"Main relay: {panel.main_relay_state}") +## Gen2 vs Gen3 Capabilities -# Battery storage information -storage = await client.get_storage_soe() -print(f"Battery SOE: {storage.soe * 100:.1f}%") -print(f"Max capacity: {storage.max_energy_kwh}kWh") -``` +| Feature | Gen2 (REST/OpenAPI) | Gen3 (gRPC) | +| ------------------------ | ------------------- | ------------- | +| Authentication | Required (JWT) | None | +| Circuit relay control | Yes | No | +| Circuit priority control | Yes | No | +| Energy history (Wh) | Yes | No | +| Battery / storage SOE | Yes | No | +| Solar / DSM state | Yes | No | +| Real-time power metrics | Polled | Push-streamed | +| Simulation mode | Yes | No | -### Circuit Control +Use `client.capabilities` (a `PanelCapability` flag set) at runtime to guard optional features: ```python -# Get all circuits -circuits = await client.get_circuits() -for circuit_id, circuit in circuits.circuits.additional_properties.items(): - print(f"Circuit {circuit_id}: {circuit.name}") - print(f" Power: {circuit.instant_power_w}W") - print(f" Relay: {circuit.relay_state}") - print(f" Priority: {circuit.priority}") +from span_panel_api import PanelCapability -# Control circuit relay (OPEN/CLOSED) -await client.set_circuit_relay("circuit-1", "OPEN") # Turn off -await client.set_circuit_relay("circuit-1", "CLOSED") # Turn on +if PanelCapability.RELAY_CONTROL in client.capabilities: + await client.set_circuit_relay("1", "OPEN") -# Set circuit priority -await client.set_circuit_priority("circuit-1", "MUST_HAVE") -await client.set_circuit_priority("circuit-1", "NICE_TO_HAVE") +if PanelCapability.PUSH_STREAMING in client.capabilities: + await client.start_streaming() ``` -### Complete Circuit Data - -The `get_circuits()` method includes virtual circuits for unmapped panel tabs, providing complete panel visibility including non-user controlled tabs. 
- -- Virtual circuits have IDs like `unmapped_tab_1`, `unmapped_tab_2` -- All energy values are correctly mapped from panel branches - -**Example Output:** - -```python -circuits = await client.get_circuits() - -# Standard configured circuits -print(circuits.circuits.additional_properties["1"].name) # "Main Kitchen" -print(circuits.circuits.additional_properties["1"].instant_power_w) # 150 - -# Virtual circuits for unmapped tabs (e.g., solar) -print(circuits.circuits.additional_properties["unmapped_tab_5"].name) # "Unmapped Tab 5" -print(circuits.circuits.additional_properties["unmapped_tab_5"].instant_power_w) # -2500 (solar production) -``` - -## Timeout and Retry Control - -The SPAN Panel API client provides timeout and retry configuration: - -- `timeout` (float, default: 30.0): The maximum time (in seconds) to wait for a response from the panel for each attempt. -- `retries` (int, default: 0): The number of times to retry a failed request due to network or retriable server errors. `retries=0` means no retries (1 total attempt), `retries=1` means 1 retry (2 total attempts), etc. -- `retry_timeout` (float, default: 0.5): The base wait time (in seconds) between retries, with exponential backoff. -- `retry_backoff_multiplier` (float, default: 2.0): The multiplier for exponential backoff between retries. - -### Example Usage - -```python -# No retries (default, fast feedback) -client = SpanPanelClient("192.168.1.100", timeout=10.0) - -# Add retries for production -client = SpanPanelClient("192.168.1.100", timeout=10.0, retries=2, retry_timeout=1.0) - -# Full retry configuration -client = SpanPanelClient( - "192.168.1.100", - timeout=10.0, - retries=3, - retry_timeout=0.5, - retry_backoff_multiplier=2.0 -) - -# Change retry settings at runtime -client.retries = 3 -client.retry_timeout = 2.0 -client.retry_backoff_multiplier = 1.5 -``` - -### What does 'retries' mean? - -| retries | Total Attempts | Description | -| ------- | -------------- | -------------------- | -| 0 | 1 | No retries (default) | -| 1 | 2 | 1 retry | -| 2 | 3 | 2 retries | - -Retry and timeout settings can be queried and changed at runtime. - -## Performance Features - -### Caching - -The client includes a time-based cache that prevents redundant API calls within a configurable window. This feature is particularly useful when multiple operations need the same data. The package itself makes multiple calls to create virtual circuits for -tabs not represented in circuits data so the cache avoids unecessary calls when the user also makes requests the same data. 
- -**Cache Behavior:** - -- Each API endpoint (status, panel_state, circuits, storage) has independent cache -- Cache window starts when successful data is obtained -- Subsequent calls within the window return cached data -- After expiration, next call makes fresh network request -- Failed requests don't affect cache timing - -**Example Benefits:** - -```python -# These calls demonstrate cache efficiency: -panel_state = await client.get_panel_state() # Network call -circuits = await client.get_circuits() # Uses cached panel_state data internally -panel_state2 = await client.get_panel_state() # Returns cached data (within window) -``` - -## Development Setup - -### Prerequisites - -- Python 3.12 or 3.13 (SPAN Panel requires Python 3.12+) -- [Poetry](https://python-poetry.org/) for dependency management - -### Development Installation - -```bash -# Clone and install -git clone -cd span-panel-api -eval "$(poetry env activate)" -poetry install - -# Run tests -poetry run pytest - -# Check coverage -python scripts/coverage.py -``` - -### Project Structure - -```bash -span_openapi/ -├── src/span_panel_api/ # Main client library -│ ├── client.py # SpanPanelClient (high-level wrapper) -│ ├── simulation.py # Simulation engine for dynamic test mode -│ ├── exceptions.py # Exception hierarchy -│ ├── const.py # HTTP status constants -│ └── generated_client/ # Auto-generated OpenAPI client -├── tests/ # Test suite -│ ├── test_core_client.py # Core client and API error path tests -│ ├── test_context_manager.py # Context manager tests -│ ├── test_cache_functionality.py # Cache and retry tests -│ ├── test_enhanced_circuits.py # Enhanced/virtual circuits tests -│ ├── test_simulation_mode.py # Simulation mode tests -│ ├── test_factories.py # Shared test fixtures and factories -│ ├── conftest.py # Pytest shared fixtures -│ └── simulation_fixtures/ # Simulation fixture data (response .txt files) -├── scripts/coverage.py # Coverage checking utility -├── openapi.json # SPAN Panel OpenAPI specification -├── pyproject.toml # Poetry configuration -└── README.md # Project documentation - -``` - -## Advanced Usage - -### Home Assistant Integration - -For Home Assistant integrations, the client provides a compatibility layer to handle asyncio timing issues that can occur in HA's event loop: - -```python -from span_panel_api import SpanPanelClient, set_async_delay_func -import asyncio - -# In your Home Assistant integration setup: -async def ha_compatible_delay(seconds: float) -> None: - """Custom delay function that works well with HA's event loop.""" - # Use HA's async utilities or implement HA-specific delay logic - await asyncio.sleep(seconds) - -# Configure the client to use HA-compatible delay -set_async_delay_func(ha_compatible_delay) - -# Now create and use clients normally -async with SpanPanelClient("192.168.1.100") as client: - # Client will use your custom delay function for retry logic - await client.authenticate("your_token") - panel_state = await client.get_panel_state() - -# To reset to default behavior (uses asyncio.sleep): -set_async_delay_func(None) -``` - -**Why This Matters:** - -- Home Assistant's event loop can be sensitive to blocking operations -- The default `asyncio.sleep()` used in retry logic may not play well with HA -- Custom delay functions allow HA integrations to use HA's preferred async patterns -- This prevents integration timeouts and improves responsiveness - -**Note:** This only affects the retry delay behavior. Normal API operations remain unchanged. 
- -### SSL Configuration - -```python -# For panels that support SSL -# Note: We do not currently observe panels supporting SSL for local access -client = SpanPanelClient( - host="span-panel.local", - use_ssl=True, - port=443 -) -``` - -### Timeout Configuration - -```python -# Custom timeout for slow networks -client = SpanPanelClient( - host="192.168.1.100", - timeout=60.0 # 60 second timeout -) -``` - -## Testing and Coverage - -```bash -# Run full test suite -poetry run pytest - -# Generate coverage report -python scripts/coverage.py --full - -# Run just context manager tests -poetry run pytest tests/test_context_manager.py -v - -# Check coverage meets threshold -python scripts/coverage.py --check --threshold 90 - -# Run with coverage -poetry run pytest --cov=span_panel_api tests/ -``` - -## Contributing - -1. Get `openapi.json` SPAN Panel API specs - - (for example via REST Client extension) - - GET +## Documentation -2. Regenerate client: `poetry run python generate_client.py` -3. Update wrapper client in `src/span_panel_api/client.py` if needed -4. Add tests for new functionality -5. Update this README if adding new features +| Topic | Link | +| -------------------------------------------------------------- | ------------------------------------------------ | +| Gen2 REST/OpenAPI client — usage, auth, API reference, caching | [docs/gen2-client.md](docs/gen2-client.md) | +| Gen3 gRPC client — usage, streaming, data models | [docs/gen3-client.md](docs/gen3-client.md) | +| Error handling and retry strategies | [docs/error-handling.md](docs/error-handling.md) | +| Simulation mode | [docs/simulation.md](docs/simulation.md) | +| Development setup and contributing | [docs/development.md](docs/development.md) | ## License -MIT License - see LICENSE file for details. +MIT License - see [LICENSE](LICENSE) for details. 
diff --git a/docs/development.md b/docs/development.md new file mode 100644 index 0000000..aaf85aa --- /dev/null +++ b/docs/development.md @@ -0,0 +1,155 @@ +# Development Guide + +## Prerequisites + +- Python 3.12 or 3.13 +- [Poetry](https://python-poetry.org/) for dependency management + +## Setup + +```bash +git clone +cd span-panel-api + +# Activate the Poetry-managed environment +eval "$(poetry env activate)" + +# Install all dependencies including dev extras +poetry install + +# Install pre-commit hooks +poetry run pre-commit install +``` + +## Running Tests + +```bash +# Full test suite +poetry run pytest + +# With verbose output +poetry run pytest -v + +# Specific test file +poetry run pytest tests/test_core_client.py -v + +# With coverage +poetry run pytest --cov=span_panel_api tests/ + +# Generate HTML coverage report +python scripts/coverage.py --full + +# Check coverage meets the threshold +python scripts/coverage.py --check --threshold 90 +``` + +## Code Quality + +```bash +# Run all pre-commit hooks on all files (lint, format, type-check, security) +poetry run pre-commit run --all-files + +# Lint only +poetry run ruff check src/span_panel_api/ + +# Format code +poetry run ruff format src/span_panel_api/ + +# Type checking +poetry run mypy src/span_panel_api/ + +# Security audit +poetry run bandit -c pyproject.toml -r src/span_panel_api/ +``` + +## Project Structure + +```text +span-panel-api/ +├── src/span_panel_api/ # Main library +│ ├── __init__.py # Public API surface +│ ├── client.py # SpanPanelClient — Gen2 REST client +│ ├── factory.py # create_span_client — auto-detect factory +│ ├── protocol.py # Protocol definitions for type-safe dispatch +│ ├── models.py # Transport-agnostic data models +│ ├── simulation.py # Simulation engine (Gen2 only) +│ ├── exceptions.py # Exception hierarchy +│ ├── const.py # HTTP status constants +│ ├── phase_validation.py # Solar / phase utilities +│ ├── generated_client/ # Auto-generated OpenAPI client (do not edit) +│ └── grpc/ # Gen3 gRPC client +│ ├── client.py # SpanGrpcClient +│ ├── models.py # Low-level gRPC data models +│ └── const.py # gRPC constants (port, trait IDs, etc.) +├── tests/ # Test suite +│ ├── test_core_client.py +│ ├── test_context_manager.py +│ ├── test_cache_functionality.py +│ ├── test_enhanced_circuits.py +│ ├── test_simulation_mode.py +│ ├── test_factories.py +│ ├── conftest.py +│ └── simulation_fixtures/ # Pre-recorded API response fixtures +├── examples/ # Example scripts and simulation configs +├── scripts/ # Developer utility scripts +├── docs/ # This documentation +├── openapi.json # SPAN Panel OpenAPI specification (Gen2) +└── pyproject.toml # Poetry / project configuration +``` + +## Updating the Gen2 OpenAPI Client + +The `generated_client/` directory is auto-generated from `openapi.json`. Do not edit it manually. + +1. Obtain a fresh `openapi.json` from a live panel: + + ```text + GET http:///api/v1/openapi.json + ``` + +2. Replace `openapi.json` in the repo root. + +3. Regenerate: + + ```bash + poetry run python generate_client.py + ``` + +4. Update `src/span_panel_api/client.py` if the API surface changed. + +5. Add or update tests for any changed behaviour. + +## Gen3 gRPC Development + +The Gen3 client uses manual protobuf encoding/decoding to avoid generated stubs, keeping the dependency surface to the single optional `grpcio` package. 
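+
+As a rough illustration of what "manual" decoding means here, the sketch below shows the two protobuf wire-format primitives such parsing is built on: varint decoding and field-tag splitting. The helper names are hypothetical and are not the library's actual internals; see `grpc/client.py` for the real implementation.
+
+```python
+def decode_varint(buf: bytes, pos: int) -> tuple[int, int]:
+    """Decode a base-128 varint starting at pos; return (value, next_pos)."""
+    result = 0
+    shift = 0
+    while True:
+        byte = buf[pos]
+        pos += 1
+        result |= (byte & 0x7F) << shift
+        if not byte & 0x80:  # high bit clear means this was the last byte
+            return result, pos
+        shift += 7
+
+
+def decode_field_header(buf: bytes, pos: int) -> tuple[int, int, int]:
+    """Split a field tag into (field_number, wire_type) and advance pos."""
+    tag, pos = decode_varint(buf, pos)
+    return tag >> 3, tag & 0x07, pos
+```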
+ +Key files: + +- `grpc/client.py` — `SpanGrpcClient` implementation, protobuf helpers, metric decoders +- `grpc/models.py` — `CircuitInfo`, `CircuitMetrics`, `PanelData` +- `grpc/const.py` — port number, trait IDs, product identifiers + +The gRPC client connects to `TraitHandlerService` at port 50065 and uses three RPC methods: + +| RPC | Purpose | +| -------------- | -------------------------------- | +| `GetInstances` | Discover circuit trait instances | +| `GetRevision` | Fetch circuit names by trait IID | +| `Subscribe` | Stream real-time power metrics | + +## Adding a New Feature + +1. If adding a new API capability, update `PanelCapability` in `models.py`. +2. If adding a new method to both transports, add it to the appropriate `Protocol` in `protocol.py`. +3. Add type hints and docstrings to all new public functions and classes. +4. Write tests covering the new code (target > 80% coverage for new code). +5. Update the relevant `docs/` page. + +## Release Process + +Versioning follows [Semantic Versioning](https://semver.org/). + +1. Update `__version__` in `src/span_panel_api/__init__.py`. +2. Update `CHANGELOG.md`. +3. Run the full test suite and pre-commit hooks. +4. Tag and push — CI will publish to PyPI automatically. diff --git a/docs/error-handling.md b/docs/error-handling.md new file mode 100644 index 0000000..6453997 --- /dev/null +++ b/docs/error-handling.md @@ -0,0 +1,142 @@ +# Error Handling and Retry + +## Exception Hierarchy + +All exceptions inherit from `SpanPanelError`. + +```text +SpanPanelError +├── SpanPanelAuthError — authentication failures (401, 403) +├── SpanPanelConnectionError — network errors or unreachable panel +├── SpanPanelTimeoutError — request timeout +├── SpanPanelValidationError — invalid input or schema mismatch +├── SpanPanelAPIError — general API error (catch-all for HTTP errors) +├── SpanPanelRetriableError — transient server errors (502, 503, 504) +├── SpanPanelServerError — non-retriable server error (500) +├── SpanPanelGrpcError — base for Gen3 gRPC errors +│ └── SpanPanelGrpcConnectionError — Gen3 connection failure +└── SimulationConfigurationError — invalid simulation config (simulation mode only) +``` + +### Import + +```python +from span_panel_api import ( + SpanPanelError, + SpanPanelAuthError, + SpanPanelConnectionError, + SpanPanelTimeoutError, + SpanPanelValidationError, + SpanPanelAPIError, + SpanPanelRetriableError, + SpanPanelServerError, + SpanPanelGrpcError, + SpanPanelGrpcConnectionError, + SimulationConfigurationError, +) +``` + +## HTTP Error → Exception Mapping (Gen2) + +| HTTP Status | Exception | Retriable | Action | +| ------------------ | ------------------------------ | -------------------- | ------------------------------ | +| 401, 403 | `SpanPanelAuthError` | Once (after re-auth) | Re-authenticate then retry | +| 500 | `SpanPanelServerError` | No | Check server; report issue | +| 502, 503, 504 | `SpanPanelRetriableError` | Yes | Retry with exponential backoff | +| 404, 400, etc. | `SpanPanelAPIError` | Case-by-case | Check request parameters | +| Timeout | `SpanPanelTimeoutError` | Yes | Retry with backoff | +| Validation failure | `SpanPanelValidationError` | No | Fix input data | +| Simulation config | `SimulationConfigurationError` | No | Fix simulation config file | + +The underlying HTTP client is configured with `raise_on_unexpected_status=True`, so unexpected status codes are never silently ignored. 
+ +## Handling Errors in Practice + +```python +from span_panel_api import ( + SpanPanelAuthError, + SpanPanelRetriableError, + SpanPanelTimeoutError, + SpanPanelValidationError, + SpanPanelAPIError, +) + +async def fetch_circuits(client): + try: + return await client.get_circuits() + except SpanPanelAuthError: + # Token expired or not yet authenticated — re-auth and retry once + await client.authenticate("my-app", "My Application") + return await client.get_circuits() + except SpanPanelRetriableError as exc: + # Temporary server overload — let retry logic or coordinator handle this + logger.warning("Transient server error, will retry: %s", exc) + raise + except SpanPanelTimeoutError as exc: + # Network too slow — retry after backoff + logger.warning("Request timed out: %s", exc) + raise + except SpanPanelValidationError as exc: + # Unexpected response structure — not retriable + logger.error("Validation error: %s", exc) + raise + except SpanPanelAPIError as exc: + # Any other API error + logger.error("API error: %s", exc) + raise +``` + +## Retry Configuration (Gen2) + +Configure retries on the client to handle transient network issues automatically: + +```python +from span_panel_api import SpanPanelClient + +client = SpanPanelClient( + "192.168.1.100", + timeout=10.0, + retries=3, # 3 retries → up to 4 total attempts + retry_timeout=0.5, # initial delay before first retry + retry_backoff_multiplier=2.0, # delays: 0.5s, 1.0s, 2.0s +) +``` + +Only `SpanPanelRetriableError` and `SpanPanelTimeoutError` trigger automatic retries. `SpanPanelAuthError` and `SpanPanelValidationError` are not retried automatically. + +### Retry Attempt Count + +| `retries` | Total attempts | +| ----------- | -------------- | +| 0 (default) | 1 | +| 1 | 2 | +| 2 | 3 | +| 3 | 4 | + +Settings can be changed at runtime: + +```python +client.retries = 2 +client.retry_timeout = 1.0 +client.retry_backoff_multiplier = 1.5 +``` + +## Gen3 gRPC Errors + +Gen3 errors use a separate, simpler hierarchy since gRPC does not use HTTP status codes: + +```python +from span_panel_api import SpanPanelGrpcError, SpanPanelGrpcConnectionError + +try: + await client.connect() + snapshot = await client.get_snapshot() +except SpanPanelGrpcConnectionError as exc: + # Panel unreachable or gRPC channel failed + logger.error("Gen3 connection failed: %s", exc) +except SpanPanelGrpcError as exc: + # Other gRPC-level errors + logger.error("Gen3 gRPC error: %s", exc) +``` + +Gen3 does not have built-in retry logic — reconnect handling should be implemented at the integration layer (e.g., the Home Assistant coordinator). diff --git a/docs/gen2-client.md b/docs/gen2-client.md new file mode 100644 index 0000000..22fa912 --- /dev/null +++ b/docs/gen2-client.md @@ -0,0 +1,273 @@ +# Gen2 REST/OpenAPI Client + +The Gen2 client (`SpanPanelClient`) communicates with SPAN panels via the local REST API (HTTP on port 80). Gen2 covers the original SPAN Panel hardware (pre-MAIN40/MLO48). + +> **Note**: For integrations that should work with both generations, prefer [`create_span_client`](../README.md) from the factory module. Use `SpanPanelClient` directly only when targeting Gen2 exclusively. + +## Connection Patterns + +### Context Manager (Recommended for Scripts) + +Best for scripts, one-off operations, and short-lived processes. 
+ +```python +import asyncio +from span_panel_api import SpanPanelClient + +async def main(): + async with SpanPanelClient("192.168.1.100") as client: + await client.authenticate("my-script", "SPAN Control Script") + + status = await client.get_status() + print(f"Panel: {status.system.manufacturer}") + + circuits = await client.get_circuits() + for circuit_id, circuit in circuits.circuits.additional_properties.items(): + print(f"{circuit.name}: {circuit.instant_power_w} W") + +asyncio.run(main()) +``` + +The context manager handles `close()` automatically on exit, including exception paths. + +### Long-Lived Pattern (Services and Integrations) + +Best for long-running processes such as Home Assistant integrations and daemons. + +```python +import asyncio +from span_panel_api import SpanPanelClient + +class SpanPanelService: + def __init__(self, host: str) -> None: + self.client = SpanPanelClient(host) + self._authenticated = False + + async def setup(self) -> None: + try: + await self.client.authenticate("my-service", "Panel Service") + self._authenticated = True + except Exception: + await self.client.close() + raise + + async def update(self) -> dict: + if not self._authenticated: + await self.client.authenticate("my-service", "Panel Service") + self._authenticated = True + try: + return { + "status": await self.client.get_status(), + "panel": await self.client.get_panel_state(), + "circuits": await self.client.get_circuits(), + "storage": await self.client.get_storage_soe(), + } + except Exception: + self._authenticated = False + raise + + async def teardown(self) -> None: + await self.client.close() +``` + +### Manual Pattern (Advanced) + +Full control over lifecycle — useful for debugging or custom requirements. + +```python +client = SpanPanelClient("192.168.1.100") +try: + await client.authenticate("manual-app", "Manual Application") + circuits = await client.get_circuits() + print(f"Found {len(circuits.circuits.additional_properties)} circuits") +finally: + await client.close() # Always close to free resources +``` + +## Client Initialization + +```python +client = SpanPanelClient( + host="192.168.1.100", # Required: panel IP or hostname + port=80, # Optional: default 80 + timeout=30.0, # Optional: request timeout in seconds + use_ssl=False, # Optional: use HTTPS (uncommon for local) + cache_window=1.0, # Optional: response cache window in seconds + retries=0, # Optional: retry attempts on transient errors + retry_timeout=0.5, # Optional: initial delay between retries + retry_backoff_multiplier=2.0, # Optional: exponential backoff multiplier +) +``` + +## Authentication + +SPAN Gen2 panels require JWT authentication. The panel's physical proximity sensor must be triggered (within 15 minutes) on first registration. + +```python +# Register a new API client — one-time setup per client name +auth = await client.authenticate( + name="my-integration", # Identifies the client; shown in panel UI + description="My Application" # Optional display description +) +# The token is stored internally; all subsequent requests use it automatically. 
+ +# If you already have a token (e.g., stored from a previous run): +client.set_access_token("your-jwt-token") +``` + +## API Reference + +### Panel Status and State + +```python +# System info — no authentication required +status = await client.get_status() +print(f"Manufacturer: {status.system.manufacturer}") +print(f"Network: {status.network}") + +# Detailed panel state — authentication required +panel = await client.get_panel_state() +print(f"Grid power: {panel.instant_grid_power_w} W") +print(f"Main relay: {panel.main_relay_state}") + +# Battery / storage state of energy — authentication required +storage = await client.get_storage_soe() +print(f"Battery SOE: {storage.soe * 100:.1f}%") +print(f"Max capacity: {storage.max_energy_kwh} kWh") +``` + +### Circuit Data + +```python +circuits = await client.get_circuits() +for circuit_id, circuit in circuits.circuits.additional_properties.items(): + print(f"[{circuit_id}] {circuit.name}") + print(f" Power: {circuit.instant_power_w} W") + print(f" Relay: {circuit.relay_state}") + print(f" Priority: {circuit.priority}") +``` + +`get_circuits()` enriches the API response with **virtual circuits** for unmapped panel tabs, ensuring complete panel visibility. Virtual circuits have IDs such as `unmapped_tab_1`. + +```python +# Configured circuit +circuits.circuits.additional_properties["1"].name # "Main Kitchen" +circuits.circuits.additional_properties["1"].instant_power_w # 150.0 + +# Virtual circuit for an unmapped tab (e.g., solar feedthrough) +circuits.circuits.additional_properties["unmapped_tab_5"].instant_power_w # -2500.0 +``` + +### Circuit Control + +Authentication is required for all write operations. + +```python +# Relay control +await client.set_circuit_relay("1", "OPEN") # Turn off +await client.set_circuit_relay("1", "CLOSED") # Turn on + +# Load priority (affects behavior during demand-response events) +await client.set_circuit_priority("1", "MUST_HAVE") +await client.set_circuit_priority("1", "NICE_TO_HAVE") +await client.set_circuit_priority("1", "NON_ESSENTIAL") +``` + +### Unified Snapshot (Protocol-Compatible) + +For code that must work with both Gen2 and Gen3, use `get_snapshot()`: + +```python +snapshot = await client.get_snapshot() +print(f"Serial: {snapshot.serial_number}") +print(f"Main power: {snapshot.main_power_w} W") +for cid, circuit in snapshot.circuits.items(): + print(f" [{cid}] {circuit.name}: {circuit.power_w} W relay={circuit.relay_state}") +``` + +## Timeout and Retry Configuration + +| Parameter | Default | Description | +| -------------------------- | ------- | ----------------------------------------------------- | +| `timeout` | `30.0` | Per-request timeout in seconds | +| `retries` | `0` | Retry attempts on transient failures (0 = no retries) | +| `retry_timeout` | `0.5` | Initial delay between retries in seconds | +| `retry_backoff_multiplier` | `2.0` | Multiplier for exponential backoff | + +```python +# Production configuration with retries +client = SpanPanelClient( + "192.168.1.100", + timeout=10.0, + retries=3, + retry_timeout=0.5, + retry_backoff_multiplier=2.0, +) + +# Settings are also mutable at runtime +client.retries = 2 +client.retry_timeout = 1.0 +``` + +| `retries` | Total attempts | +| --------- | -------------- | +| 0 | 1 (no retries) | +| 1 | 2 | +| 2 | 3 | + +## Response Caching + +The client caches responses per endpoint for a configurable window to prevent redundant network calls. 
Each endpoint (status, panel state, circuits, storage) has an independent cache that starts when data is successfully received. + +```python +panel = await client.get_panel_state() # Network call +circuits = await client.get_circuits() # Uses cached panel state internally +panel2 = await client.get_panel_state() # Returns cached result (within window) +``` + +- `cache_window=0` disables caching entirely +- Failed requests do not start or extend the cache window + +## Home Assistant Integration + +Home Assistant's event loop can be sensitive to `asyncio.sleep()` calls inside retry logic. Use `set_async_delay_func` to replace the default with an HA-compatible implementation: + +```python +from span_panel_api import SpanPanelClient, set_async_delay_func +import asyncio + +async def ha_delay(seconds: float) -> None: + await asyncio.sleep(seconds) + +set_async_delay_func(ha_delay) + +# Use the client normally — retry delays now use your custom function +async with SpanPanelClient("192.168.1.100") as client: + await client.authenticate("ha-integration", "Home Assistant") + panel = await client.get_panel_state() + +# Restore default behavior +set_async_delay_func(None) +``` + +This only affects retry delay behaviour; normal API calls are unaffected. + +## SSL + +Local SPAN panels do not typically support HTTPS, but the option is available: + +```python +client = SpanPanelClient(host="span-panel.local", use_ssl=True, port=443) +``` + +## Simulation Mode + +The Gen2 client supports a simulation mode for testing without real hardware. See [simulation.md](simulation.md) for full details. + +```python +client = SpanPanelClient( + "192.168.1.100", + simulation_mode=True, + simulation_config_path="examples/simulation_config_40_circuit_with_battery.yaml", +) +``` diff --git a/docs/gen3-client.md b/docs/gen3-client.md new file mode 100644 index 0000000..7b44293 --- /dev/null +++ b/docs/gen3-client.md @@ -0,0 +1,197 @@ +# Gen3 gRPC Client + +The Gen3 client (`SpanGrpcClient`) communicates with next-generation SPAN panels (MAIN40, MLO48) using gRPC on port 50065. No authentication is required. + +> **Note**: For integrations that should work with both generations, prefer `create_span_client` from the factory module — it auto-detects the panel generation. Use the Gen3 client directly only when targeting Gen3 hardware exclusively. 
+ +## Prerequisites + +Gen3 support requires the optional `grpcio` dependency: + +```bash +pip install span-panel-api[grpc] +``` + +## Gen3 Panel Behaviour + +| Characteristic | Detail | +| ------------------------ | ------------------------------------------ | +| Hardware | MAIN40, MLO48 | +| Transport | gRPC (port 50065) | +| Authentication | None | +| Circuit discovery | `GetInstances` / `GetRevision` trait calls | +| Power metrics | Push-streamed via `Subscribe` | +| Relay / priority control | Not supported | +| Energy history | Not supported | +| Battery / storage SOE | Not supported | + +## Connection and Usage + +### Using the Factory (Recommended) + +```python +import asyncio +from span_panel_api import create_span_client, PanelGeneration + +async def main(): + # Auto-detect generation + client = await create_span_client("192.168.1.100") + + # Or force Gen3 explicitly + client = await create_span_client("192.168.1.100", panel_generation=PanelGeneration.GEN3) + + await client.connect() + + snapshot = await client.get_snapshot() + print(f"Panel serial: {snapshot.serial_number}") + for cid, circuit in snapshot.circuits.items(): + print(f" [{cid}] {circuit.name}: {circuit.power_w:.0f} W") + + await client.close() + +asyncio.run(main()) +``` + +### Direct Client Usage + +```python +import asyncio +from span_panel_api.grpc.client import SpanGrpcClient + +async def main(): + client = SpanGrpcClient(host="192.168.1.100", port=50065) + + connected = await client.connect() + if not connected: + print("Failed to connect") + return + + print(f"Connected — {len(client.data.circuits)} circuits discovered") + + # One-shot snapshot + snapshot = await client.get_snapshot() + for cid, circuit in snapshot.circuits.items(): + print(f" [{cid}] {circuit.name}: {circuit.power_w:.0f} W") + + await client.close() + +asyncio.run(main()) +``` + +## Real-Time Streaming + +Gen3 panels deliver power metrics via a push stream. Start the streaming background task to receive continuous updates, and register callbacks to react to each update. + +```python +async def main(): + client = SpanGrpcClient("192.168.1.100") + await client.connect() + + # Register a callback — invoked on every streamed update + def on_update() -> None: + data = client.data + main_power = data.main_feed.power_w + print(f"Grid: {main_power:.0f} W") + + unregister = client.register_callback(on_update) + + # Start the streaming loop + await client.start_streaming() + + # Let updates arrive for a while + await asyncio.sleep(60) + + # Clean up + unregister() + await client.stop_streaming() + await client.close() +``` + +The `register_callback` method returns an unregister function. Call it to remove the callback without affecting others. + +## Capability Runtime Check + +Always use `PanelCapability` flags rather than hard-coding the generation: + +```python +from span_panel_api import PanelCapability + +caps = client.capabilities + +if PanelCapability.PUSH_STREAMING in caps: + await client.start_streaming() + +# Gen3 does not support these — guard with capability flags +if PanelCapability.RELAY_CONTROL in caps: + await client.set_circuit_relay("1", "OPEN") # Only reached on Gen2 +``` + +## Snapshot Data Model + +`get_snapshot()` returns a `SpanPanelSnapshot` populated from the latest streamed metrics. Fields that are Gen2-only are `None` for Gen3. 
+ +```python +snapshot: SpanPanelSnapshot = await client.get_snapshot() + +snapshot.panel_generation # PanelGeneration.GEN3 +snapshot.serial_number # panel resource ID (proxy for serial) +snapshot.firmware_version # empty string until exposed by a trait +snapshot.main_power_w # total grid power in watts +snapshot.main_voltage_v # main feed voltage +snapshot.main_current_a # main feed current + +# None on Gen3: +snapshot.grid_power_w # None +snapshot.battery_soe # None +snapshot.dsm_state # None + +# Per-circuit snapshot +for cid, c in snapshot.circuits.items(): + c.circuit_id # str key (positional slot, "1" = slot 1) + c.name # user-assigned name from panel + c.power_w # real power in watts + c.voltage_v # circuit voltage + c.current_a # circuit current + c.is_on # True if voltage above off-threshold + c.is_dual_phase # True for 240 V double-pole circuits + c.apparent_power_va # apparent power (VA) — Gen3 only + c.reactive_power_var # reactive power (VAR) — Gen3 only + c.power_factor # power factor — Gen3 only + # None on Gen3: + c.relay_state # None + c.priority # None + c.energy_consumed_wh # None +``` + +## Low-Level Data Access + +For direct access to the raw gRPC layer data (circuit topology and latest metrics): + +```python +data = client.data # span_panel_api.grpc.models.PanelData + +data.serial # panel serial / resource ID +data.firmware # firmware version string +data.circuits # dict[int, CircuitInfo] — static circuit topology +data.metrics # dict[int, CircuitMetrics] — latest streamed values +data.main_feed # CircuitMetrics for the main service entrance +``` + +`CircuitInfo` fields: `circuit_id`, `name`, `metric_iid`, `is_dual_phase`, `breaker_position` + +`CircuitMetrics` fields: `power_w`, `voltage_v`, `current_a`, `apparent_power_va`, `reactive_power_var`, `frequency_hz`, `power_factor`, `is_on`, `voltage_a_v`, `voltage_b_v`, `current_a_a`, `current_b_a` + +## Error Handling + +```python +from span_panel_api import SpanPanelGrpcError, SpanPanelGrpcConnectionError + +try: + await client.connect() +except SpanPanelGrpcConnectionError as e: + print(f"Could not connect to Gen3 panel: {e}") +except SpanPanelGrpcError as e: + print(f"gRPC error: {e}") +``` + +See [error-handling.md](error-handling.md) for the full exception hierarchy. From 9b979b55e1626010d30aa760c518d478d9036f0c Mon Sep 17 00:00:00 2001 From: cayossarian Date: Thu, 19 Feb 2026 16:15:39 -0800 Subject: [PATCH 15/15] update change log date --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cf261ee..00335f2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [1.1.15] - Unreleased +## [1.1.15] - 2/19/2026 ### Added