diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 67b452db..52d6cf93 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -173,7 +173,7 @@ jobs:
uses: actions/setup-node@v5
with:
node-version: 20
- cache: 'npm'
+ cache: "npm"
cache-dependency-path: docs/package-lock.json
- name: "Install dependencies"
run: npm install
@@ -216,7 +216,6 @@ jobs:
run: uv sync --all-groups --all-extras
- name: Validate generated documentation
run: uv run invoke docs-validate
-
validate-documentation-style:
if: |
always() && !cancelled() &&
@@ -233,6 +232,7 @@ jobs:
# The official GitHub Action for Vale doesn't work, installing manually instead:
# https://github.com/errata-ai/vale-action/issues/103
+ # cf -> https://github.com/nf-core/website/pull/3509
- name: Download Vale
run: |
curl -sL "https://github.com/errata-ai/vale/releases/download/v${VALE_VERSION}/vale_${VALE_VERSION}_Linux_64-bit.tar.gz" -o vale.tar.gz
diff --git a/.github/workflows/sync-docs.yml b/.github/workflows/sync-docs.yml
index 65574ce7..2cb9dc50 100644
--- a/.github/workflows/sync-docs.yml
+++ b/.github/workflows/sync-docs.yml
@@ -8,8 +8,8 @@ on:
- stable
paths:
- 'docs/docs/**'
- - 'docs/sidebars-infrahubctl.ts'
- - 'docs/sidebars-python-sdk.ts'
+ - 'docs/sidebars/sidebars-infrahubctl.ts'
+ - 'docs/sidebars/sidebars-python-sdk.ts'
jobs:
sync:
diff --git a/changelog/201.added.md b/changelog/201.added.md
new file mode 100644
index 00000000..b64cb2fa
--- /dev/null
+++ b/changelog/201.added.md
@@ -0,0 +1 @@
+Add support for automatic Python SDK API reference generation from docstrings in the code.
\ No newline at end of file
diff --git a/docs/AGENTS.md b/docs/AGENTS.md
index 36021cbb..f869e84f 100644
--- a/docs/AGENTS.md
+++ b/docs/AGENTS.md
@@ -8,8 +8,10 @@ Docusaurus documentation following Diataxis framework.
cd docs && npm install # Install deps
cd docs && npm start # Dev server at localhost:3000
cd docs && npm run build # Build static site
-uv run invoke docs # Generate auto-docs
-uv run invoke docs-validate # Validate docs are current
+cd docs && npm test # Run sidebar utility tests
+uv run invoke docs # Build documentation website
+uv run invoke docs-generate # Regenerate all docs (infrahubctl CLI + Python SDK)
+uv run invoke docs-validate # Check that generated docs match committed files
```
## Structure
@@ -23,11 +25,19 @@ docs/docs/
└── infrahubctl/ # CLI docs (auto-generated)
```
+## Sidebars
+
+Sidebar navigation is dynamic: `sidebars-*.ts` files read the filesystem at build time via utility functions in `sidebar-utils.ts`.
+
+- **infrahubctl**: all `.mdx` files are discovered automatically and sorted alphabetically.
+- **python-sdk**: guides, topics, and reference sections preserve a defined display order; new files are appended alphabetically at the end.
+
+No manual sidebar update is needed when adding a new `.mdx` file. However, to control the display order of a new page, add its doc ID to the ordered list in the corresponding `sidebars-*.ts` file.
+
## Adding Documentation
1. Create MDX file in appropriate directory
2. Add frontmatter with `title`
-3. Update `sidebars-*.ts` for navigation
## MDX Pattern
@@ -52,7 +62,7 @@ Use callouts for important notes.
✅ **Always**
- Include both async/sync examples using Tabs
-- Run `uv run invoke docs-validate` after code changes
+- Run `uv run invoke docs-validate` after code changes to verify generated docs are up to date
🚫 **Never**
diff --git a/docs/__init__.py b/docs/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/_templates/sdk_config.j2 b/docs/_templates/sdk_config.j2
index 7922a363..4fbc9d39 100644
--- a/docs/_templates/sdk_config.j2
+++ b/docs/_templates/sdk_config.j2
@@ -31,8 +31,6 @@ The following settings can be defined in the `Config` class
{% for property in properties %}
## {{ property.name }}
-
-**Property**: {{ property.name }}
**Description**: {% if '\n' in property.description %} {% endif %}{{ property.description }}
**Type**: `{{ property.type }}`
diff --git a/docs/docs/python-sdk/reference/config.mdx b/docs/docs/python-sdk/reference/config.mdx
index 320aebb4..1b525389 100644
--- a/docs/docs/python-sdk/reference/config.mdx
+++ b/docs/docs/python-sdk/reference/config.mdx
@@ -30,8 +30,6 @@ The Python SDK (Async or Sync) client can be configured using an instance of the
The following settings can be defined in the `Config` class
## address
-
-**Property**: address
**Description**: The URL to use when connecting to Infrahub.
**Type**: `string`
@@ -39,16 +37,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_ADDRESS`
## api_token
-
-**Property**: api_token
**Description**: API token for authentication against Infrahub.
**Type**: `string`
**Environment variable**: `INFRAHUB_API_TOKEN`
## echo_graphql_queries
-
-**Property**: echo_graphql_queries
**Description**: If set the GraphQL query and variables will be echoed to the screen
**Type**: `boolean`
@@ -56,24 +50,18 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_ECHO_GRAPHQL_QUERIES`
## username
-
-**Property**: username
**Description**: Username for accessing Infrahub
**Type**: `string`
**Environment variable**: `INFRAHUB_USERNAME`
## password
-
-**Property**: password
**Description**: Password for accessing Infrahub
**Type**: `string`
**Environment variable**: `INFRAHUB_PASSWORD`
## default_branch
-
-**Property**: default_branch
**Description**: Default branch to target if not specified for each request.
**Type**: `string`
@@ -81,8 +69,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_DEFAULT_BRANCH`
## default_branch_from_git
-
-**Property**: default_branch_from_git
**Description**: Indicates if the default Infrahub branch to target should come from the active branch in the local Git repository.
**Type**: `boolean`
@@ -90,16 +76,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_DEFAULT_BRANCH_FROM_GIT`
## identifier
-
-**Property**: identifier
**Description**: Tracker identifier
**Type**: `string`
**Environment variable**: `INFRAHUB_IDENTIFIER`
## insert_tracker
-
-**Property**: insert_tracker
**Description**: Insert a tracker on queries to the server
**Type**: `boolean`
@@ -107,8 +89,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_INSERT_TRACKER`
## max_concurrent_execution
-
-**Property**: max_concurrent_execution
**Description**: Max concurrent execution in batch mode
**Type**: `integer`
@@ -116,16 +96,12 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_MAX_CONCURRENT_EXECUTION`
## mode
-
-**Property**: mode
**Description**: Default mode for the client
**Type**: `object`
**Environment variable**: `INFRAHUB_MODE`
## pagination_size
-
-**Property**: pagination_size
**Description**: Page size for queries to the server
**Type**: `integer`
@@ -133,8 +109,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_PAGINATION_SIZE`
## retry_delay
-
-**Property**: retry_delay
**Description**: Number of seconds to wait until attempting a retry.
**Type**: `integer`
@@ -142,8 +116,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_RETRY_DELAY`
## retry_on_failure
-
-**Property**: retry_on_failure
**Description**: Retry operation in case of failure
**Type**: `boolean`
@@ -151,8 +123,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_RETRY_ON_FAILURE`
## max_retry_duration
-
-**Property**: max_retry_duration
**Description**: Maximum duration until we stop attempting to retry if enabled.
**Type**: `integer`
@@ -160,8 +130,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_MAX_RETRY_DURATION`
## schema_converge_timeout
-
-**Property**: schema_converge_timeout
**Description**: Number of seconds to wait for schema to have converged
**Type**: `integer`
@@ -169,8 +137,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_SCHEMA_CONVERGE_TIMEOUT`
## timeout
-
-**Property**: timeout
**Description**: Default connection timeout in seconds
**Type**: `integer`
@@ -178,32 +144,24 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_TIMEOUT`
## transport
-
-**Property**: transport
**Description**: Set an alternate transport using a predefined option
**Type**: `object`
**Environment variable**: `INFRAHUB_TRANSPORT`
## proxy
-
-**Property**: proxy
**Description**: Proxy address
**Type**: `string`
**Environment variable**: `INFRAHUB_PROXY`
## proxy_mounts
-
-**Property**: proxy_mounts
**Description**: Proxy mounts configuration
**Type**: `object`
**Environment variable**: `INFRAHUB_PROXY_MOUNTS`
## update_group_context
-
-**Property**: update_group_context
**Description**: Update GraphQL query groups
**Type**: `boolean`
@@ -211,8 +169,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_UPDATE_GROUP_CONTEXT`
## tls_insecure
-
-**Property**: tls_insecure
**Description**:
Indicates if TLS certificates are verified.
@@ -223,8 +179,6 @@ The following settings can be defined in the `Config` class
**Environment variable**: `INFRAHUB_TLS_INSECURE`
## tls_ca_file
-
-**Property**: tls_ca_file
**Description**: File path to CA cert or bundle in PEM format
**Type**: `string`
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
new file mode 100644
index 00000000..53b0ac20
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
@@ -0,0 +1,905 @@
+---
+title: client
+sidebarTitle: client
+---
+
+# `infrahub_sdk.client`
+
+## Functions
+
+### `handle_relogin`
+
+```python
+handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]) -> Callable[..., Coroutine[Any, Any, httpx.Response]]
+```
+
+### `handle_relogin_sync`
+
+```python
+handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]
+```
+
+### `raise_for_error_deprecation_warning`
+
+```python
+raise_for_error_deprecation_warning(value: bool | None) -> None
+```
+
+## Classes
+
+### `ProcessRelationsNode`
+
+### `ProxyConfig`
+
+### `ProxyConfigSync`
+
+### `ProcessRelationsNodeSync`
+
+### `BaseClient`
+
+
+Base class for InfrahubClient and InfrahubClientSync
+
+
+**Methods:**
+
+#### `request_context`
+
+```python
+request_context(self) -> RequestContext | None
+```
+
+#### `request_context`
+
+```python
+request_context(self, request_context: RequestContext) -> None
+```
+
+#### `start_tracking`
+
+```python
+start_tracking(self, identifier: str | None = None, params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> Self
+```
+
+#### `set_context_properties`
+
+```python
+set_context_properties(self, identifier: str, params: dict[str, str] | None = None, delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> None
+```
+
+### `InfrahubClient`
+
+
+GraphQL Client to interact with Infrahub.
+
+
+**Methods:**
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+
+#### `get_user`
+
+```python
+get_user(self) -> dict
+```
+
+Return user information
+
+
+#### `get_user_permissions`
+
+```python
+get_user_permissions(self) -> dict
+```
+
+Return user permissions
+
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `create`
+
+```python
+create(self, kind: type[SchemaType], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `create`
+
+```python
+create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNode | SchemaType
+```
+
+#### `delete`
+
+```python
+delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType | None
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode | None
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None
+```
+
+#### `count`
+
+```python
+count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int
+```
+
+Return the number of nodes of a given kind.
+
+
+#### `all`
+
+```python
+all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType]
+```
+
+#### `all`
+
+```python
+all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode]
+```
+
+#### `all`
+
+```python
+all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve all nodes of a given kind
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` improves performance.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- list\[InfrahubNode]: List of Nodes
+
+
+#### `filters`
+
+```python
+filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve nodes of a given kind based on provided filters.
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `partial_match`: Allow partial match of filter criteria for the query.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` improves performance.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+- `**kwargs`: Additional filter criteria for the query.
+
+**Returns:**
+- list\[InfrahubNode]: List of Nodes that match the given filters.
+
+
+#### `clone`
+
+```python
+clone(self, branch: str | None = None) -> InfrahubClient
+```
+
+Return a cloned version of the client using the same configuration
+
+
+#### `execute_graphql`
+
+```python
+execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict
+```
+
+Execute a GraphQL query (or mutation).
+If retry_on_failure is True, the query will retry until the server becomes reachable.
+
+**Args:**
+- `query`: GraphQL Query to execute, can be a query or a mutation
+- `variables`: Variables to pass along with the GraphQL query. Defaults to None.
+- `branch_name`: Name of the branch on which the query will be executed. Defaults to None.
+- `at`: Time when the query should be executed. Defaults to None.
+- `timeout`: Timeout in second for the query. Defaults to None.
+- `raise_for_error`: Deprecated. Controls only HTTP status handling.
+- None (default) or True\: HTTP errors raise via resp.raise_for_status().
+- False\: HTTP errors are not automatically raised. Defaults to None.
+
+**Raises:**
+- `GraphQLError`: When the GraphQL response contains errors.
+
+**Returns:**
+- The GraphQL data payload (response["data"]).
+
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `query_gql_query`
+
+```python
+query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict
+```
+
+#### `create_diff`
+
+```python
+create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str
+```
+
+#### `get_diff_summary`
+
+```python
+get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff]
+```
+
+#### `get_diff_tree`
+
+```python
+get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None
+```
+
+Get complete diff tree with metadata and nodes.
+
+Returns None if no diff exists.
+
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP address by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to set on the address to allocate.
+- `address_type`: Kind of the address to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated address.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to insert on the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNode: Node corresponding to the allocated resource.
+
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to insert on the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNode: Node corresponding to the allocated resource.
+
+
+#### `create_batch`
+
+```python
+create_batch(self, return_exceptions: bool = False) -> InfrahubBatch
+```
+
+#### `get_list_repositories`
+
+```python
+get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData]
+```
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names
+and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
+
+
+### `InfrahubClientSync`
+
+**Methods:**
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+
+#### `get_user`
+
+```python
+get_user(self) -> dict
+```
+
+Return user information
+
+
+#### `get_user_permissions`
+
+```python
+get_user_permissions(self) -> dict
+```
+
+Return user permissions
+
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `create`
+
+```python
+create(self, kind: type[SchemaTypeSync], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+#### `create`
+
+```python
+create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync
+```
+
+#### `delete`
+
+```python
+delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None
+```
+
+#### `clone`
+
+```python
+clone(self, branch: str | None = None) -> InfrahubClientSync
+```
+
+Return a cloned version of the client using the same configuration
+
+
+#### `execute_graphql`
+
+```python
+execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict
+```
+
+Execute a GraphQL query (or mutation).
+If retry_on_failure is True, the query will retry until the server becomes reachable.
+
+**Args:**
+- `query`: GraphQL Query to execute, can be a query or a mutation
+- `variables`: Variables to pass along with the GraphQL query. Defaults to None.
+- `branch_name`: Name of the branch on which the query will be executed. Defaults to None.
+- `at`: Time when the query should be executed. Defaults to None.
+- `timeout`: Timeout in second for the query. Defaults to None.
+- `raise_for_error`: Deprecated. Controls only HTTP status handling.
+- None (default) or True\: HTTP errors raise via `resp.raise_for_status()`.
+- False\: HTTP errors are not automatically raised.
+GraphQL errors always raise `GraphQLError`. Defaults to None.
+
+**Raises:**
+- `GraphQLError`: When the GraphQL response contains errors.
+
+**Returns:**
+- The GraphQL data payload (`response["data"]`).
+
+
+#### `count`
+
+```python
+count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int
+```
+
+Return the number of nodes of a given kind.
+
+
+#### `all`
+
+```python
+all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaTypeSync]
+```
+
+#### `all`
+
+```python
+all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNodeSync]
+```
+
+#### `all`
+
+```python
+all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNodeSync] | list[SchemaTypeSync]
+```
+
+Retrieve all nodes of a given kind
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performances.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- list\[InfrahubNodeSync]: List of Nodes
+
+
+#### `filters`
+
+```python
+filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaTypeSync]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNodeSync]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNodeSync] | list[SchemaTypeSync]
+```
+
+Retrieve nodes of a given kind based on provided filters.
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `partial_match`: Allow partial match of filter criteria for the query.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performances.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+- `**kwargs`: Additional filter criteria for the query.
+
+**Returns:**
+- list\[InfrahubNodeSync]: List of Nodes that match the given filters.
+
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync | None
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaTypeSync], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync | None
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaTypeSync], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync | None
+```
+
+#### `create_batch`
+
+```python
+create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync
+```
+
+Create a batch to execute multiple queries concurrently.
+
+Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such
+batch to manipulate objects that depend on each others.
+
+
+#### `get_list_repositories`
+
+```python
+get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData]
+```
+
+#### `query_gql_query`
+
+```python
+query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict
+```
+
+#### `create_diff`
+
+```python
+create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str
+```
+
+#### `get_diff_summary`
+
+```python
+get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff]
+```
+
+#### `get_diff_tree`
+
+```python
+get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None
+```
+
+Get complete diff tree with metadata and nodes.
+
+Returns None if no diff exists.
+
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None
+```
+
+Allocate a new IP address by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to set on the address to allocate.
+- `address_type`: Kind of the address to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated address.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker string to associate with the request.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNodeSync: Node corresponding to the allocated resource.
+
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker string to associate with the request.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNodeSync: Node corresponding to the allocated resource.
+
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names
+and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx
new file mode 100644
index 00000000..c38ddcf1
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/attribute.mdx
@@ -0,0 +1,28 @@
+---
+title: attribute
+sidebarTitle: attribute
+---
+
+# `infrahub_sdk.node.attribute`
+
+## Classes
+
+### `Attribute`
+
+
+Represents an attribute of a Node, including its schema, value, and properties.
+
+
+**Methods:**
+
+#### `value`
+
+```python
+value(self) -> Any
+```
+
+#### `value`
+
+```python
+value(self, value: Any) -> None
+```
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx
new file mode 100644
index 00000000..290f923e
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/constants.mdx
@@ -0,0 +1,8 @@
+---
+title: constants
+sidebarTitle: constants
+---
+
+# `infrahub_sdk.node.constants`
+
+*This module is empty or contains only private/internal implementations.*
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx
new file mode 100644
index 00000000..782b212e
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/metadata.mdx
@@ -0,0 +1,20 @@
+---
+title: metadata
+sidebarTitle: metadata
+---
+
+# `infrahub_sdk.node.metadata`
+
+## Classes
+
+### `NodeMetadata`
+
+
+Represents metadata about a node (created_at, created_by, updated_at, updated_by).
+
+
+### `RelationshipMetadata`
+
+
+Represents metadata about a relationship edge (updated_at, updated_by).
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
new file mode 100644
index 00000000..9a4ec036
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
@@ -0,0 +1,393 @@
+---
+title: node
+sidebarTitle: node
+---
+
+# `infrahub_sdk.node.node`
+
+## Classes
+
+### `InfrahubNodeBase`
+
+
+Base class for InfrahubNode and InfrahubNodeSync
+
+
+**Methods:**
+
+#### `get_branch`
+
+```python
+get_branch(self) -> str
+```
+
+#### `get_path_value`
+
+```python
+get_path_value(self, path: str) -> Any
+```
+
+#### `get_human_friendly_id`
+
+```python
+get_human_friendly_id(self) -> list[str] | None
+```
+
+#### `get_human_friendly_id_as_string`
+
+```python
+get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None
+```
+
+#### `hfid`
+
+```python
+hfid(self) -> list[str] | None
+```
+
+#### `hfid_str`
+
+```python
+hfid_str(self) -> str | None
+```
+
+#### `get_node_metadata`
+
+```python
+get_node_metadata(self) -> NodeMetadata | None
+```
+
+Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched.
+
+
+#### `get_kind`
+
+```python
+get_kind(self) -> str
+```
+
+#### `get_all_kinds`
+
+```python
+get_all_kinds(self) -> list[str]
+```
+
+#### `is_ip_prefix`
+
+```python
+is_ip_prefix(self) -> bool
+```
+
+#### `is_ip_address`
+
+```python
+is_ip_address(self) -> bool
+```
+
+#### `is_resource_pool`
+
+```python
+is_resource_pool(self) -> bool
+```
+
+#### `get_raw_graphql_data`
+
+```python
+get_raw_graphql_data(self) -> dict | None
+```
+
+#### `generate_query_data_init`
+
+```python
+generate_query_data_init(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+### `InfrahubNode`
+
+
+Represents an Infrahub node in an asynchronous context.
+
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClient, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+ Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format
+
+
+#### `add_relationships`
+
+```python
+add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `remove_relationships`
+
+```python
+remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `create`
+
+```python
+create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `update`
+
+```python
+update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `get_pool_allocated_resources`
+
+```python
+get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]
+```
+
+Fetch all nodes that were allocated for the pool and a given resource.
+
+**Args:**
+- `resource`: The resource from which the nodes were allocated.
+
+**Returns:**
+- list\[InfrahubNode]: The allocated nodes.
+
+
+#### `get_pool_resources_utilization`
+
+```python
+get_pool_resources_utilization(self) -> list[dict[str, Any]]
+```
+
+Fetch the utilization of each resource for the pool.
+
+**Returns:**
+- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool.
+
+
+#### `get_flat_value`
+
+```python
+get_flat_value(self, key: str, separator: str = '__') -> Any
+```
+
+Query recursively a value defined in a flat notation (string), on a hierarchy of objects
+
+**Examples:**
+
+name__value
+module.object.value
+
+
+#### `extract`
+
+```python
+extract(self, params: dict[str, str]) -> dict[str, Any]
+```
+
+Extract some datapoints defined in a flat notation.
+
+
+### `InfrahubNodeSync`
+
+
+Represents an Infrahub node in a synchronous context.
+
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClientSync, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+ Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format
+
+
+#### `add_relationships`
+
+```python
+add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `remove_relationships`
+
+```python
+remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `create`
+
+```python
+create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `update`
+
+```python
+update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `get_pool_allocated_resources`
+
+```python
+get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync]
+```
+
+Fetch all nodes that were allocated for the pool and a given resource.
+
+**Args:**
+- `resource`: The resource from which the nodes were allocated.
+
+**Returns:**
+- list\[InfrahubNodeSync]: The allocated nodes.
+
+
+#### `get_pool_resources_utilization`
+
+```python
+get_pool_resources_utilization(self) -> list[dict[str, Any]]
+```
+
+Fetch the utilization of each resource for the pool.
+
+**Returns:**
+- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool.
+
+
+#### `get_flat_value`
+
+```python
+get_flat_value(self, key: str, separator: str = '__') -> Any
+```
+
+Query recursively a value defined in a flat notation (string), on a hierarchy of objects
+
+**Examples:**
+
+name__value
+module.object.value
+
+
+#### `extract`
+
+```python
+extract(self, params: dict[str, str]) -> dict[str, Any]
+```
+
+Extract some datapoints defined in a flat notation.
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx
new file mode 100644
index 00000000..6eb519d8
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/parsers.mdx
@@ -0,0 +1,18 @@
+---
+title: parsers
+sidebarTitle: parsers
+---
+
+# `infrahub_sdk.node.parsers`
+
+## Functions
+
+### `parse_human_friendly_id`
+
+```python
+parse_human_friendly_id(hfid: str | list[str]) -> tuple[str | None, list[str]]
+```
+
+
+Parse a human friendly ID into a kind and an identifier.
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx
new file mode 100644
index 00000000..a45ec08a
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/property.mdx
@@ -0,0 +1,14 @@
+---
+title: property
+sidebarTitle: property
+---
+
+# `infrahub_sdk.node.property`
+
+## Classes
+
+### `NodeProperty`
+
+
+Represents a property of a node, typically used for metadata like display labels.
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx
new file mode 100644
index 00000000..ffaee1a3
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/related_node.mdx
@@ -0,0 +1,134 @@
+---
+title: related_node
+sidebarTitle: related_node
+---
+
+# `infrahub_sdk.node.related_node`
+
+## Classes
+
+### `RelatedNodeBase`
+
+
+Base class for representing a related node in a relationship.
+
+
+**Methods:**
+
+#### `id`
+
+```python
+id(self) -> str | None
+```
+
+#### `hfid`
+
+```python
+hfid(self) -> list[Any] | None
+```
+
+#### `hfid_str`
+
+```python
+hfid_str(self) -> str | None
+```
+
+#### `is_resource_pool`
+
+```python
+is_resource_pool(self) -> bool
+```
+
+#### `initialized`
+
+```python
+initialized(self) -> bool
+```
+
+#### `display_label`
+
+```python
+display_label(self) -> str | None
+```
+
+#### `typename`
+
+```python
+typename(self) -> str | None
+```
+
+#### `kind`
+
+```python
+kind(self) -> str | None
+```
+
+#### `is_from_profile`
+
+```python
+is_from_profile(self) -> bool
+```
+
+Return whether this relationship was set from a profile. Done by checking if the source is of a profile kind.
+
+
+#### `get_relationship_metadata`
+
+```python
+get_relationship_metadata(self) -> RelationshipMetadata | None
+```
+
+Returns the relationship metadata (updated_at, updated_by) if fetched.
+
+
+### `RelatedNode`
+
+
+Represents a RelatedNodeBase in an asynchronous context.
+
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self, timeout: int | None = None) -> None
+```
+
+#### `peer`
+
+```python
+peer(self) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self) -> InfrahubNode
+```
+
+### `RelatedNodeSync`
+
+
+Represents a related node in a synchronous context.
+
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self, timeout: int | None = None) -> None
+```
+
+#### `peer`
+
+```python
+peer(self) -> InfrahubNodeSync
+```
+
+#### `get`
+
+```python
+get(self) -> InfrahubNodeSync
+```
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx
new file mode 100644
index 00000000..84231b4e
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/relationship.mdx
@@ -0,0 +1,125 @@
+---
+title: relationship
+sidebarTitle: relationship
+---
+
+# `infrahub_sdk.node.relationship`
+
+## Classes
+
+### `RelationshipManagerBase`
+
+
+Base class for RelationshipManager and RelationshipManagerSync
+
+
+**Methods:**
+
+#### `peer_ids`
+
+```python
+peer_ids(self) -> list[str]
+```
+
+#### `peer_hfids`
+
+```python
+peer_hfids(self) -> list[list[Any]]
+```
+
+#### `peer_hfids_str`
+
+```python
+peer_hfids_str(self) -> list[str]
+```
+
+#### `has_update`
+
+```python
+has_update(self) -> bool
+```
+
+#### `is_from_profile`
+
+```python
+is_from_profile(self) -> bool
+```
+
+Return whether this relationship was set from a profile. All its peers must be from a profile.
+
+
+### `RelationshipManager`
+
+
+Manages relationships of a node in an asynchronous context.
+
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self) -> None
+```
+
+#### `add`
+
+```python
+add(self, data: str | RelatedNode | dict) -> None
+```
+
+Add a new peer to this relationship.
+
+
+#### `extend`
+
+```python
+extend(self, data: Iterable[str | RelatedNode | dict]) -> None
+```
+
+Add new peers to this relationship.
+
+
+#### `remove`
+
+```python
+remove(self, data: str | RelatedNode | dict) -> None
+```
+
+### `RelationshipManagerSync`
+
+
+Manages relationships of a node in a synchronous context.
+
+
+**Methods:**
+
+#### `fetch`
+
+```python
+fetch(self) -> None
+```
+
+#### `add`
+
+```python
+add(self, data: str | RelatedNodeSync | dict) -> None
+```
+
+Add a new peer to this relationship.
+
+
+#### `extend`
+
+```python
+extend(self, data: Iterable[str | RelatedNodeSync | dict]) -> None
+```
+
+Add new peers to this relationship.
+
+
+#### `remove`
+
+```python
+remove(self, data: str | RelatedNodeSync | dict) -> None
+```
diff --git a/docs/docs_generation/__init__.py b/docs/docs_generation/__init__.py
new file mode 100644
index 00000000..4b462a71
--- /dev/null
+++ b/docs/docs_generation/__init__.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from .content_gen_methods import (
+ ACommand,
+ ADocContentGenMethod,
+ CommandOutputDocContentGenMethod,
+ FilePrintingDocContentGenMethod,
+ Jinja2DocContentGenMethod,
+ MdxCodeDocumentation,
+ TyperGroupCommand,
+ TyperSingleCommand,
+)
+from .pages import DocPage, MDXDocPage
+
+__all__ = [
+ "ACommand",
+ "ADocContentGenMethod",
+ "CommandOutputDocContentGenMethod",
+ "DocPage",
+ "FilePrintingDocContentGenMethod",
+ "Jinja2DocContentGenMethod",
+ "MDXDocPage",
+ "MdxCodeDocumentation",
+ "TyperGroupCommand",
+ "TyperSingleCommand",
+]
diff --git a/docs/docs_generation/content_gen_methods/__init__.py b/docs/docs_generation/content_gen_methods/__init__.py
new file mode 100644
index 00000000..93f42b91
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/__init__.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+# Use relative imports throughout: the file previously mixed absolute
+# ``docs.docs_generation...`` paths with relative ones for sibling modules.
+from .command.command import ACommand
+from .command.typer_command import TyperGroupCommand, TyperSingleCommand
+from .mdx.mdx_code_doc import MdxCodeDocumentation
+
+from .base import ADocContentGenMethod
+from .command_output_method import CommandOutputDocContentGenMethod
+from .file_printing_method import FilePrintingDocContentGenMethod
+from .jinja2_method import Jinja2DocContentGenMethod
+
+__all__ = [
+    "ACommand",
+    "ADocContentGenMethod",
+    "CommandOutputDocContentGenMethod",
+    "FilePrintingDocContentGenMethod",
+    "Jinja2DocContentGenMethod",
+    "MdxCodeDocumentation",
+    "TyperGroupCommand",
diff --git a/docs/docs_generation/content_gen_methods/base.py b/docs/docs_generation/content_gen_methods/base.py
new file mode 100644
index 00000000..ddab2dde
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/base.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+
+class ADocContentGenMethod(ABC):
+ """Strategy for producing documentation content as a string.
+
+ Each subclass implements ``apply()`` for a specific content source
+ (Jinja2 template, CLI command, pre-generated file, ...).
+ """
+
+ @abstractmethod
+ def apply(self) -> str:
+ """Generate the documentation content."""
diff --git a/docs/docs_generation/content_gen_methods/command/__init__.py b/docs/docs_generation/content_gen_methods/command/__init__.py
new file mode 100644
index 00000000..a1bc4e0a
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from .command import ACommand
+
+__all__ = [
+ "ACommand",
+]
diff --git a/docs/docs_generation/content_gen_methods/command/command.py b/docs/docs_generation/content_gen_methods/command/command.py
new file mode 100644
index 00000000..83a646c2
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/command.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+
+class ACommand(ABC):
+ """Abstract base for building a shell command string."""
+
+ @abstractmethod
+ def build(self) -> str:
+ """Return the full command string to execute."""
diff --git a/docs/docs_generation/content_gen_methods/command/typer_command.py b/docs/docs_generation/content_gen_methods/command/typer_command.py
new file mode 100644
index 00000000..180f9b57
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command/typer_command.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+from abc import ABC
+
+from .command import ACommand
+
+
+class ATyperCommand(ACommand, ABC):
+ def __init__(self, name: str) -> None:
+ self.name = name
+
+
+class TyperSingleCommand(ATyperCommand):
+ """A single (non-group) infrahubctl command."""
+
+ def build(self) -> str:
+ return (
+ f'uv run typer --func {self.name} infrahub_sdk.ctl.cli_commands utils docs --name "infrahubctl {self.name}"'
+ )
+
+
+class TyperGroupCommand(ATyperCommand):
+ """An infrahubctl command group (e.g. ``branch``, ``schema``)."""
+
+ def build(self) -> str:
+ return f'uv run typer infrahub_sdk.ctl.{self.name} utils docs --name "infrahubctl {self.name}"'
diff --git a/docs/docs_generation/content_gen_methods/command_output_method.py b/docs/docs_generation/content_gen_methods/command_output_method.py
new file mode 100644
index 00000000..ad2e9a5b
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/command_output_method.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+import tempfile
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+ from .command import ACommand
+
+from .base import ADocContentGenMethod
+
+
+class CommandOutputDocContentGenMethod(ADocContentGenMethod):
+    """Run a command and return the content it writes to a temporary file.
+
+    ``--output <path>`` is appended to the command automatically.
+
+    Args:
+        context: Invoke execution context.
+        working_directory: Directory in which the command is executed.
+        command: An ``ACommand`` whose ``build()`` returns the base command string.
+
+    Example::
+
+        method = CommandOutputDocContentGenMethod(
+            context=ctx,
+            working_directory=project_root,
+            command=TyperSingleCommand(name="dump"),
+        )
+        content = method.apply()
+    """
+
+    def __init__(self, context: Context, working_directory: Path, command: ACommand) -> None:
+        self.context = context
+        self.working_directory = working_directory
+        self.command = command
+
+    def apply(self) -> str:
+        # Create (and immediately close) a named temp file so the command can write to its path.
+        with tempfile.NamedTemporaryFile(mode="w", suffix=".mdx", delete=False, encoding="utf-8") as tmp:
+            tmp_path = Path(tmp.name)
+        try:
+            full_cmd = f"{self.command.build()} --output {tmp_path}"
+            with self.context.cd(self.working_directory):
+                self.context.run(full_cmd)
+            return tmp_path.read_text(encoding="utf-8")
+        finally:  # remove the temp file even when the command fails
+            tmp_path.unlink(missing_ok=True)
diff --git a/docs/docs_generation/content_gen_methods/file_printing_method.py b/docs/docs_generation/content_gen_methods/file_printing_method.py
new file mode 100644
index 00000000..1b07aa20
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/file_printing_method.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from .base import ADocContentGenMethod
+
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods.mdx.mdx_code_doc import MdxFile
+
+
+class FilePrintingDocContentGenMethod(ADocContentGenMethod):
+ """Return the content of an already-generated file as-is.
+
+ Args:
+ file: The ``MdxFile`` whose content will be returned.
+ """
+
+ def __init__(self, file: MdxFile) -> None:
+ self.file = file
+
+ def apply(self) -> str:
+ return self.file.content
diff --git a/docs/docs_generation/content_gen_methods/jinja2_method.py b/docs/docs_generation/content_gen_methods/jinja2_method.py
new file mode 100644
index 00000000..b880d856
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/jinja2_method.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+import asyncio
+from typing import TYPE_CHECKING, Any
+
+from .base import ADocContentGenMethod
+
+if TYPE_CHECKING:
+ from infrahub_sdk.template.base import ATemplate
+
+
+class Jinja2DocContentGenMethod(ADocContentGenMethod):
+ """Render a template using an ``ATemplate`` implementation.
+
+ The template engine is async; rendering is run synchronously via ``asyncio.run``.
+
+ Args:
+ template: A template instance implementing ``ATemplate``.
+ template_variables: Variables passed to the template during rendering.
+
+ Example::
+
+ template = Jinja2Template(
+ template=Path("sdk_template_reference.j2"),
+ template_directory=docs_dir / "_templates",
+ )
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"builtin": BUILTIN_FILTERS},
+ )
+ content = method.apply()
+ """
+
+ def __init__(self, template: ATemplate, template_variables: dict[str, Any]) -> None:
+ self.template = template
+ self.template_variables = template_variables
+
+ def apply(self) -> str:
+ return asyncio.run(self.template.render(variables=self.template_variables))
diff --git a/docs/docs_generation/content_gen_methods/mdx/__init__.py b/docs/docs_generation/content_gen_methods/mdx/__init__.py
new file mode 100644
index 00000000..e747fa79
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from .mdx_code_doc import MdxCodeDocumentation, MdxFile
+
+__all__ = [
+ "MdxCodeDocumentation",
+ "MdxFile",
+]
diff --git a/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py
new file mode 100644
index 00000000..3712a02a
--- /dev/null
+++ b/docs/docs_generation/content_gen_methods/mdx/mdx_code_doc.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import tempfile
+from dataclasses import dataclass
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from invoke import Context
+
+
+@dataclass
+class MdxFile:
+ """Content of a single ``.mdx`` file produced by mdxify."""
+
+ path: Path
+ content: str
+
+
+class MdxCodeDocumentation:
+    """Run mdxify once and cache the resulting files.
+
+    Args:
+        file_filters: Substrings to exclude from output filenames.
+            Defaults to ``["__init__"]``.
+
+    Example::
+
+        doc = MdxCodeDocumentation()
+        files = doc.generate(context=ctx, modules_to_document=["infrahub_sdk.node"])
+    """
+
+    def __init__(
+        self,
+        file_filters: list[str] | None = None,
+    ) -> None:
+        self.file_filters = file_filters or ["__init__"]
+        self._files: dict[str, MdxFile] | None = None
+
+    def generate(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+        """Return mdxify results, running the tool on first call only."""
+        if self._files is None:
+            self._files = self._execute_mdxify(context, modules_to_document)
+        return self._files
+
+    def _execute_mdxify(self, context: Context, modules_to_document: list[str]) -> dict[str, MdxFile]:
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            exec_cmd = f"mdxify {' '.join(modules_to_document)} --output-dir {tmp_dir}"
+            context.run(exec_cmd, pty=True)
+
+            results: dict[str, MdxFile] = {}
+            for mdx_file in Path(tmp_dir).glob("*.mdx"):
+                if any(f.lower() in mdx_file.name.lower() for f in self.file_filters):  # lowercase both sides for a consistent case-insensitive match
+                    continue
+                content = mdx_file.read_text(encoding="utf-8")
+                results[mdx_file.name] = MdxFile(path=mdx_file, content=content)  # NOTE(review): path lives in tmp_dir, deleted on exit — rely on .content
+
+            return results
diff --git a/docs/docs_generation/helpers.py b/docs/docs_generation/helpers.py
new file mode 100644
index 00000000..9951a80c
--- /dev/null
+++ b/docs/docs_generation/helpers.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import Any
+
+from pydantic_settings import EnvSettingsSource
+
+from infrahub_sdk.config import ConfigBase
+
+
+def get_env_vars() -> dict[str, list[str]]:
+    """Extract environment variable names for each field of ``ConfigBase``.
+
+    Returns:
+        Mapping of field name to list of upper-cased environment variable names.
+    """
+    env_vars: dict[str, list[str]] = defaultdict(list)
+    settings = ConfigBase()
+    env_settings = EnvSettingsSource(settings.__class__, env_prefix=settings.model_config.get("env_prefix", ""))
+
+    for field_name, field in ConfigBase.model_fields.items():  # class access: instance ``.model_fields`` is deprecated since Pydantic 2.11
+        for field_key, field_env_name, _ in env_settings._extract_field_info(field, field_name):  # NOTE(review): private pydantic-settings API — may break on upgrade
+            env_vars[field_key].append(field_env_name.upper())
+
+    return env_vars
+
+
+def _resolve_allof(prop: dict[str, Any], definitions: dict[str, Any]) -> tuple[list[Any], str]:
+ """Resolve an ``allOf`` JSON Schema reference to extract enum choices and type."""
+ if "allOf" not in prop:
+ return [], ""
+ ref_name = prop["allOf"][0]["$ref"].split("/")[-1]
+ ref_def = definitions.get(ref_name, {})
+ return ref_def.get("enum", []), ref_def.get("type", "")
+
+
+def _resolve_anyof_type(prop: dict[str, Any]) -> str:
+ """Resolve an ``anyOf`` to a comma-separated type string, excluding ``null``."""
+ if "anyOf" not in prop:
+ return ""
+ return ", ".join(i["type"] for i in prop["anyOf"] if "type" in i and i["type"] != "null")
+
+
+def build_config_properties() -> list[dict[str, Any]]:
+ """Build the list of configuration properties for SDK config documentation.
+
+ Returns:
+ List of dicts with keys: ``name``, ``description``, ``type``,
+ ``choices``, ``default``, ``env_vars``.
+ """
+ schema = ConfigBase.model_json_schema()
+ env_vars = get_env_vars()
+ definitions = schema.get("$defs", {})
+
+ properties = []
+ for name, prop in schema["properties"].items():
+ choices, kind = _resolve_allof(prop, definitions)
+ composed_type = _resolve_anyof_type(prop)
+
+ properties.append(
+ {
+ "name": name,
+ "description": prop.get("description", ""),
+ "type": prop.get("type", kind) or composed_type or "object",
+ "choices": choices,
+ "default": prop.get("default", ""),
+ "env_vars": env_vars.get(name, []),
+ }
+ )
+
+ return properties
diff --git a/docs/docs_generation/pages/__init__.py b/docs/docs_generation/pages/__init__.py
new file mode 100644
index 00000000..58211bd4
--- /dev/null
+++ b/docs/docs_generation/pages/__init__.py
@@ -0,0 +1,8 @@
+from __future__ import annotations
+
+from .base import DocPage, MDXDocPage
+
+__all__ = [
+ "DocPage",
+ "MDXDocPage",
+]
diff --git a/docs/docs_generation/pages/base.py b/docs/docs_generation/pages/base.py
new file mode 100644
index 00000000..6527d1b8
--- /dev/null
+++ b/docs/docs_generation/pages/base.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods import ADocContentGenMethod
+
+
+class DocPage:
+ """A documentation page whose content is produced by an injected generation method.
+
+ Args:
+ content_gen_method: Strategy that produces the page content as a string.
+
+ Example::
+
+ page = DocPage(content_gen_method=Jinja2DocContentGenMethod(...))
+ print(page.content())
+ """
+
+ def __init__(self, content_gen_method: ADocContentGenMethod) -> None:
+ self.content_gen_method = content_gen_method
+
+ def content(self) -> str:
+ return self.content_gen_method.apply()
+
+
+class MDXDocPage:
+    """Decorator around a ``DocPage`` that writes its rendered content to an ``.mdx`` file.
+
+    Args:
+        page: The documentation page whose content will be rendered.
+        output_path: File path where the ``.mdx`` output will be written.
+
+    Example::
+
+        mdx = MDXDocPage(page=my_page, output_path=Path("docs/ref/client.mdx"))
+        mdx.to_mdx()
+    """
+
+    def __init__(self, page: DocPage, output_path: Path) -> None:
+        self.page = page
+        self.output_path = output_path
+
+    def to_mdx(self) -> None:
+        rendered = self.page.content()
+        self.output_path.parent.mkdir(parents=True, exist_ok=True)
+        self.output_path.write_text(rendered, encoding="utf-8")
+        print(f"Docs saved to: {self.output_path}")
diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts
index c9f85753..e3a23cca 100644
--- a/docs/docusaurus.config.ts
+++ b/docs/docusaurus.config.ts
@@ -37,7 +37,7 @@ const config: Config = {
editUrl: "https://github.com/opsmill/infrahub-sdk-python/tree/stable/docs",
path: 'docs/python-sdk',
routeBasePath: 'python-sdk',
- sidebarPath: './sidebars-python-sdk.ts',
+ sidebarPath: './sidebars/sidebars-python-sdk.ts',
sidebarCollapsed: true,
},
blog: false,
@@ -55,7 +55,7 @@ const config: Config = {
path: 'docs/infrahubctl',
routeBasePath: 'infrahubctl',
sidebarCollapsed: false,
- sidebarPath: './sidebars-infrahubctl.ts',
+ sidebarPath: './sidebars/sidebars-infrahubctl.ts',
},
],
],
diff --git a/docs/package-lock.json b/docs/package-lock.json
index 6a594bda..73190008 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -10,6 +10,7 @@
"dependencies": {
"@docusaurus/core": "^3.8.1",
"@docusaurus/preset-classic": "^3.8.1",
+ "@iconify/react": "^6.0.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@@ -22,7 +23,8 @@
"@docusaurus/module-type-aliases": "^3.8.1",
"@docusaurus/tsconfig": "^3.8.1",
"@docusaurus/types": "^3.8.1",
- "typescript": "~5.5.2"
+ "typescript": "~5.5.2",
+ "vitest": "^4.0.17"
},
"engines": {
"node": ">=18.0"
@@ -4005,6 +4007,448 @@
"node": ">=20.0"
}
},
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz",
+ "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz",
+ "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz",
+ "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz",
+ "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz",
+ "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz",
+ "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz",
+ "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz",
+ "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz",
+ "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz",
+ "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz",
+ "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz",
+ "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz",
+ "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz",
+ "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz",
+ "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz",
+ "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz",
+ "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz",
+ "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz",
+ "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openharmony-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz",
+ "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz",
+ "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz",
+ "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz",
+ "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz",
+ "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@hapi/hoek": {
"version": "9.3.0",
"resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz",
@@ -4020,6 +4464,27 @@
"@hapi/hoek": "^9.0.0"
}
},
+ "node_modules/@iconify/react": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/react/-/react-6.0.0.tgz",
+ "integrity": "sha512-eqNscABVZS8eCpZLU/L5F5UokMS9mnCf56iS1nM9YYHdH8ZxqZL9zyjSwW60IOQFsXZkilbBiv+1paMXBhSQnw==",
+ "license": "MIT",
+ "dependencies": {
+ "@iconify/types": "^2.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/cyberalien"
+ },
+ "peerDependencies": {
+ "react": ">=16"
+ }
+ },
+ "node_modules/@iconify/types": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
+ "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
+ "license": "MIT"
+ },
"node_modules/@jest/schemas": {
"version": "29.6.3",
"resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
@@ -4436,6 +4901,356 @@
"integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==",
"license": "MIT"
},
+ "node_modules/@rollup/rollup-android-arm-eabi": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz",
+ "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-android-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz",
+ "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz",
+ "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-darwin-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz",
+ "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz",
+ "integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-freebsd-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz",
+ "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz",
+ "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm-musleabihf": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz",
+ "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz",
+ "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-arm64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz",
+ "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz",
+ "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-loong64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz",
+ "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz",
+ "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-ppc64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz",
+ "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz",
+ "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-riscv64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz",
+ "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-s390x-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz",
+ "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz",
+ "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-linux-x64-musl": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz",
+ "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ]
+ },
+ "node_modules/@rollup/rollup-openbsd-x64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz",
+ "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ]
+ },
+ "node_modules/@rollup/rollup-openharmony-arm64": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz",
+ "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openharmony"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-arm64-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz",
+ "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-ia32-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz",
+ "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-gnu": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz",
+ "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
+ "node_modules/@rollup/rollup-win32-x64-msvc": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz",
+ "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ]
+ },
"node_modules/@sideway/address": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz",
@@ -4795,6 +5610,17 @@
"@types/node": "*"
}
},
+ "node_modules/@types/chai": {
+ "version": "5.2.3",
+ "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz",
+ "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/deep-eql": "*",
+ "assertion-error": "^2.0.1"
+ }
+ },
"node_modules/@types/connect": {
"version": "3.4.38",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
@@ -4823,6 +5649,13 @@
"@types/ms": "*"
}
},
+ "node_modules/@types/deep-eql": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz",
+ "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/eslint": {
"version": "9.6.1",
"resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz",
@@ -5163,23 +5996,134 @@
"integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
"license": "ISC"
},
- "node_modules/@vercel/oidc": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/@vercel/oidc/-/oidc-3.0.1.tgz",
- "integrity": "sha512-V/YRVrJDqM6VaMBjRUrd6qRMrTKvZjHdVdEmdXsOZMulTa3iK98ijKTc3wldBmst6W5rHpqMoKllKcBAHgN7GQ==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">= 20"
+ "node_modules/@vercel/oidc": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/@vercel/oidc/-/oidc-3.0.1.tgz",
+ "integrity": "sha512-V/YRVrJDqM6VaMBjRUrd6qRMrTKvZjHdVdEmdXsOZMulTa3iK98ijKTc3wldBmst6W5rHpqMoKllKcBAHgN7GQ==",
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">= 20"
+ }
+ },
+ "node_modules/@vimeo/player": {
+ "version": "2.29.0",
+ "resolved": "https://registry.npmjs.org/@vimeo/player/-/player-2.29.0.tgz",
+ "integrity": "sha512-9JjvjeqUndb9otCCFd0/+2ESsLk7VkDE6sxOBy9iy2ukezuQbplVRi+g9g59yAurKofbmTi/KcKxBGO/22zWRw==",
+ "license": "MIT",
+ "dependencies": {
+ "native-promise-only": "0.8.1",
+ "weakmap-polyfill": "2.0.4"
+ }
+ },
+ "node_modules/@vitest/expect": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.18.tgz",
+ "integrity": "sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@standard-schema/spec": "^1.0.0",
+ "@types/chai": "^5.2.2",
+ "@vitest/spy": "4.0.18",
+ "@vitest/utils": "4.0.18",
+ "chai": "^6.2.1",
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/mocker": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.18.tgz",
+ "integrity": "sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/spy": "4.0.18",
+ "estree-walker": "^3.0.3",
+ "magic-string": "^0.30.21"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "msw": "^2.4.9",
+ "vite": "^6.0.0 || ^7.0.0-0"
+ },
+ "peerDependenciesMeta": {
+ "msw": {
+ "optional": true
+ },
+ "vite": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/@vitest/pretty-format": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.18.tgz",
+ "integrity": "sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/runner": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.18.tgz",
+ "integrity": "sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/utils": "4.0.18",
+ "pathe": "^2.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/snapshot": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.18.tgz",
+ "integrity": "sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/pretty-format": "4.0.18",
+ "magic-string": "^0.30.21",
+ "pathe": "^2.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ }
+ },
+ "node_modules/@vitest/spy": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.18.tgz",
+ "integrity": "sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==",
+ "dev": true,
+ "license": "MIT",
+ "funding": {
+ "url": "https://opencollective.com/vitest"
}
},
- "node_modules/@vimeo/player": {
- "version": "2.29.0",
- "resolved": "https://registry.npmjs.org/@vimeo/player/-/player-2.29.0.tgz",
- "integrity": "sha512-9JjvjeqUndb9otCCFd0/+2ESsLk7VkDE6sxOBy9iy2ukezuQbplVRi+g9g59yAurKofbmTi/KcKxBGO/22zWRw==",
+ "node_modules/@vitest/utils": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.18.tgz",
+ "integrity": "sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==",
+ "dev": true,
"license": "MIT",
"dependencies": {
- "native-promise-only": "0.8.1",
- "weakmap-polyfill": "2.0.4"
+ "@vitest/pretty-format": "4.0.18",
+ "tinyrainbow": "^3.0.3"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
}
},
"node_modules/@webassemblyjs/ast": {
@@ -5682,6 +6626,16 @@
"node": ">=8"
}
},
+ "node_modules/assertion-error": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
+ "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ }
+ },
"node_modules/astring": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz",
@@ -6229,6 +7183,16 @@
"react": ">=17.0.0"
}
},
+ "node_modules/chai": {
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
+ "integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@@ -7882,6 +8846,48 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/esbuild": {
+ "version": "0.27.3",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
+ "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.27.3",
+ "@esbuild/android-arm": "0.27.3",
+ "@esbuild/android-arm64": "0.27.3",
+ "@esbuild/android-x64": "0.27.3",
+ "@esbuild/darwin-arm64": "0.27.3",
+ "@esbuild/darwin-x64": "0.27.3",
+ "@esbuild/freebsd-arm64": "0.27.3",
+ "@esbuild/freebsd-x64": "0.27.3",
+ "@esbuild/linux-arm": "0.27.3",
+ "@esbuild/linux-arm64": "0.27.3",
+ "@esbuild/linux-ia32": "0.27.3",
+ "@esbuild/linux-loong64": "0.27.3",
+ "@esbuild/linux-mips64el": "0.27.3",
+ "@esbuild/linux-ppc64": "0.27.3",
+ "@esbuild/linux-riscv64": "0.27.3",
+ "@esbuild/linux-s390x": "0.27.3",
+ "@esbuild/linux-x64": "0.27.3",
+ "@esbuild/netbsd-arm64": "0.27.3",
+ "@esbuild/netbsd-x64": "0.27.3",
+ "@esbuild/openbsd-arm64": "0.27.3",
+ "@esbuild/openbsd-x64": "0.27.3",
+ "@esbuild/openharmony-arm64": "0.27.3",
+ "@esbuild/sunos-x64": "0.27.3",
+ "@esbuild/win32-arm64": "0.27.3",
+ "@esbuild/win32-ia32": "0.27.3",
+ "@esbuild/win32-x64": "0.27.3"
+ }
+ },
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@@ -8169,6 +9175,16 @@
"url": "https://github.com/sindresorhus/execa?sponsor=1"
}
},
+ "node_modules/expect-type": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
+ "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": ">=12.0.0"
+ }
+ },
"node_modules/express": {
"version": "4.21.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
@@ -10342,6 +11358,16 @@
"yallist": "^3.0.2"
}
},
+ "node_modules/magic-string": {
+ "version": "0.30.21",
+ "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
+ "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@jridgewell/sourcemap-codec": "^1.5.5"
+ }
+ },
"node_modules/markdown-extensions": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz",
@@ -13064,6 +14090,17 @@
"integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==",
"license": "MIT"
},
+ "node_modules/obug": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz",
+ "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==",
+ "dev": true,
+ "funding": [
+ "https://github.com/sponsors/sxzz",
+ "https://opencollective.com/debug"
+ ],
+ "license": "MIT"
+ },
"node_modules/on-finished": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
@@ -13433,6 +14470,13 @@
"node": ">=8"
}
},
+ "node_modules/pathe": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
+ "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@@ -15988,6 +17032,51 @@
"node": ">=0.10.0"
}
},
+ "node_modules/rollup": {
+ "version": "4.57.1",
+ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz",
+ "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/estree": "1.0.8"
+ },
+ "bin": {
+ "rollup": "dist/bin/rollup"
+ },
+ "engines": {
+ "node": ">=18.0.0",
+ "npm": ">=8.0.0"
+ },
+ "optionalDependencies": {
+ "@rollup/rollup-android-arm-eabi": "4.57.1",
+ "@rollup/rollup-android-arm64": "4.57.1",
+ "@rollup/rollup-darwin-arm64": "4.57.1",
+ "@rollup/rollup-darwin-x64": "4.57.1",
+ "@rollup/rollup-freebsd-arm64": "4.57.1",
+ "@rollup/rollup-freebsd-x64": "4.57.1",
+ "@rollup/rollup-linux-arm-gnueabihf": "4.57.1",
+ "@rollup/rollup-linux-arm-musleabihf": "4.57.1",
+ "@rollup/rollup-linux-arm64-gnu": "4.57.1",
+ "@rollup/rollup-linux-arm64-musl": "4.57.1",
+ "@rollup/rollup-linux-loong64-gnu": "4.57.1",
+ "@rollup/rollup-linux-loong64-musl": "4.57.1",
+ "@rollup/rollup-linux-ppc64-gnu": "4.57.1",
+ "@rollup/rollup-linux-ppc64-musl": "4.57.1",
+ "@rollup/rollup-linux-riscv64-gnu": "4.57.1",
+ "@rollup/rollup-linux-riscv64-musl": "4.57.1",
+ "@rollup/rollup-linux-s390x-gnu": "4.57.1",
+ "@rollup/rollup-linux-x64-gnu": "4.57.1",
+ "@rollup/rollup-linux-x64-musl": "4.57.1",
+ "@rollup/rollup-openbsd-x64": "4.57.1",
+ "@rollup/rollup-openharmony-arm64": "4.57.1",
+ "@rollup/rollup-win32-arm64-msvc": "4.57.1",
+ "@rollup/rollup-win32-ia32-msvc": "4.57.1",
+ "@rollup/rollup-win32-x64-gnu": "4.57.1",
+ "@rollup/rollup-win32-x64-msvc": "4.57.1",
+ "fsevents": "~2.3.2"
+ }
+ },
"node_modules/rtlcss": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/rtlcss/-/rtlcss-4.3.0.tgz",
@@ -16499,6 +17588,13 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/siginfo": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
+ "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
+ "dev": true,
+ "license": "ISC"
+ },
"node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
@@ -16708,6 +17804,13 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/stackback": {
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
+ "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@@ -16718,9 +17821,9 @@
}
},
"node_modules/std-env": {
- "version": "3.9.0",
- "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz",
- "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==",
+ "version": "3.10.0",
+ "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz",
+ "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
"license": "MIT"
},
"node_modules/string_decoder": {
@@ -17115,6 +18218,71 @@
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==",
"license": "MIT"
},
+ "node_modules/tinybench": {
+ "version": "2.9.0",
+ "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz",
+ "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/tinyexec": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz",
+ "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/tinyglobby": {
+ "version": "0.2.15",
+ "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
+ "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3"
+ },
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/SuperchupuDev"
+ }
+ },
+ "node_modules/tinyglobby/node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/tinyglobby/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
"node_modules/tinypool": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz",
@@ -17124,6 +18292,16 @@
"node": "^18.0.0 || >=20.0.0"
}
},
+ "node_modules/tinyrainbow": {
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz",
+ "integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=14.0.0"
+ }
+ },
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -17809,6 +18987,203 @@
"@vimeo/player": "2.29.0"
}
},
+ "node_modules/vite": {
+ "version": "7.3.1",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz",
+ "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "esbuild": "^0.27.0",
+ "fdir": "^6.5.0",
+ "picomatch": "^4.0.3",
+ "postcss": "^8.5.6",
+ "rollup": "^4.43.0",
+ "tinyglobby": "^0.2.15"
+ },
+ "bin": {
+ "vite": "bin/vite.js"
+ },
+ "engines": {
+ "node": "^20.19.0 || >=22.12.0"
+ },
+ "funding": {
+ "url": "https://github.com/vitejs/vite?sponsor=1"
+ },
+ "optionalDependencies": {
+ "fsevents": "~2.3.3"
+ },
+ "peerDependencies": {
+ "@types/node": "^20.19.0 || >=22.12.0",
+ "jiti": ">=1.21.0",
+ "less": "^4.0.0",
+ "lightningcss": "^1.21.0",
+ "sass": "^1.70.0",
+ "sass-embedded": "^1.70.0",
+ "stylus": ">=0.54.8",
+ "sugarss": "^5.0.0",
+ "terser": "^5.16.0",
+ "tsx": "^4.8.1",
+ "yaml": "^2.4.2"
+ },
+ "peerDependenciesMeta": {
+ "@types/node": {
+ "optional": true
+ },
+ "jiti": {
+ "optional": true
+ },
+ "less": {
+ "optional": true
+ },
+ "lightningcss": {
+ "optional": true
+ },
+ "sass": {
+ "optional": true
+ },
+ "sass-embedded": {
+ "optional": true
+ },
+ "stylus": {
+ "optional": true
+ },
+ "sugarss": {
+ "optional": true
+ },
+ "terser": {
+ "optional": true
+ },
+ "tsx": {
+ "optional": true
+ },
+ "yaml": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite/node_modules/fdir": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+ "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12.0.0"
+ },
+ "peerDependencies": {
+ "picomatch": "^3 || ^4"
+ },
+ "peerDependenciesMeta": {
+ "picomatch": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vite/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
+ "node_modules/vitest": {
+ "version": "4.0.18",
+ "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.18.tgz",
+ "integrity": "sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@vitest/expect": "4.0.18",
+ "@vitest/mocker": "4.0.18",
+ "@vitest/pretty-format": "4.0.18",
+ "@vitest/runner": "4.0.18",
+ "@vitest/snapshot": "4.0.18",
+ "@vitest/spy": "4.0.18",
+ "@vitest/utils": "4.0.18",
+ "es-module-lexer": "^1.7.0",
+ "expect-type": "^1.2.2",
+ "magic-string": "^0.30.21",
+ "obug": "^2.1.1",
+ "pathe": "^2.0.3",
+ "picomatch": "^4.0.3",
+ "std-env": "^3.10.0",
+ "tinybench": "^2.9.0",
+ "tinyexec": "^1.0.2",
+ "tinyglobby": "^0.2.15",
+ "tinyrainbow": "^3.0.3",
+ "vite": "^6.0.0 || ^7.0.0",
+ "why-is-node-running": "^2.3.0"
+ },
+ "bin": {
+ "vitest": "vitest.mjs"
+ },
+ "engines": {
+ "node": "^20.0.0 || ^22.0.0 || >=24.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/vitest"
+ },
+ "peerDependencies": {
+ "@edge-runtime/vm": "*",
+ "@opentelemetry/api": "^1.9.0",
+ "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0",
+ "@vitest/browser-playwright": "4.0.18",
+ "@vitest/browser-preview": "4.0.18",
+ "@vitest/browser-webdriverio": "4.0.18",
+ "@vitest/ui": "4.0.18",
+ "happy-dom": "*",
+ "jsdom": "*"
+ },
+ "peerDependenciesMeta": {
+ "@edge-runtime/vm": {
+ "optional": true
+ },
+ "@opentelemetry/api": {
+ "optional": true
+ },
+ "@types/node": {
+ "optional": true
+ },
+ "@vitest/browser-playwright": {
+ "optional": true
+ },
+ "@vitest/browser-preview": {
+ "optional": true
+ },
+ "@vitest/browser-webdriverio": {
+ "optional": true
+ },
+ "@vitest/ui": {
+ "optional": true
+ },
+ "happy-dom": {
+ "optional": true
+ },
+ "jsdom": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/vitest/node_modules/picomatch": {
+ "version": "4.0.3",
+ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=12"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/jonschlinkert"
+ }
+ },
"node_modules/watchpack": {
"version": "2.4.4",
"resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz",
@@ -18254,6 +19629,23 @@
"node": ">= 8"
}
},
+ "node_modules/why-is-node-running": {
+ "version": "2.3.0",
+ "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
+ "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "siginfo": "^2.0.0",
+ "stackback": "0.0.2"
+ },
+ "bin": {
+ "why-is-node-running": "cli.js"
+ },
+ "engines": {
+ "node": ">=8"
+ }
+ },
"node_modules/widest-line": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz",
diff --git a/docs/package.json b/docs/package.json
index 0dc1e714..b1ca202e 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -12,11 +12,14 @@
"serve": "docusaurus serve",
"write-translations": "docusaurus write-translations",
"write-heading-ids": "docusaurus write-heading-ids",
- "typecheck": "tsc"
+ "typecheck": "tsc",
+ "test": "vitest run",
+ "test:watch": "vitest watch"
},
"dependencies": {
"@docusaurus/core": "^3.8.1",
"@docusaurus/preset-classic": "^3.8.1",
+ "@iconify/react": "^6.0.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"prism-react-renderer": "^2.3.0",
@@ -29,7 +32,8 @@
"@docusaurus/module-type-aliases": "^3.8.1",
"@docusaurus/tsconfig": "^3.8.1",
"@docusaurus/types": "^3.8.1",
- "typescript": "~5.5.2"
+ "typescript": "~5.5.2",
+ "vitest": "^4.0.17"
},
"browserslist": {
"production": [
diff --git a/docs/sidebars-infrahubctl.ts b/docs/sidebars-infrahubctl.ts
deleted file mode 100644
index c50587f8..00000000
--- a/docs/sidebars-infrahubctl.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
-
-const sidebars: SidebarsConfig = {
- infrahubctlSidebar: [
- {
- type: 'doc',
- id: 'infrahubctl',
- label: 'Infrahubctl CLI Tool',
- },
- {
- type: 'category',
- label: 'Commands',
- items: [
- 'infrahubctl-branch',
- 'infrahubctl-check',
- 'infrahubctl-dump',
- 'infrahubctl-generator',
- 'infrahubctl-info',
- 'infrahubctl-load',
- 'infrahubctl-menu',
- 'infrahubctl-object',
- 'infrahubctl-protocols',
- 'infrahubctl-render',
- 'infrahubctl-repository',
- 'infrahubctl-run',
- 'infrahubctl-schema',
- 'infrahubctl-task',
- 'infrahubctl-transform',
- 'infrahubctl-validate',
- 'infrahubctl-version'
- ],
- },
- ],
-};
-
-export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars-python-sdk.ts b/docs/sidebars-python-sdk.ts
deleted file mode 100644
index e2fc932c..00000000
--- a/docs/sidebars-python-sdk.ts
+++ /dev/null
@@ -1,51 +0,0 @@
-import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
-
-const sidebars: SidebarsConfig = {
- pythonSdkSidebar: [
- {
- type: 'category',
- label: 'Python SDK docs',
- link: {
- type: 'doc',
- id: 'introduction',
- },
- items: [
- {
- type: 'category',
- label: 'Guides',
- items: [
- 'guides/installation',
- 'guides/client',
- 'guides/query_data',
- 'guides/create_update_delete',
- 'guides/branches',
- 'guides/store',
- 'guides/tracking',
- 'guides/python-typing',
- 'guides/batch',
- 'guides/object-storage',
- 'guides/resource-manager',
- ],
- },
- {
- type: 'category',
- label: 'Topics',
- items: [
- 'topics/tracking',
- 'topics/object_file',
- ],
- },
- {
- type: 'category',
- label: 'Reference',
- items: [
- 'reference/config',
- 'reference/templating',
- ],
- },
- ],
- },
- ],
-};
-
-export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars/sidebar-utils.test.ts b/docs/sidebars/sidebar-utils.test.ts
new file mode 100644
index 00000000..010a9552
--- /dev/null
+++ b/docs/sidebars/sidebar-utils.test.ts
@@ -0,0 +1,119 @@
+import { describe, expect, it } from "vitest";
+
+import { getCommandItems, getItemsWithOrder } from "./sidebar-utils";
+
+describe("getCommandItems", () => {
+ it("should filter and sort mdx command files", () => {
+ const files = [
+ "infrahubctl.mdx",
+ "infrahubctl-branch.mdx",
+ "infrahubctl-validate.mdx",
+ "infrahubctl-check.mdx",
+ ];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual([
+ "infrahubctl-branch",
+ "infrahubctl-check",
+ "infrahubctl-validate",
+ ]);
+ });
+
+ it("should exclude the index file", () => {
+ const files = ["infrahubctl.mdx", "infrahubctl-branch.mdx"];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual(["infrahubctl-branch"]);
+ });
+
+ it("should ignore non-mdx files", () => {
+ const files = ["infrahubctl-branch.mdx", "README.md", ".DS_Store", "image.png"];
+
+ const result = getCommandItems(files);
+
+ expect(result).toStrictEqual(["infrahubctl-branch"]);
+ });
+
+ it("should return an empty array when only the index file exists", () => {
+ const result = getCommandItems(["infrahubctl.mdx"]);
+
+ expect(result).toStrictEqual([]);
+ });
+
+ it("should return an empty array for an empty directory", () => {
+ const result = getCommandItems([]);
+
+ expect(result).toStrictEqual([]);
+ });
+
+ it("should support a custom index file name", () => {
+ const files = ["index.mdx", "command-a.mdx", "command-b.mdx"];
+
+ const result = getCommandItems(files, "index.mdx");
+
+ expect(result).toStrictEqual(["command-a", "command-b"]);
+ });
+});
+
+describe("getItemsWithOrder", () => {
+ it("should preserve the defined order for known items", () => {
+ const files = ["client.mdx", "installation.mdx", "batch.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation", "guides/client", "guides/batch"]);
+ });
+
+ it("should append new files sorted alphabetically after ordered items", () => {
+ const files = ["client.mdx", "installation.mdx", "batch.mdx", "new-guide.mdx", "advanced.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual([
+ "guides/installation",
+ "guides/client",
+ "guides/batch",
+ "guides/advanced",
+ "guides/new-guide",
+ ]);
+ });
+
+ it("should skip ordered items that no longer exist on disk", () => {
+ const files = ["installation.mdx", "batch.mdx"];
+ const orderedIds = ["guides/installation", "guides/client", "guides/batch"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation", "guides/batch"]);
+ });
+
+ it("should ignore non-mdx files", () => {
+ const files = ["installation.mdx", "README.md", ".DS_Store"];
+ const orderedIds = ["guides/installation"];
+
+ const result = getItemsWithOrder(files, orderedIds, "guides");
+
+ expect(result).toStrictEqual(["guides/installation"]);
+ });
+
+ it("should work without a prefix", () => {
+ const files = ["tracking.mdx", "object_file.mdx", "new-topic.mdx"];
+ const orderedIds = ["tracking", "object_file"];
+
+ const result = getItemsWithOrder(files, orderedIds);
+
+ expect(result).toStrictEqual(["tracking", "object_file", "new-topic"]);
+ });
+
+ it("should return all files sorted when no ordered ids are provided", () => {
+ const files = ["batch.mdx", "installation.mdx", "client.mdx"];
+
+ const result = getItemsWithOrder(files, [], "guides");
+
+ expect(result).toStrictEqual(["guides/batch", "guides/client", "guides/installation"]);
+ });
+});
diff --git a/docs/sidebars/sidebar-utils.ts b/docs/sidebars/sidebar-utils.ts
new file mode 100644
index 00000000..0860904c
--- /dev/null
+++ b/docs/sidebars/sidebar-utils.ts
@@ -0,0 +1,23 @@
+export function getCommandItems(files: string[], indexFile: string = 'infrahubctl.mdx'): string[] {
+ return files
+ .filter(file => file.endsWith('.mdx') && file !== indexFile)
+ .map(file => file.replace('.mdx', ''))
+ .sort();
+}
+
+export function getItemsWithOrder(files: string[], orderedIds: string[], prefix: string = ''): string[] {
+ const allIds = files
+ .filter(file => file.endsWith('.mdx'))
+ .map(file => {
+ const base = file.replace('.mdx', '');
+ return prefix ? `${prefix}/${base}` : base;
+ });
+
+ const existingIds = new Set(allIds);
+ const ordered = orderedIds.filter(id => existingIds.has(id));
+
+ const orderedSet = new Set(orderedIds);
+ const remaining = allIds.filter(id => !orderedSet.has(id)).sort();
+
+ return [...ordered, ...remaining];
+}
diff --git a/docs/sidebars/sidebars-infrahubctl.ts b/docs/sidebars/sidebars-infrahubctl.ts
new file mode 100644
index 00000000..bd92511d
--- /dev/null
+++ b/docs/sidebars/sidebars-infrahubctl.ts
@@ -0,0 +1,24 @@
+import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
+import {readdirSync} from 'fs';
+import {join} from 'path';
+import {getCommandItems} from './sidebar-utils';
+
+const docsDir = join(__dirname, '..', 'docs', 'infrahubctl');
+const commandItems = getCommandItems(readdirSync(docsDir));
+
+const sidebars: SidebarsConfig = {
+ infrahubctlSidebar: [
+ {
+ type: 'doc',
+ id: 'infrahubctl',
+ label: 'Infrahubctl CLI Tool',
+ },
+ {
+ type: 'category',
+ label: 'Commands',
+ items: commandItems,
+ },
+ ],
+};
+
+export default sidebars;
\ No newline at end of file
diff --git a/docs/sidebars/sidebars-python-sdk.ts b/docs/sidebars/sidebars-python-sdk.ts
new file mode 100644
index 00000000..3adbac3e
--- /dev/null
+++ b/docs/sidebars/sidebars-python-sdk.ts
@@ -0,0 +1,86 @@
+import type { SidebarsConfig } from '@docusaurus/plugin-content-docs';
+import { readdirSync } from 'fs';
+import { join } from 'path';
+import { getItemsWithOrder } from './sidebar-utils';
+
+const pythonSdkDocsDir = join(__dirname, '..', 'docs', 'python-sdk');
+
+const guidesItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'guides')),
+ [
+ 'guides/installation',
+ 'guides/client',
+ 'guides/query_data',
+ 'guides/create_update_delete',
+ 'guides/branches',
+ 'guides/store',
+ 'guides/tracking',
+ 'guides/python-typing',
+ 'guides/batch',
+ 'guides/object-storage',
+ 'guides/resource-manager',
+ ],
+ 'guides',
+);
+
+const topicsItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'topics')),
+ [
+ 'topics/tracking',
+ 'topics/object_file',
+ ],
+ 'topics',
+);
+
+const referenceItems = getItemsWithOrder(
+ readdirSync(join(pythonSdkDocsDir, 'reference')),
+ [
+ 'reference/config',
+ 'reference/templating',
+ ],
+ 'reference',
+);
+
+const sidebars: SidebarsConfig = {
+ pythonSdkSidebar: [
+ {
+ type: 'category',
+ label: 'Python SDK docs',
+ link: {
+ type: 'doc',
+ id: 'introduction',
+ },
+ items: [
+ {
+ type: 'category',
+ label: 'Guides',
+ items: guidesItems,
+ },
+ {
+ type: 'category',
+ label: 'Topics',
+ items: topicsItems,
+ },
+ {
+ type: 'category',
+ label: 'Reference',
+ items: [
+ {
+ type: 'category',
+ label: 'Python SDK API',
+ items: [
+ {
+ type: 'autogenerated',
+ dirName: 'sdk_ref',
+ },
+ ],
+ },
+ ...referenceItems,
+ ],
+ },
+ ],
+ },
+ ],
+};
+
+export default sidebars;
\ No newline at end of file
diff --git a/docs/src/theme/MDXComponents.js b/docs/src/theme/MDXComponents.js
new file mode 100644
index 00000000..cea81a13
--- /dev/null
+++ b/docs/src/theme/MDXComponents.js
@@ -0,0 +1,10 @@
+import React from 'react';
+// Import the original mapper
+import MDXComponents from '@theme-original/MDXComponents';
+import { Icon } from '@iconify/react'; // Import the entire Iconify library.
+
+export default {
+ // Re-use the default mapping
+ ...MDXComponents,
+  Icon: Icon, // Make the iconify Icon component available in MDX as <Icon />.
+};
\ No newline at end of file
diff --git a/docs/vitest.config.ts b/docs/vitest.config.ts
new file mode 100644
index 00000000..7d2a0b3b
--- /dev/null
+++ b/docs/vitest.config.ts
@@ -0,0 +1,8 @@
+import { defineConfig } from "vitest/config";
+
+export default defineConfig({
+ test: {
+ include: ["**/*.test.ts"],
+ exclude: ["**/node_modules/**", "**/build/**"],
+ },
+});
diff --git a/infrahub_sdk/ctl/utils.py b/infrahub_sdk/ctl/utils.py
index 968f6093..7130ea80 100644
--- a/infrahub_sdk/ctl/utils.py
+++ b/infrahub_sdk/ctl/utils.py
@@ -51,7 +51,7 @@ def init_logging(debug: bool = False) -> None:
def handle_exception(exc: Exception, console: Console, exit_code: int) -> NoReturn:
- """Handle exeception in a different fashion based on its type."""
+ """Handle exception in a different fashion based on its type."""
if isinstance(exc, Exit):
raise typer.Exit(code=exc.exit_code)
if isinstance(exc, AuthenticationError):
diff --git a/infrahub_sdk/jinja2.py b/infrahub_sdk/jinja2.py
index 29afbf06..d64d22c1 100644
--- a/infrahub_sdk/jinja2.py
+++ b/infrahub_sdk/jinja2.py
@@ -7,7 +7,7 @@
def identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[tuple[Frame, Syntax]]:
"""This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user.
- We use the rich's Traceback to parse the complete stack trace and extract Frames for each expection found in the trace.
+ We use the rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
"""
response = []
diff --git a/infrahub_sdk/pytest_plugin/items/base.py b/infrahub_sdk/pytest_plugin/items/base.py
index a1b35a00..ae08f036 100644
--- a/infrahub_sdk/pytest_plugin/items/base.py
+++ b/infrahub_sdk/pytest_plugin/items/base.py
@@ -75,7 +75,7 @@ def reportinfo(self) -> tuple[Path | str, int | None, str]:
def repository_base(self) -> str:
"""Return the path to the root of the repository
- This will be an absolute path if --infrahub-config-path is an absolut path as happens when
+ This will be an absolute path if --infrahub-config-path is an absolute path as happens when
tests are started from within Infrahub server.
"""
config_path: Path = getattr(self.session, _infrahub_config_path_attribute)
diff --git a/infrahub_sdk/template/__init__.py b/infrahub_sdk/template/__init__.py
index ff866ecd..c5607043 100644
--- a/infrahub_sdk/template/__init__.py
+++ b/infrahub_sdk/template/__init__.py
@@ -1,217 +1,28 @@
from __future__ import annotations
-import linecache
-from collections.abc import Callable
-from pathlib import Path
-from typing import Any, NoReturn
-
-import jinja2
-from jinja2 import meta, nodes
-from jinja2.sandbox import SandboxedEnvironment
-from netutils.utils import jinja2_convenience_function
-from rich.syntax import Syntax
-from rich.traceback import Traceback
-
-from .exceptions import (
+from .base import ATemplate
+from .jinja2 import Jinja2Template
+from .jinja2.exceptions import (
JinjaTemplateError,
JinjaTemplateNotFoundError,
JinjaTemplateOperationViolationError,
JinjaTemplateSyntaxError,
JinjaTemplateUndefinedError,
)
-from .filters import AVAILABLE_FILTERS
-from .models import UndefinedJinja2Error
-
-netutils_filters = jinja2_convenience_function()
-
-
-class Jinja2Template:
- def __init__(
- self,
- template: str | Path,
- template_directory: Path | None = None,
- filters: dict[str, Callable] | None = None,
- ) -> None:
- self.is_string_based = isinstance(template, str)
- self.is_file_based = isinstance(template, Path)
- self._template = str(template)
- self._template_directory = template_directory
- self._environment: jinja2.Environment | None = None
-
- self._available_filters = [filter_definition.name for filter_definition in AVAILABLE_FILTERS]
- self._trusted_filters = [
- filter_definition.name for filter_definition in AVAILABLE_FILTERS if filter_definition.trusted
- ]
-
- self._filters = filters or {}
- for user_filter in self._filters:
- self._available_filters.append(user_filter)
- self._trusted_filters.append(user_filter)
-
- self._template_definition: jinja2.Template | None = None
-
- def get_environment(self) -> jinja2.Environment:
- if self._environment:
- return self._environment
-
- if self.is_string_based:
- return self._get_string_based_environment()
-
- return self._get_file_based_environment()
-
- def get_template(self) -> jinja2.Template:
- if self._template_definition:
- return self._template_definition
-
- try:
- if self.is_string_based:
- template = self._get_string_based_template()
- else:
- template = self._get_file_based_template()
- except jinja2.TemplateSyntaxError as exc:
- self._raise_template_syntax_error(error=exc)
- except jinja2.TemplateNotFound as exc:
- raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name))
-
- return template
-
- def get_variables(self) -> list[str]:
- env = self.get_environment()
-
- template_source = self._template
- if self.is_file_based and env.loader:
- template_source = env.loader.get_source(env, self._template)[0]
-
- try:
- template = env.parse(template_source)
- except jinja2.TemplateSyntaxError as exc:
- self._raise_template_syntax_error(error=exc)
-
- return sorted(meta.find_undeclared_variables(template))
-
- def validate(self, restricted: bool = True) -> None:
- allowed_list = self._available_filters
- if restricted:
- allowed_list = self._trusted_filters
-
- env = self.get_environment()
- template_source = self._template
- if self.is_file_based and env.loader:
- template_source = env.loader.get_source(env, self._template)[0]
-
- try:
- template = env.parse(template_source)
- except jinja2.TemplateSyntaxError as exc:
- self._raise_template_syntax_error(error=exc)
-
- for node in template.find_all(nodes.Filter):
- if node.name not in allowed_list:
- raise JinjaTemplateOperationViolationError(f"The '{node.name}' filter isn't allowed to be used")
-
- forbidden_operations = ["Call", "Import", "Include"]
- if self.is_string_based and any(node.__class__.__name__ in forbidden_operations for node in template.body):
- raise JinjaTemplateOperationViolationError(
- f"These operations are forbidden for string based templates: {forbidden_operations}"
- )
-
- async def render(self, variables: dict[str, Any]) -> str:
- template = self.get_template()
- try:
- output = await template.render_async(variables)
- except jinja2.exceptions.TemplateNotFound as exc:
- raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name), base_template=template.name)
- except jinja2.TemplateSyntaxError as exc:
- self._raise_template_syntax_error(error=exc)
- except jinja2.UndefinedError as exc:
- traceback = Traceback(show_locals=False)
- errors = _identify_faulty_jinja_code(traceback=traceback)
- raise JinjaTemplateUndefinedError(message=exc.message, errors=errors)
- except Exception as exc:
- if error_message := getattr(exc, "message", None):
- message = error_message
- else:
- message = str(exc)
- raise JinjaTemplateError(message=message or "Unknown template error")
-
- return output
-
- def _get_string_based_environment(self) -> jinja2.Environment:
- env = SandboxedEnvironment(enable_async=True, undefined=jinja2.StrictUndefined)
- self._set_filters(env=env)
- self._environment = env
- return self._environment
-
- def _get_file_based_environment(self) -> jinja2.Environment:
- template_loader = jinja2.FileSystemLoader(searchpath=str(self._template_directory))
- env = jinja2.Environment(
- loader=template_loader,
- trim_blocks=True,
- lstrip_blocks=True,
- enable_async=True,
- )
- self._set_filters(env=env)
- self._environment = env
- return self._environment
-
- def _set_filters(self, env: jinja2.Environment) -> None:
- for default_filter in list(env.filters.keys()):
- if default_filter not in self._available_filters:
- del env.filters[default_filter]
-
- # Add filters from netutils
- env.filters.update(
- {name: jinja_filter for name, jinja_filter in netutils_filters.items() if name in self._available_filters}
- )
- # Add user supplied filters
- env.filters.update(self._filters)
-
- def _get_string_based_template(self) -> jinja2.Template:
- env = self.get_environment()
- self._template_definition = env.from_string(self._template)
- return self._template_definition
-
- def _get_file_based_template(self) -> jinja2.Template:
- env = self.get_environment()
- self._template_definition = env.get_template(self._template)
- return self._template_definition
-
- def _raise_template_syntax_error(self, error: jinja2.TemplateSyntaxError) -> NoReturn:
- filename: str | None = None
- if error.filename and self._template_directory:
- filename = error.filename
- if error.filename.startswith(str(self._template_directory)):
- filename = error.filename[len(str(self._template_directory)) :]
-
- raise JinjaTemplateSyntaxError(message=error.message, filename=filename, lineno=error.lineno)
-
-
-def _identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[UndefinedJinja2Error]:
- """This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user.
-
- We use the rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
- """
- response = []
-
- # Extract only the Jinja related exception
- for frame in [frame for frame in traceback.trace.stacks[0].frames if not frame.filename.endswith(".py")]:
- code = "".join(linecache.getlines(frame.filename))
- if frame.filename == "":
- lexer_name = "text"
- else:
- lexer_name = Traceback._guess_lexer(frame.filename, code)
- syntax = Syntax(
- code,
- lexer_name,
- line_numbers=True,
- line_range=(
- frame.lineno - nbr_context_lines,
- frame.lineno + nbr_context_lines,
- ),
- highlight_lines={frame.lineno},
- code_width=88,
- theme=traceback.theme,
- dedent=False,
- )
- response.append(UndefinedJinja2Error(frame=frame, syntax=syntax))
-
- return response
+from .jinja2.filters import AVAILABLE_FILTERS, BUILTIN_FILTERS, NETUTILS_FILTERS, FilterDefinition
+from .jinja2.models import UndefinedJinja2Error
+
+__all__ = [
+ "AVAILABLE_FILTERS",
+ "BUILTIN_FILTERS",
+ "NETUTILS_FILTERS",
+ "ATemplate",
+ "FilterDefinition",
+ "Jinja2Template",
+ "JinjaTemplateError",
+ "JinjaTemplateNotFoundError",
+ "JinjaTemplateOperationViolationError",
+ "JinjaTemplateSyntaxError",
+ "JinjaTemplateUndefinedError",
+ "UndefinedJinja2Error",
+]
diff --git a/infrahub_sdk/template/base.py b/infrahub_sdk/template/base.py
new file mode 100644
index 00000000..9dfcb628
--- /dev/null
+++ b/infrahub_sdk/template/base.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import Any
+
+
+class ATemplate(ABC):
+ """Abstract base class defining the minimal template rendering contract."""
+
+ @abstractmethod
+ async def render(self, variables: dict[str, Any]) -> str: ...
diff --git a/infrahub_sdk/template/exceptions.py b/infrahub_sdk/template/exceptions.py
index 6ef60b43..5c45db88 100644
--- a/infrahub_sdk/template/exceptions.py
+++ b/infrahub_sdk/template/exceptions.py
@@ -1,41 +1,17 @@
from __future__ import annotations
-from typing import TYPE_CHECKING
-
-from infrahub_sdk.exceptions import Error
-
-if TYPE_CHECKING:
- from .models import UndefinedJinja2Error
-
-
-class JinjaTemplateError(Error):
- def __init__(self, message: str) -> None:
- self.message = message
-
- def __str__(self) -> str:
- return str(self.message)
-
-
-class JinjaTemplateNotFoundError(JinjaTemplateError):
- def __init__(self, message: str | None, filename: str, base_template: str | None = None) -> None:
- self.message = message or "Template Not Found"
- self.filename = filename
- self.base_template = base_template
-
-
-class JinjaTemplateSyntaxError(JinjaTemplateError):
- def __init__(self, message: str | None, lineno: int, filename: str | None = None) -> None:
- self.message = message or "Syntax Error"
- self.filename = filename
- self.lineno = lineno
-
-
-class JinjaTemplateUndefinedError(JinjaTemplateError):
- def __init__(self, message: str | None, errors: list[UndefinedJinja2Error]) -> None:
- self.message = message or "Undefined Error"
- self.errors = errors
-
-
-class JinjaTemplateOperationViolationError(JinjaTemplateError):
- def __init__(self, message: str | None = None) -> None:
- self.message = message or "Forbidden code found in the template"
+from .jinja2.exceptions import (
+ JinjaTemplateError,
+ JinjaTemplateNotFoundError,
+ JinjaTemplateOperationViolationError,
+ JinjaTemplateSyntaxError,
+ JinjaTemplateUndefinedError,
+)
+
+__all__ = [
+ "JinjaTemplateError",
+ "JinjaTemplateNotFoundError",
+ "JinjaTemplateOperationViolationError",
+ "JinjaTemplateSyntaxError",
+ "JinjaTemplateUndefinedError",
+]
diff --git a/infrahub_sdk/template/filters.py b/infrahub_sdk/template/filters.py
index 1d082b39..2580e8f9 100644
--- a/infrahub_sdk/template/filters.py
+++ b/infrahub_sdk/template/filters.py
@@ -1,151 +1,10 @@
-from dataclasses import dataclass
+from __future__ import annotations
+from .jinja2.filters import AVAILABLE_FILTERS, BUILTIN_FILTERS, NETUTILS_FILTERS, FilterDefinition
-@dataclass
-class FilterDefinition:
- name: str
- trusted: bool
- source: str
-
-
-BUILTIN_FILTERS = [
- FilterDefinition(name="abs", trusted=True, source="jinja2"),
- FilterDefinition(name="attr", trusted=False, source="jinja2"),
- FilterDefinition(name="batch", trusted=False, source="jinja2"),
- FilterDefinition(name="capitalize", trusted=True, source="jinja2"),
- FilterDefinition(name="center", trusted=True, source="jinja2"),
- FilterDefinition(name="count", trusted=True, source="jinja2"),
- FilterDefinition(name="d", trusted=True, source="jinja2"),
- FilterDefinition(name="default", trusted=True, source="jinja2"),
- FilterDefinition(name="dictsort", trusted=False, source="jinja2"),
- FilterDefinition(name="e", trusted=True, source="jinja2"),
- FilterDefinition(name="escape", trusted=True, source="jinja2"),
- FilterDefinition(name="filesizeformat", trusted=True, source="jinja2"),
- FilterDefinition(name="first", trusted=True, source="jinja2"),
- FilterDefinition(name="float", trusted=True, source="jinja2"),
- FilterDefinition(name="forceescape", trusted=True, source="jinja2"),
- FilterDefinition(name="format", trusted=True, source="jinja2"),
- FilterDefinition(name="groupby", trusted=False, source="jinja2"),
- FilterDefinition(name="indent", trusted=True, source="jinja2"),
- FilterDefinition(name="int", trusted=True, source="jinja2"),
- FilterDefinition(name="items", trusted=False, source="jinja2"),
- FilterDefinition(name="join", trusted=True, source="jinja2"),
- FilterDefinition(name="last", trusted=True, source="jinja2"),
- FilterDefinition(name="length", trusted=True, source="jinja2"),
- FilterDefinition(name="list", trusted=True, source="jinja2"),
- FilterDefinition(name="lower", trusted=True, source="jinja2"),
- FilterDefinition(name="map", trusted=False, source="jinja2"),
- FilterDefinition(name="max", trusted=True, source="jinja2"),
- FilterDefinition(name="min", trusted=True, source="jinja2"),
- FilterDefinition(name="pprint", trusted=False, source="jinja2"),
- FilterDefinition(name="random", trusted=False, source="jinja2"),
- FilterDefinition(name="reject", trusted=False, source="jinja2"),
- FilterDefinition(name="rejectattr", trusted=False, source="jinja2"),
- FilterDefinition(name="replace", trusted=True, source="jinja2"),
- FilterDefinition(name="reverse", trusted=True, source="jinja2"),
- FilterDefinition(name="round", trusted=True, source="jinja2"),
- FilterDefinition(name="safe", trusted=False, source="jinja2"),
- FilterDefinition(name="select", trusted=False, source="jinja2"),
- FilterDefinition(name="selectattr", trusted=False, source="jinja2"),
- FilterDefinition(name="slice", trusted=True, source="jinja2"),
- FilterDefinition(name="sort", trusted=False, source="jinja2"),
- FilterDefinition(name="string", trusted=True, source="jinja2"),
- FilterDefinition(name="striptags", trusted=True, source="jinja2"),
- FilterDefinition(name="sum", trusted=True, source="jinja2"),
- FilterDefinition(name="title", trusted=True, source="jinja2"),
- FilterDefinition(name="tojson", trusted=False, source="jinja2"),
- FilterDefinition(name="trim", trusted=True, source="jinja2"),
- FilterDefinition(name="truncate", trusted=True, source="jinja2"),
- FilterDefinition(name="unique", trusted=False, source="jinja2"),
- FilterDefinition(name="upper", trusted=True, source="jinja2"),
- FilterDefinition(name="urlencode", trusted=True, source="jinja2"),
- FilterDefinition(name="urlize", trusted=False, source="jinja2"),
- FilterDefinition(name="wordcount", trusted=True, source="jinja2"),
- FilterDefinition(name="wordwrap", trusted=True, source="jinja2"),
- FilterDefinition(name="xmlattr", trusted=False, source="jinja2"),
+__all__ = [
+ "AVAILABLE_FILTERS",
+ "BUILTIN_FILTERS",
+ "NETUTILS_FILTERS",
+ "FilterDefinition",
]
-
-
-NETUTILS_FILTERS = [
- FilterDefinition(name="abbreviated_interface_name", trusted=True, source="netutils"),
- FilterDefinition(name="abbreviated_interface_name_list", trusted=True, source="netutils"),
- FilterDefinition(name="asn_to_int", trusted=True, source="netutils"),
- FilterDefinition(name="bits_to_name", trusted=True, source="netutils"),
- FilterDefinition(name="bytes_to_name", trusted=True, source="netutils"),
- FilterDefinition(name="canonical_interface_name", trusted=True, source="netutils"),
- FilterDefinition(name="canonical_interface_name_list", trusted=True, source="netutils"),
- FilterDefinition(name="cidr_to_netmask", trusted=True, source="netutils"),
- FilterDefinition(name="cidr_to_netmaskv6", trusted=True, source="netutils"),
- FilterDefinition(name="clean_config", trusted=True, source="netutils"),
- FilterDefinition(name="compare_version_loose", trusted=True, source="netutils"),
- FilterDefinition(name="compare_version_strict", trusted=True, source="netutils"),
- FilterDefinition(name="config_compliance", trusted=True, source="netutils"),
- FilterDefinition(name="config_section_not_parsed", trusted=True, source="netutils"),
- FilterDefinition(name="delimiter_change", trusted=True, source="netutils"),
- FilterDefinition(name="diff_network_config", trusted=True, source="netutils"),
- FilterDefinition(name="feature_compliance", trusted=True, source="netutils"),
- FilterDefinition(name="find_unordered_cfg_lines", trusted=True, source="netutils"),
- FilterDefinition(name="fqdn_to_ip", trusted=False, source="netutils"),
- FilterDefinition(name="get_all_host", trusted=False, source="netutils"),
- FilterDefinition(name="get_broadcast_address", trusted=True, source="netutils"),
- FilterDefinition(name="get_first_usable", trusted=True, source="netutils"),
- FilterDefinition(name="get_ips_sorted", trusted=True, source="netutils"),
- FilterDefinition(name="get_nist_urls", trusted=True, source="netutils"),
- FilterDefinition(name="get_nist_vendor_platform_urls", trusted=True, source="netutils"),
- FilterDefinition(name="get_oui", trusted=True, source="netutils"),
- FilterDefinition(name="get_peer_ip", trusted=True, source="netutils"),
- FilterDefinition(name="get_range_ips", trusted=True, source="netutils"),
- FilterDefinition(name="get_upgrade_path", trusted=True, source="netutils"),
- FilterDefinition(name="get_usable_range", trusted=True, source="netutils"),
- FilterDefinition(name="hash_data", trusted=True, source="netutils"),
- FilterDefinition(name="int_to_asdot", trusted=True, source="netutils"),
- FilterDefinition(name="interface_range_compress", trusted=True, source="netutils"),
- FilterDefinition(name="interface_range_expansion", trusted=True, source="netutils"),
- FilterDefinition(name="ip_addition", trusted=True, source="netutils"),
- FilterDefinition(name="ip_subtract", trusted=True, source="netutils"),
- FilterDefinition(name="ip_to_bin", trusted=True, source="netutils"),
- FilterDefinition(name="ip_to_hex", trusted=True, source="netutils"),
- FilterDefinition(name="ipaddress_address", trusted=True, source="netutils"),
- FilterDefinition(name="ipaddress_interface", trusted=True, source="netutils"),
- FilterDefinition(name="ipaddress_network", trusted=True, source="netutils"),
- FilterDefinition(name="is_classful", trusted=True, source="netutils"),
- FilterDefinition(name="is_fqdn_resolvable", trusted=False, source="netutils"),
- FilterDefinition(name="is_ip", trusted=True, source="netutils"),
- FilterDefinition(name="is_ip_range", trusted=True, source="netutils"),
- FilterDefinition(name="is_ip_within", trusted=True, source="netutils"),
- FilterDefinition(name="is_netmask", trusted=True, source="netutils"),
- FilterDefinition(name="is_network", trusted=True, source="netutils"),
- FilterDefinition(name="is_reversible_wildcardmask", trusted=True, source="netutils"),
- FilterDefinition(name="is_valid_mac", trusted=True, source="netutils"),
- FilterDefinition(name="longest_prefix_match", trusted=True, source="netutils"),
- FilterDefinition(name="mac_normalize", trusted=True, source="netutils"),
- FilterDefinition(name="mac_to_format", trusted=True, source="netutils"),
- FilterDefinition(name="mac_to_int", trusted=True, source="netutils"),
- FilterDefinition(name="mac_type", trusted=True, source="netutils"),
- FilterDefinition(name="name_to_bits", trusted=True, source="netutils"),
- FilterDefinition(name="name_to_bytes", trusted=True, source="netutils"),
- FilterDefinition(name="name_to_name", trusted=True, source="netutils"),
- FilterDefinition(name="netmask_to_cidr", trusted=True, source="netutils"),
- FilterDefinition(name="netmask_to_wildcardmask", trusted=True, source="netutils"),
- FilterDefinition(name="normalise_delimiter_caret_c", trusted=True, source="netutils"),
- FilterDefinition(name="paloalto_panos_brace_to_set", trusted=True, source="netutils"),
- FilterDefinition(name="paloalto_panos_clean_newlines", trusted=True, source="netutils"),
- FilterDefinition(name="regex_findall", trusted=False, source="netutils"),
- FilterDefinition(name="regex_match", trusted=False, source="netutils"),
- FilterDefinition(name="regex_search", trusted=False, source="netutils"),
- FilterDefinition(name="regex_split", trusted=False, source="netutils"),
- FilterDefinition(name="regex_sub", trusted=False, source="netutils"),
- FilterDefinition(name="sanitize_config", trusted=True, source="netutils"),
- FilterDefinition(name="section_config", trusted=True, source="netutils"),
- FilterDefinition(name="sort_interface_list", trusted=True, source="netutils"),
- FilterDefinition(name="split_interface", trusted=True, source="netutils"),
- FilterDefinition(name="uptime_seconds_to_string", trusted=True, source="netutils"),
- FilterDefinition(name="uptime_string_to_seconds", trusted=True, source="netutils"),
- FilterDefinition(name="version_metadata", trusted=True, source="netutils"),
- FilterDefinition(name="vlanconfig_to_list", trusted=True, source="netutils"),
- FilterDefinition(name="vlanlist_to_config", trusted=True, source="netutils"),
- FilterDefinition(name="wildcardmask_to_netmask", trusted=True, source="netutils"),
-]
-
-
-AVAILABLE_FILTERS = BUILTIN_FILTERS + NETUTILS_FILTERS
diff --git a/infrahub_sdk/template/jinja2/__init__.py b/infrahub_sdk/template/jinja2/__init__.py
new file mode 100644
index 00000000..9da419de
--- /dev/null
+++ b/infrahub_sdk/template/jinja2/__init__.py
@@ -0,0 +1,218 @@
+from __future__ import annotations
+
+import linecache
+from collections.abc import Callable
+from pathlib import Path
+from typing import Any, NoReturn
+
+import jinja2
+from jinja2 import meta, nodes
+from jinja2.sandbox import SandboxedEnvironment
+from netutils.utils import jinja2_convenience_function
+from rich.syntax import Syntax
+from rich.traceback import Traceback
+
+from ..base import ATemplate
+from .exceptions import (
+ JinjaTemplateError,
+ JinjaTemplateNotFoundError,
+ JinjaTemplateOperationViolationError,
+ JinjaTemplateSyntaxError,
+ JinjaTemplateUndefinedError,
+)
+from .filters import AVAILABLE_FILTERS
+from .models import UndefinedJinja2Error
+
+netutils_filters = jinja2_convenience_function()
+
+
+class Jinja2Template(ATemplate):
+ def __init__(
+ self,
+ template: str | Path,
+ template_directory: Path | None = None,
+ filters: dict[str, Callable] | None = None,
+ ) -> None:
+ self.is_string_based = isinstance(template, str)
+ self.is_file_based = isinstance(template, Path)
+ self._template = str(template)
+ self._template_directory = template_directory
+ self._environment: jinja2.Environment | None = None
+
+ self._available_filters = [filter_definition.name for filter_definition in AVAILABLE_FILTERS]
+ self._trusted_filters = [
+ filter_definition.name for filter_definition in AVAILABLE_FILTERS if filter_definition.trusted
+ ]
+
+ self._filters = filters or {}
+ for user_filter in self._filters:
+ self._available_filters.append(user_filter)
+ self._trusted_filters.append(user_filter)
+
+ self._template_definition: jinja2.Template | None = None
+
+ def get_environment(self) -> jinja2.Environment:
+ if self._environment:
+ return self._environment
+
+ if self.is_string_based:
+ return self._get_string_based_environment()
+
+ return self._get_file_based_environment()
+
+ def get_template(self) -> jinja2.Template:
+ if self._template_definition:
+ return self._template_definition
+
+ try:
+ if self.is_string_based:
+ template = self._get_string_based_template()
+ else:
+ template = self._get_file_based_template()
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+ except jinja2.TemplateNotFound as exc:
+ raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name))
+
+ return template
+
+ def get_variables(self) -> list[str]:
+ env = self.get_environment()
+
+ template_source = self._template
+ if self.is_file_based and env.loader:
+ template_source = env.loader.get_source(env, self._template)[0]
+
+ try:
+ template = env.parse(template_source)
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+
+ return sorted(meta.find_undeclared_variables(template))
+
+ def validate(self, restricted: bool = True) -> None:
+ allowed_list = self._available_filters
+ if restricted:
+ allowed_list = self._trusted_filters
+
+ env = self.get_environment()
+ template_source = self._template
+ if self.is_file_based and env.loader:
+ template_source = env.loader.get_source(env, self._template)[0]
+
+ try:
+ template = env.parse(template_source)
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+
+ for node in template.find_all(nodes.Filter):
+ if node.name not in allowed_list:
+ raise JinjaTemplateOperationViolationError(f"The '{node.name}' filter isn't allowed to be used")
+
+ forbidden_operations = ["Call", "Import", "Include"]
+ if self.is_string_based and any(node.__class__.__name__ in forbidden_operations for node in template.body):
+ raise JinjaTemplateOperationViolationError(
+ f"These operations are forbidden for string based templates: {forbidden_operations}"
+ )
+
+ async def render(self, variables: dict[str, Any]) -> str:
+ template = self.get_template()
+ try:
+ output = await template.render_async(variables)
+ except jinja2.exceptions.TemplateNotFound as exc:
+ raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name), base_template=template.name)
+ except jinja2.TemplateSyntaxError as exc:
+ self._raise_template_syntax_error(error=exc)
+ except jinja2.UndefinedError as exc:
+ traceback = Traceback(show_locals=False)
+ errors = _identify_faulty_jinja_code(traceback=traceback)
+ raise JinjaTemplateUndefinedError(message=exc.message, errors=errors)
+ except Exception as exc:
+ if error_message := getattr(exc, "message", None):
+ message = error_message
+ else:
+ message = str(exc)
+ raise JinjaTemplateError(message=message or "Unknown template error")
+
+ return output
+
+ def _get_string_based_environment(self) -> jinja2.Environment:
+ env = SandboxedEnvironment(enable_async=True, undefined=jinja2.StrictUndefined)
+ self._set_filters(env=env)
+ self._environment = env
+ return self._environment
+
+ def _get_file_based_environment(self) -> jinja2.Environment:
+ template_loader = jinja2.FileSystemLoader(searchpath=str(self._template_directory))
+ env = jinja2.Environment(
+ loader=template_loader,
+ trim_blocks=True,
+ lstrip_blocks=True,
+ enable_async=True,
+ )
+ self._set_filters(env=env)
+ self._environment = env
+ return self._environment
+
+ def _set_filters(self, env: jinja2.Environment) -> None:
+ for default_filter in list(env.filters.keys()):
+ if default_filter not in self._available_filters:
+ del env.filters[default_filter]
+
+ # Add filters from netutils
+ env.filters.update(
+ {name: jinja_filter for name, jinja_filter in netutils_filters.items() if name in self._available_filters}
+ )
+ # Add user supplied filters
+ env.filters.update(self._filters)
+
+ def _get_string_based_template(self) -> jinja2.Template:
+ env = self.get_environment()
+ self._template_definition = env.from_string(self._template)
+ return self._template_definition
+
+ def _get_file_based_template(self) -> jinja2.Template:
+ env = self.get_environment()
+ self._template_definition = env.get_template(self._template)
+ return self._template_definition
+
+ def _raise_template_syntax_error(self, error: jinja2.TemplateSyntaxError) -> NoReturn:
+ filename: str | None = None
+ if error.filename and self._template_directory:
+ filename = error.filename
+ if error.filename.startswith(str(self._template_directory)):
+ filename = error.filename[len(str(self._template_directory)) :]
+
+ raise JinjaTemplateSyntaxError(message=error.message, filename=filename, lineno=error.lineno)
+
+
+def _identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[UndefinedJinja2Error]:
+ """This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user.
+
+ We use the rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace.
+ """
+ response = []
+
+ # Extract only the Jinja related exception
+ for frame in [frame for frame in traceback.trace.stacks[0].frames if not frame.filename.endswith(".py")]:
+ code = "".join(linecache.getlines(frame.filename))
+ if frame.filename == "":
+ lexer_name = "text"
+ else:
+ lexer_name = Traceback._guess_lexer(frame.filename, code)
+ syntax = Syntax(
+ code,
+ lexer_name,
+ line_numbers=True,
+ line_range=(
+ frame.lineno - nbr_context_lines,
+ frame.lineno + nbr_context_lines,
+ ),
+ highlight_lines={frame.lineno},
+ code_width=88,
+ theme=traceback.theme,
+ dedent=False,
+ )
+ response.append(UndefinedJinja2Error(frame=frame, syntax=syntax))
+
+ return response
diff --git a/infrahub_sdk/template/jinja2/exceptions.py b/infrahub_sdk/template/jinja2/exceptions.py
new file mode 100644
index 00000000..6ef60b43
--- /dev/null
+++ b/infrahub_sdk/template/jinja2/exceptions.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from infrahub_sdk.exceptions import Error
+
+if TYPE_CHECKING:
+ from .models import UndefinedJinja2Error
+
+
+class JinjaTemplateError(Error):
+ def __init__(self, message: str) -> None:
+ self.message = message
+
+ def __str__(self) -> str:
+ return str(self.message)
+
+
+class JinjaTemplateNotFoundError(JinjaTemplateError):
+ def __init__(self, message: str | None, filename: str, base_template: str | None = None) -> None:
+ self.message = message or "Template Not Found"
+ self.filename = filename
+ self.base_template = base_template
+
+
+class JinjaTemplateSyntaxError(JinjaTemplateError):
+ def __init__(self, message: str | None, lineno: int, filename: str | None = None) -> None:
+ self.message = message or "Syntax Error"
+ self.filename = filename
+ self.lineno = lineno
+
+
+class JinjaTemplateUndefinedError(JinjaTemplateError):
+ def __init__(self, message: str | None, errors: list[UndefinedJinja2Error]) -> None:
+ self.message = message or "Undefined Error"
+ self.errors = errors
+
+
+class JinjaTemplateOperationViolationError(JinjaTemplateError):
+ def __init__(self, message: str | None = None) -> None:
+ self.message = message or "Forbidden code found in the template"
diff --git a/infrahub_sdk/template/jinja2/filters.py b/infrahub_sdk/template/jinja2/filters.py
new file mode 100644
index 00000000..1d082b39
--- /dev/null
+++ b/infrahub_sdk/template/jinja2/filters.py
@@ -0,0 +1,151 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class FilterDefinition:
+ name: str
+ trusted: bool
+ source: str
+
+
+BUILTIN_FILTERS = [
+ FilterDefinition(name="abs", trusted=True, source="jinja2"),
+ FilterDefinition(name="attr", trusted=False, source="jinja2"),
+ FilterDefinition(name="batch", trusted=False, source="jinja2"),
+ FilterDefinition(name="capitalize", trusted=True, source="jinja2"),
+ FilterDefinition(name="center", trusted=True, source="jinja2"),
+ FilterDefinition(name="count", trusted=True, source="jinja2"),
+ FilterDefinition(name="d", trusted=True, source="jinja2"),
+ FilterDefinition(name="default", trusted=True, source="jinja2"),
+ FilterDefinition(name="dictsort", trusted=False, source="jinja2"),
+ FilterDefinition(name="e", trusted=True, source="jinja2"),
+ FilterDefinition(name="escape", trusted=True, source="jinja2"),
+ FilterDefinition(name="filesizeformat", trusted=True, source="jinja2"),
+ FilterDefinition(name="first", trusted=True, source="jinja2"),
+ FilterDefinition(name="float", trusted=True, source="jinja2"),
+ FilterDefinition(name="forceescape", trusted=True, source="jinja2"),
+ FilterDefinition(name="format", trusted=True, source="jinja2"),
+ FilterDefinition(name="groupby", trusted=False, source="jinja2"),
+ FilterDefinition(name="indent", trusted=True, source="jinja2"),
+ FilterDefinition(name="int", trusted=True, source="jinja2"),
+ FilterDefinition(name="items", trusted=False, source="jinja2"),
+ FilterDefinition(name="join", trusted=True, source="jinja2"),
+ FilterDefinition(name="last", trusted=True, source="jinja2"),
+ FilterDefinition(name="length", trusted=True, source="jinja2"),
+ FilterDefinition(name="list", trusted=True, source="jinja2"),
+ FilterDefinition(name="lower", trusted=True, source="jinja2"),
+ FilterDefinition(name="map", trusted=False, source="jinja2"),
+ FilterDefinition(name="max", trusted=True, source="jinja2"),
+ FilterDefinition(name="min", trusted=True, source="jinja2"),
+ FilterDefinition(name="pprint", trusted=False, source="jinja2"),
+ FilterDefinition(name="random", trusted=False, source="jinja2"),
+ FilterDefinition(name="reject", trusted=False, source="jinja2"),
+ FilterDefinition(name="rejectattr", trusted=False, source="jinja2"),
+ FilterDefinition(name="replace", trusted=True, source="jinja2"),
+ FilterDefinition(name="reverse", trusted=True, source="jinja2"),
+ FilterDefinition(name="round", trusted=True, source="jinja2"),
+ FilterDefinition(name="safe", trusted=False, source="jinja2"),
+ FilterDefinition(name="select", trusted=False, source="jinja2"),
+ FilterDefinition(name="selectattr", trusted=False, source="jinja2"),
+ FilterDefinition(name="slice", trusted=True, source="jinja2"),
+ FilterDefinition(name="sort", trusted=False, source="jinja2"),
+ FilterDefinition(name="string", trusted=True, source="jinja2"),
+ FilterDefinition(name="striptags", trusted=True, source="jinja2"),
+ FilterDefinition(name="sum", trusted=True, source="jinja2"),
+ FilterDefinition(name="title", trusted=True, source="jinja2"),
+ FilterDefinition(name="tojson", trusted=False, source="jinja2"),
+ FilterDefinition(name="trim", trusted=True, source="jinja2"),
+ FilterDefinition(name="truncate", trusted=True, source="jinja2"),
+ FilterDefinition(name="unique", trusted=False, source="jinja2"),
+ FilterDefinition(name="upper", trusted=True, source="jinja2"),
+ FilterDefinition(name="urlencode", trusted=True, source="jinja2"),
+ FilterDefinition(name="urlize", trusted=False, source="jinja2"),
+ FilterDefinition(name="wordcount", trusted=True, source="jinja2"),
+ FilterDefinition(name="wordwrap", trusted=True, source="jinja2"),
+ FilterDefinition(name="xmlattr", trusted=False, source="jinja2"),
+]
+
+
+NETUTILS_FILTERS = [
+ FilterDefinition(name="abbreviated_interface_name", trusted=True, source="netutils"),
+ FilterDefinition(name="abbreviated_interface_name_list", trusted=True, source="netutils"),
+ FilterDefinition(name="asn_to_int", trusted=True, source="netutils"),
+ FilterDefinition(name="bits_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="bytes_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="canonical_interface_name", trusted=True, source="netutils"),
+ FilterDefinition(name="canonical_interface_name_list", trusted=True, source="netutils"),
+ FilterDefinition(name="cidr_to_netmask", trusted=True, source="netutils"),
+ FilterDefinition(name="cidr_to_netmaskv6", trusted=True, source="netutils"),
+ FilterDefinition(name="clean_config", trusted=True, source="netutils"),
+ FilterDefinition(name="compare_version_loose", trusted=True, source="netutils"),
+ FilterDefinition(name="compare_version_strict", trusted=True, source="netutils"),
+ FilterDefinition(name="config_compliance", trusted=True, source="netutils"),
+ FilterDefinition(name="config_section_not_parsed", trusted=True, source="netutils"),
+ FilterDefinition(name="delimiter_change", trusted=True, source="netutils"),
+ FilterDefinition(name="diff_network_config", trusted=True, source="netutils"),
+ FilterDefinition(name="feature_compliance", trusted=True, source="netutils"),
+ FilterDefinition(name="find_unordered_cfg_lines", trusted=True, source="netutils"),
+ FilterDefinition(name="fqdn_to_ip", trusted=False, source="netutils"),
+ FilterDefinition(name="get_all_host", trusted=False, source="netutils"),
+ FilterDefinition(name="get_broadcast_address", trusted=True, source="netutils"),
+ FilterDefinition(name="get_first_usable", trusted=True, source="netutils"),
+ FilterDefinition(name="get_ips_sorted", trusted=True, source="netutils"),
+ FilterDefinition(name="get_nist_urls", trusted=True, source="netutils"),
+ FilterDefinition(name="get_nist_vendor_platform_urls", trusted=True, source="netutils"),
+ FilterDefinition(name="get_oui", trusted=True, source="netutils"),
+ FilterDefinition(name="get_peer_ip", trusted=True, source="netutils"),
+ FilterDefinition(name="get_range_ips", trusted=True, source="netutils"),
+ FilterDefinition(name="get_upgrade_path", trusted=True, source="netutils"),
+ FilterDefinition(name="get_usable_range", trusted=True, source="netutils"),
+ FilterDefinition(name="hash_data", trusted=True, source="netutils"),
+ FilterDefinition(name="int_to_asdot", trusted=True, source="netutils"),
+ FilterDefinition(name="interface_range_compress", trusted=True, source="netutils"),
+ FilterDefinition(name="interface_range_expansion", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_addition", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_subtract", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_to_bin", trusted=True, source="netutils"),
+ FilterDefinition(name="ip_to_hex", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_address", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_interface", trusted=True, source="netutils"),
+ FilterDefinition(name="ipaddress_network", trusted=True, source="netutils"),
+ FilterDefinition(name="is_classful", trusted=True, source="netutils"),
+ FilterDefinition(name="is_fqdn_resolvable", trusted=False, source="netutils"),
+ FilterDefinition(name="is_ip", trusted=True, source="netutils"),
+ FilterDefinition(name="is_ip_range", trusted=True, source="netutils"),
+ FilterDefinition(name="is_ip_within", trusted=True, source="netutils"),
+ FilterDefinition(name="is_netmask", trusted=True, source="netutils"),
+ FilterDefinition(name="is_network", trusted=True, source="netutils"),
+ FilterDefinition(name="is_reversible_wildcardmask", trusted=True, source="netutils"),
+ FilterDefinition(name="is_valid_mac", trusted=True, source="netutils"),
+ FilterDefinition(name="longest_prefix_match", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_normalize", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_to_format", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_to_int", trusted=True, source="netutils"),
+ FilterDefinition(name="mac_type", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_bits", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_bytes", trusted=True, source="netutils"),
+ FilterDefinition(name="name_to_name", trusted=True, source="netutils"),
+ FilterDefinition(name="netmask_to_cidr", trusted=True, source="netutils"),
+ FilterDefinition(name="netmask_to_wildcardmask", trusted=True, source="netutils"),
+ FilterDefinition(name="normalise_delimiter_caret_c", trusted=True, source="netutils"),
+ FilterDefinition(name="paloalto_panos_brace_to_set", trusted=True, source="netutils"),
+ FilterDefinition(name="paloalto_panos_clean_newlines", trusted=True, source="netutils"),
+ FilterDefinition(name="regex_findall", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_match", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_search", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_split", trusted=False, source="netutils"),
+ FilterDefinition(name="regex_sub", trusted=False, source="netutils"),
+ FilterDefinition(name="sanitize_config", trusted=True, source="netutils"),
+ FilterDefinition(name="section_config", trusted=True, source="netutils"),
+ FilterDefinition(name="sort_interface_list", trusted=True, source="netutils"),
+ FilterDefinition(name="split_interface", trusted=True, source="netutils"),
+ FilterDefinition(name="uptime_seconds_to_string", trusted=True, source="netutils"),
+ FilterDefinition(name="uptime_string_to_seconds", trusted=True, source="netutils"),
+ FilterDefinition(name="version_metadata", trusted=True, source="netutils"),
+ FilterDefinition(name="vlanconfig_to_list", trusted=True, source="netutils"),
+ FilterDefinition(name="vlanlist_to_config", trusted=True, source="netutils"),
+ FilterDefinition(name="wildcardmask_to_netmask", trusted=True, source="netutils"),
+]
+
+
+AVAILABLE_FILTERS = BUILTIN_FILTERS + NETUTILS_FILTERS
diff --git a/infrahub_sdk/template/jinja2/models.py b/infrahub_sdk/template/jinja2/models.py
new file mode 100644
index 00000000..e40393ab
--- /dev/null
+++ b/infrahub_sdk/template/jinja2/models.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+
+from rich.syntax import Syntax
+from rich.traceback import Frame
+
+
+@dataclass
+class UndefinedJinja2Error:
+ frame: Frame
+ syntax: Syntax
diff --git a/infrahub_sdk/template/models.py b/infrahub_sdk/template/models.py
index e40393ab..ce3cc7ff 100644
--- a/infrahub_sdk/template/models.py
+++ b/infrahub_sdk/template/models.py
@@ -1,10 +1,7 @@
-from dataclasses import dataclass
+from __future__ import annotations
-from rich.syntax import Syntax
-from rich.traceback import Frame
+from .jinja2.models import UndefinedJinja2Error
-
-@dataclass
-class UndefinedJinja2Error:
- frame: Frame
- syntax: Syntax
+__all__ = [
+ "UndefinedJinja2Error",
+]
diff --git a/pyproject.toml b/pyproject.toml
index c2e471cd..5edb9ab2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,6 +51,7 @@ ctl = [
"typer>=0.12.5",
"click==8.1.*",
"ariadne-codegen==0.15.3",
+ "mdxify>=0.2.23; python_version>='3.10'",
]
all = [
@@ -319,7 +320,7 @@ max-complexity = 17
"S105", # 'PASS' is not a password but a state
]
-"infrahub_sdk/template/__init__.py" = [
+"infrahub_sdk/template/jinja2/__init__.py" = [
##################################################################################################
# Review and change the below later #
##################################################################################################
diff --git a/tasks.py b/tasks.py
index b2434df0..88554fa2 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,11 +1,18 @@
-import asyncio
+from __future__ import annotations
+
import json
+import operator
+import shutil
import sys
+from functools import reduce
from pathlib import Path
from shutil import which
-from typing import Any
+from typing import TYPE_CHECKING
+
+from invoke import Context, Exit, task
-from invoke import Context, task
+if TYPE_CHECKING:
+ from docs.docs_generation.content_gen_methods.command.typer_command import ATyperCommand
CURRENT_DIRECTORY = Path(__file__).resolve()
DOCUMENTATION_DIRECTORY = CURRENT_DIRECTORY.parent / "docs"
@@ -18,15 +25,21 @@ def is_tool_installed(name: str) -> bool:
return which(name) is not None
-def _generate(context: Context) -> None:
- """Generate documentation output from code."""
+@task(name="docs-generate")
+def docs_generate(context: Context) -> None:
+ """Generate all documentation (infrahubctl CLI + Python SDK)."""
_generate_infrahubctl_documentation(context=context)
- _generate_infrahub_sdk_configuration_documentation()
- _generate_infrahub_sdk_template_documentation()
+ generate_python_sdk(context)
def _generate_infrahubctl_documentation(context: Context) -> None:
"""Generate the documentation for infrahubctl CLI using typer-cli."""
+ from docs.docs_generation.content_gen_methods import (
+ CommandOutputDocContentGenMethod,
+ TyperGroupCommand,
+ TyperSingleCommand,
+ )
+ from docs.docs_generation.pages import DocPage, MDXDocPage
from infrahub_sdk.ctl.cli import app
output_dir = DOCUMENTATION_DIRECTORY / "docs" / "infrahubctl"
@@ -37,113 +50,149 @@ def _generate_infrahubctl_documentation(context: Context) -> None:
file.unlink()
print(" - Generate infrahubctl CLI documentation")
- for cmd in app.registered_commands:
- if cmd.hidden:
- continue
- exec_cmd = (
- f'uv run typer --func {cmd.name} infrahub_sdk.ctl.cli_commands utils docs --name "infrahubctl {cmd.name}"'
+ commands: list[ATyperCommand] = [
+ TyperSingleCommand(name=cmd.name) for cmd in app.registered_commands if not cmd.hidden and cmd.name
+ ]
+ commands.extend(TyperGroupCommand(name=cmd.name) for cmd in app.registered_groups if not cmd.hidden and cmd.name)
+
+ for typer_cmd in commands:
+ # Generating one documentation page for one command
+ page = DocPage(
+ content_gen_method=CommandOutputDocContentGenMethod(
+ context=context,
+ working_directory=MAIN_DIRECTORY_PATH,
+ command=typer_cmd,
+ ),
)
- exec_cmd += f" --output docs/docs/infrahubctl/infrahubctl-{cmd.name}.mdx"
- with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
-
- for cmd in app.registered_groups:
- if cmd.hidden:
- continue
- exec_cmd = f"uv run typer infrahub_sdk.ctl.{cmd.name} utils docs"
- exec_cmd += f' --name "infrahubctl {cmd.name}" --output docs/docs/infrahubctl/infrahubctl-{cmd.name}.mdx'
- with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
+ output_path = output_dir / f"infrahubctl-{typer_cmd.name}.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
def _generate_infrahub_sdk_configuration_documentation() -> None:
"""Generate documentation for the Infrahub SDK configuration."""
- import jinja2
-
- from infrahub_sdk.config import ConfigBase
-
- schema = ConfigBase.model_json_schema()
- env_vars = _get_env_vars()
- definitions = schema.get("$defs", {})
-
- properties = []
- for name, prop in schema["properties"].items():
- choices: list[dict[str, Any]] = []
- kind = ""
- composed_type = ""
-
- if "allOf" in prop:
- choices = definitions.get(prop["allOf"][0]["$ref"].split("/")[-1], {}).get("enum", [])
- kind = definitions.get(prop["allOf"][0]["$ref"].split("/")[-1], {}).get("type", "")
-
- if "anyOf" in prop:
- composed_type = ", ".join(i["type"] for i in prop.get("anyOf", []) if "type" in i and i["type"] != "null")
-
- properties.append(
- {
- "name": name,
- "description": prop.get("description", ""),
- "type": prop.get("type", kind) or composed_type or "object",
- "choices": choices,
- "default": prop.get("default", ""),
- "env_vars": env_vars.get(name, []),
- }
- )
+ from docs.docs_generation.content_gen_methods import Jinja2DocContentGenMethod
+ from docs.docs_generation.helpers import build_config_properties
+ from docs.docs_generation.pages import DocPage, MDXDocPage
+ from infrahub_sdk.template import Jinja2Template
print(" - Generate Infrahub SDK configuration documentation")
-
- template_file = DOCUMENTATION_DIRECTORY / "_templates" / "sdk_config.j2"
- output_file = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "config.mdx"
-
- if not template_file.exists():
- print(f"Unable to find the template file at {template_file}")
- sys.exit(-1)
-
- template_text = template_file.read_text(encoding="utf-8")
-
- environment = jinja2.Environment(trim_blocks=True, autoescape=jinja2.select_autoescape(default_for_string=False))
- template = environment.from_string(template_text)
- rendered_file = template.render(properties=properties)
-
- output_file.write_text(rendered_file, encoding="utf-8")
- print(f"Docs saved to: {output_file}")
+ # Generating one documentation page for the ConfigBase.model_json_schema()
+ page = DocPage(
+ content_gen_method=Jinja2DocContentGenMethod(
+ template=Jinja2Template(
+ template=Path("sdk_config.j2"),
+ template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ ),
+ template_variables={"properties": build_config_properties()},
+ ),
+ )
+ output_path = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "config.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
def _generate_infrahub_sdk_template_documentation() -> None:
"""Generate documentation for the Infrahub SDK template reference."""
+ from docs.docs_generation.content_gen_methods import Jinja2DocContentGenMethod
+ from docs.docs_generation.pages import DocPage, MDXDocPage
from infrahub_sdk.template import Jinja2Template
from infrahub_sdk.template.filters import BUILTIN_FILTERS, NETUTILS_FILTERS
- output_file = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "templating.mdx"
- jinja2_template = Jinja2Template(
- template=Path("sdk_template_reference.j2"),
- template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ print(" - Generate Infrahub SDK template documentation")
+ # Generating one documentation page for template documentation
+ page = DocPage(
+ content_gen_method=Jinja2DocContentGenMethod(
+ template=Jinja2Template(
+ template=Path("sdk_template_reference.j2"),
+ template_directory=DOCUMENTATION_DIRECTORY / "_templates",
+ ),
+ template_variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS},
+ ),
)
+ output_path = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "reference" / "templating.mdx"
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+
+def get_modules_to_document() -> list[str]:
+ """Return the list of Python module paths to document with mdxify.
+
+ Auto-discovers packages under ``infrahub_sdk/`` and validates that every
+ discovered package is explicitly categorised as either *to document* or
+ *to ignore*. Individual ``.py`` modules can be added via
+ ``extra_modules_to_document``.
+ """
+ # Packages (sub-folders of infrahub_sdk/) to document.
+    # Passed to mdxify as "infrahub_sdk.<package_name>".
+ packages_to_document = [
+ "node",
+ ]
+
+ # Packages explicitly ignored for API doc generation.
+ packages_to_ignore = [
+ "ctl",
+ "graphql",
+ "protocols_generator",
+ "pytest_plugin",
+ "schema",
+ "spec",
+ "task",
+ "template",
+ "testing",
+ "transfer",
+ ]
+
+ # Extra modules (individual .py files, not packages) to document.
+ extra_modules_to_document = [
+ "infrahub_sdk.client",
+ ]
+
+ # Auto-discover all packages under infrahub_sdk/
+ sdk_dir = Path(__file__).parent / "infrahub_sdk"
+ discovered_packages = {d.name for d in sdk_dir.iterdir() if d.is_dir() and (d / "__init__.py").exists()}
+
+ # Validate that every discovered package is categorized and vice versa
+ declared = set(packages_to_document) | set(packages_to_ignore)
+ uncategorized = discovered_packages - declared
+ unknown = declared - discovered_packages
+
+ if uncategorized:
+ raise ValueError(
+ f"Uncategorized packages under infrahub_sdk/: {sorted(uncategorized)}. "
+ "Add them to packages_to_document or packages_to_ignore in tasks.py"
+ )
- rendered_file = asyncio.run(
- jinja2_template.render(variables={"builtin": BUILTIN_FILTERS, "netutils": NETUTILS_FILTERS})
- )
- output_file.write_text(rendered_file, encoding="utf-8")
- print(f"Docs saved to: {output_file}")
+ if unknown:
+ raise ValueError(f"Declared packages that no longer exist: {sorted(unknown)}")
+
+ return [f"infrahub_sdk.{pkg}" for pkg in packages_to_document] + extra_modules_to_document
-def _get_env_vars() -> dict[str, list[str]]:
- """Retrieve environment variables for Infrahub SDK configuration."""
- from collections import defaultdict
+@task(name="generate-sdk-api-docs")
+def _generate_sdk_api_docs(context: Context) -> None:
+ """Generate API documentation for the Python SDK."""
+ from docs.docs_generation.content_gen_methods import FilePrintingDocContentGenMethod, MdxCodeDocumentation
+ from docs.docs_generation.pages import DocPage, MDXDocPage
- from pydantic_settings import EnvSettingsSource
+ modules_to_document = get_modules_to_document()
- from infrahub_sdk.config import ConfigBase
+ output_dir = DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "sdk_ref"
- env_vars: dict[str, list[str]] = defaultdict(list)
- settings = ConfigBase()
- env_settings = EnvSettingsSource(settings.__class__, env_prefix=settings.model_config.get("env_prefix", ""))
+ if not is_tool_installed("mdxify"):
+ print(" - mdxify is not installed, skipping documentation generation")
+ return
+
+ if (output_dir / "infrahub_sdk").exists():
+ shutil.rmtree(output_dir / "infrahub_sdk")
+
+ documentation = MdxCodeDocumentation()
+ generated_files = documentation.generate(context=context, modules_to_document=modules_to_document)
- for field_name, field in settings.model_fields.items():
- for field_key, field_env_name, _ in env_settings._extract_field_info(field, field_name):
- env_vars[field_key].append(field_env_name.upper())
+ for file_key, mdxified_file in generated_files.items():
+ page = DocPage(content_gen_method=FilePrintingDocContentGenMethod(file=mdxified_file))
+ target_path = output_dir / reduce(operator.truediv, (Path(part) for part in file_key.split("-")))
+ MDXDocPage(page=page, output_path=target_path).to_mdx()
- return env_vars
+ if is_tool_installed("markdownlint-cli2"):
+ context.run(f"markdownlint-cli2 {output_dir}/ --fix --config .markdownlint.yaml", pty=True)
@task
@@ -237,11 +286,27 @@ def lint_all(context: Context) -> None:
@task(name="docs-validate")
def docs_validate(context: Context) -> None:
- """Validate that the generated documentation is committed to Git."""
- _generate(context=context)
- exec_cmd = "git diff --exit-code docs"
+ """Validate that the generated documentation matches the committed version.
+
+ Regenerates all documentation and checks for modified, deleted, or new
+ untracked files under docs/. Exits with a non-zero code and a descriptive
+ message when the working tree diverges from what is committed.
+ """
+ docs_generate(context=context)
with context.cd(MAIN_DIRECTORY_PATH):
- context.run(exec_cmd)
+ diff_result = context.run("git diff --name-only docs", hide=True)
+ changed_files = diff_result.stdout.strip() if diff_result else ""
+ untracked_result = context.run("git ls-files --others --exclude-standard docs", hide=True)
+ untracked_files = untracked_result.stdout.strip() if untracked_result else ""
+
+ if changed_files or untracked_files:
+ message = "Generated documentation is out of sync with the committed version.\n"
+ message += "Run 'uv run invoke docs-generate' and commit the result.\n\n"
+ if changed_files:
+ message += f"Modified or deleted files:\n{changed_files}\n\n"
+ if untracked_files:
+ message += f"New untracked files:\n{untracked_files}\n"
+ raise Exit(message, code=1)
@task(name="docs")
@@ -263,10 +328,11 @@ def generate_infrahubctl(context: Context) -> None:
@task(name="generate-sdk")
-def generate_python_sdk(context: Context) -> None: # noqa: ARG001
+def generate_python_sdk(context: Context) -> None:
"""Generate documentation for the Python SDK."""
_generate_infrahub_sdk_configuration_documentation()
_generate_infrahub_sdk_template_documentation()
+ _generate_sdk_api_docs(context)
@task
diff --git a/tests/unit/doc_generation/__init__.py b/tests/unit/doc_generation/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/__init__.py b/tests/unit/doc_generation/content_gen_methods/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/command/__init__.py b/tests/unit/doc_generation/content_gen_methods/command/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py b/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py
new file mode 100644
index 00000000..4b0d2363
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/command/test_typer_command.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from docs.docs_generation.content_gen_methods import TyperGroupCommand, TyperSingleCommand
+
+
+class TestTyperSingleCommand:
+ def test_build_exec_cmd(self) -> None:
+ # Arrange
+ cmd = TyperSingleCommand(name="dump")
+
+ # Act
+ result = cmd.build()
+
+ # Assert
+ assert "uv run typer --func dump" in result
+ assert "infrahub_sdk.ctl.cli_commands" in result
+ assert 'utils docs --name "infrahubctl dump"' in result
+
+
+class TestTyperGroupCommand:
+ def test_build_exec_cmd(self) -> None:
+ # Arrange
+ cmd = TyperGroupCommand(name="branch")
+
+ # Act
+ result = cmd.build()
+
+ # Assert
+ assert "uv run typer infrahub_sdk.ctl.branch" in result
+ assert 'utils docs --name "infrahubctl branch"' in result
diff --git a/tests/unit/doc_generation/content_gen_methods/mdx/__init__.py b/tests/unit/doc_generation/content_gen_methods/mdx/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py
new file mode 100644
index 00000000..83d97c3a
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/mdx/test_mdx_code_doc.py
@@ -0,0 +1,155 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from invoke import Result
+from invoke.context import MockContext
+
+from docs.docs_generation.content_gen_methods import (
+ MdxCodeDocumentation,
+)
+
+
+def _make_mock_context(
+ module_files: dict[str, dict[str, str]],
+ calls: list[str] | None = None,
+) -> MockContext:
+ """Build a ``MockContext`` whose ``run()`` writes files based on requested modules.
+
+ Args:
+ module_files: Mapping of module name to its output files
+ (e.g. ``{"infrahub_sdk.node": {"node.mdx": "# Node"}}``).
+ Only files belonging to modules present in the ``mdxify`` command
+ are written to the output directory.
+ calls: If provided, each executed command string is appended to this
+ list so the caller can verify how many times ``run()`` was invoked.
+ """
+ ctx = MockContext(run=Result())
+
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ if calls is not None:
+ calls.append(cmd)
+ prefix, output_dir_str = cmd.split("--output-dir ")
+ output_dir = Path(output_dir_str.strip())
+ requested_modules = prefix.replace("mdxify ", "").split()
+ for module in requested_modules:
+ for filename, content in module_files.get(module, {}).items():
+ (output_dir / filename).write_text(content, encoding="utf-8")
+ return Result()
+
+ ctx.run.side_effect = fake_run
+ return ctx
+
+
+class TestMdxCodeDocumentation:
+ def test_generate_default_filter_returns_filtered_files(self) -> None:
+ """Files matching the default filter (``__init__``) are excluded."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-node.mdx": "# Node",
+ "infrahub_sdk-node-__init__.mdx": "# Init (should be filtered)",
+ },
+ "infrahub_sdk.client": {
+ "infrahub_sdk-client.mdx": "# Client",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node", "infrahub_sdk.client"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-node.mdx" in results
+ assert "infrahub_sdk-client.mdx" in results
+ assert "infrahub_sdk-node-__init__.mdx" not in results
+
+ def test_generate_runs_mdxify_only_once(self) -> None:
+ """Second call returns the same result without re-running mdxify."""
+ # Arrange
+ calls: list[str] = []
+ mock_context = _make_mock_context(
+ {"infrahub_sdk.client": {"infrahub_sdk-client.mdx": "# Client"}},
+ calls=calls,
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ result1 = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.client"],
+ )
+ result2 = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.client"],
+ )
+
+ # Assert
+ assert result1 is result2
+ assert len(calls) == 1
+
+ def test_generate_with_custom_filters(self) -> None:
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-_private.mdx": "# Private",
+ "infrahub_sdk-node-public.mdx": "# Public",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation(file_filters=["_private"])
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-public.mdx" in results
+ assert "infrahub_sdk-node-_private.mdx" not in results
+
+ def test_generate_empty_output(self) -> None:
+ # Arrange
+ mock_context = _make_mock_context({})
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.empty"],
+ )
+
+ # Assert
+ assert results == {}
+
+ def test_generate_only_includes_requested_modules(self) -> None:
+ """Only files belonging to requested modules are returned."""
+ # Arrange
+ mock_context = _make_mock_context(
+ {
+ "infrahub_sdk.node": {
+ "infrahub_sdk-node-node.mdx": "# Node",
+ },
+ "infrahub_sdk.client": {
+ "infrahub_sdk-client.mdx": "# Client",
+ },
+ }
+ )
+ doc = MdxCodeDocumentation()
+
+ # Act
+ results = doc.generate(
+ context=mock_context,
+ modules_to_document=["infrahub_sdk.node"],
+ )
+
+ # Assert
+ assert "infrahub_sdk-node-node.mdx" in results
+ assert "infrahub_sdk-client.mdx" not in results
diff --git a/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py
new file mode 100644
index 00000000..8a3d56c8
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_command_output_method.py
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from invoke import Result
+from invoke.context import MockContext
+
+from docs.docs_generation import ACommand, CommandOutputDocContentGenMethod
+
+
+class StubCommand(ACommand):
+ def __init__(self, cmd: str) -> None:
+ self.cmd = cmd
+
+ def build(self) -> str:
+ return self.cmd
+
+
+class TestCommandOutputDocContentGenMethod:
+ def test_apply_runs_command_and_reads_output(self, tmp_path: Path) -> None:
+ """The method executes the command via context.run, then reads
+ the content from the temp file whose path was appended via --output."""
+ output_content = "# Generated docs"
+
+ # Arrange
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ parts = cmd.split("--output ")
+ output_path = Path(parts[1].strip())
+ output_path.write_text(output_content, encoding="utf-8")
+ return Result()
+
+ mock_context = MockContext(run=Result())
+ mock_context.run.side_effect = fake_run
+
+ method = CommandOutputDocContentGenMethod(
+ context=mock_context,
+ working_directory=tmp_path,
+ command=StubCommand("some_command"),
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == output_content
+
+ def test_apply_appends_output_flag(self, tmp_path: Path) -> None:
+ """Verify that --output is appended to the command."""
+ captured_cmd: list[str] = []
+
+ # Arrange
+ def fake_run(cmd: str, **kwargs: object) -> Result:
+ captured_cmd.append(cmd)
+ parts = cmd.split("--output ")
+ Path(parts[1].strip()).write_text("", encoding="utf-8")
+ return Result()
+
+ mock_context = MockContext(run=Result())
+ mock_context.run.side_effect = fake_run
+
+ method = CommandOutputDocContentGenMethod(
+ context=mock_context,
+ working_directory=tmp_path,
+ command=StubCommand("base_cmd"),
+ )
+
+ # Act
+ method.apply()
+
+ # Assert
+ assert captured_cmd[0].startswith("base_cmd --output ")
diff --git a/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py b/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py
new file mode 100644
index 00000000..220e60a3
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_file_printing_method.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation.content_gen_methods.file_printing_method import FilePrintingDocContentGenMethod
+from docs.docs_generation.content_gen_methods.mdx import MdxFile
+
+
+class TestFilePrintingDocContentGenMethod:
+ def test_apply_returns_file_content(self) -> None:
+ file = MdxFile(path=Path("node.mdx"), content="# Node API\n\nSome content")
+ method = FilePrintingDocContentGenMethod(file=file)
+
+ assert method.apply() == "# Node API\n\nSome content"
+
+ def test_apply_returns_empty_string(self) -> None:
+ file = MdxFile(path=Path("empty.mdx"), content="")
+ method = FilePrintingDocContentGenMethod(file=file)
+
+ assert not method.apply()
diff --git a/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py
new file mode 100644
index 00000000..29d9fc43
--- /dev/null
+++ b/tests/unit/doc_generation/content_gen_methods/test_jinja2_method.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation import Jinja2DocContentGenMethod
+from infrahub_sdk.template import Jinja2Template
+from tests.unit.sdk.dummy_template import DummyTemplate
+
+
+class TestJinja2DocContentGenMethod:
+ def test_apply_calls_template(self) -> None:
+ """Inject a DummyTemplate to verify the method renders
+ using the template engine correctly."""
+ # Arrange
+ template = DummyTemplate(content="rendered content")
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"key": "value"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "rendered content"
+
+ def test_apply_renders_template(self, tmp_path: Path) -> None:
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("Hello {{ name }}!", encoding="utf-8")
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"name": "World"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "Hello World!"
+
+ def test_apply_renders_with_multiple_variables(self, tmp_path: Path) -> None:
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("{{ greeting }} {{ target }}!", encoding="utf-8")
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"greeting": "Hi", "target": "there"},
+ )
+
+ # Act
+ result = method.apply()
+
+ # Assert
+ assert result == "Hi there!"
+
+ def test_auto_escaping_is_disabled(self, tmp_path: Path) -> None:
+ """HTML content in template variables must not be auto-escaped,
+ since the SDK Jinja2 environment does not enable autoescape."""
+ # Arrange
+ template_file = tmp_path / "test.j2"
+ template_file.write_text("{{ html_content }}", encoding="utf-8")
+        html_input = '<b>text</b>'
+ template = Jinja2Template(template=Path("test.j2"), template_directory=tmp_path)
+ method = Jinja2DocContentGenMethod(
+ template=template,
+ template_variables={"html_content": html_input},
+ )
+
+ # Act
+ result: str = method.apply()
+
+ # Assert
+ assert result == html_input
diff --git a/tests/unit/doc_generation/pages/__init__.py b/tests/unit/doc_generation/pages/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/doc_generation/pages/test_doc_page.py b/tests/unit/doc_generation/pages/test_doc_page.py
new file mode 100644
index 00000000..51dcf190
--- /dev/null
+++ b/tests/unit/doc_generation/pages/test_doc_page.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from docs.docs_generation.content_gen_methods import ADocContentGenMethod
+from docs.docs_generation.pages import DocPage, MDXDocPage
+
+
+class StubContentGenMethod(ADocContentGenMethod):
+ def __init__(self, content: str) -> None:
+ self._content = content
+
+ def apply(self) -> str:
+ return self._content
+
+
+class TestDocPage:
+ def test_content_delegates_to_method(self) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("test content"))
+
+ # Act
+ result = page.content()
+
+ # Assert
+ assert result == "test content"
+
+
+class TestMDXDocPage:
+ def test_to_mdx_writes_file(self, tmp_path: Path) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("# Hello MDX"))
+ output_path = tmp_path / "output.mdx"
+
+ # Act
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+ # Assert
+ assert output_path.exists()
+ assert output_path.read_text(encoding="utf-8") == "# Hello MDX"
+
+ def test_to_mdx_creates_parent_directories(self, tmp_path: Path) -> None:
+ # Arrange
+ page = DocPage(content_gen_method=StubContentGenMethod("content"))
+ output_path = tmp_path / "nested" / "dir" / "output.mdx"
+
+ # Act
+ MDXDocPage(page=page, output_path=output_path).to_mdx()
+
+ # Assert
+ assert output_path.exists()
+ assert output_path.read_text(encoding="utf-8") == "content"
diff --git a/tests/unit/doc_generation/test_docs_validate.py b/tests/unit/doc_generation/test_docs_validate.py
new file mode 100644
index 00000000..00159106
--- /dev/null
+++ b/tests/unit/doc_generation/test_docs_validate.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import os
+import subprocess # noqa: S404
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+from invoke import Context, Exit
+
+import tasks
+
+if TYPE_CHECKING:
+ from pytest import MonkeyPatch
+
+_GIT_ENV = {
+ "GIT_AUTHOR_NAME": "test",
+ "GIT_AUTHOR_EMAIL": "test@test.com",
+ "GIT_COMMITTER_NAME": "test",
+ "GIT_COMMITTER_EMAIL": "test@test.com",
+ "PATH": os.environ.get("PATH", ""),
+ "HOME": os.environ.get("HOME", ""),
+}
+
+
+def _git(repo: Path, *args: str) -> None:
+ """Run a git command inside *repo* with deterministic author info."""
+ subprocess.check_call(["git", *args], cwd=repo, env={**_GIT_ENV, "HOME": str(repo)}) # noqa: S603, S607
+
+
+@pytest.fixture
+def git_repo_with_docs(tmp_path: Path) -> Path:
+ """Create a temporary git repo with a committed docs/ directory."""
+ docs_dir = tmp_path / "docs"
+ docs_dir.mkdir()
+ (docs_dir / "generated.mdx").write_text("# Original content\n")
+
+ _git(tmp_path, "init")
+ _git(tmp_path, "add", ".")
+ _git(tmp_path, "commit", "-m", "initial")
+ return tmp_path
+
+
+class TestDocsValidate:
+ def test_passes_when_generation_produces_no_changes(
+ self, git_repo_with_docs: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ # Arrange
+ monkeypatch.setattr(tasks, "docs_generate", lambda context: None) # noqa: ARG005
+ monkeypatch.setattr(tasks, "MAIN_DIRECTORY_PATH", git_repo_with_docs)
+
+ # Act / Assert — no exception means docs are in sync
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_modifies_existing_file(
+ self, git_repo_with_docs: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "generated.mdx").write_text("# Modified content\n")
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "MAIN_DIRECTORY_PATH", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="out of sync"):
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_deletes_tracked_file(
+ self, git_repo_with_docs: Path, monkeypatch: MonkeyPatch
+ ) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "generated.mdx").unlink()
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "MAIN_DIRECTORY_PATH", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="Modified or deleted files"):
+ tasks.docs_validate(Context())
+
+ def test_fails_when_generation_creates_new_file(self, git_repo_with_docs: Path, monkeypatch: MonkeyPatch) -> None:
+ # Arrange
+ def fake_generate(context: Context) -> None:
+ (git_repo_with_docs / "docs" / "new_file.mdx").write_text("# New\n")
+
+ monkeypatch.setattr(tasks, "docs_generate", fake_generate)
+ monkeypatch.setattr(tasks, "MAIN_DIRECTORY_PATH", git_repo_with_docs)
+
+ # Act / Assert
+ with pytest.raises(Exit, match="New untracked files"):
+ tasks.docs_validate(Context())
diff --git a/tests/unit/doc_generation/test_helpers.py b/tests/unit/doc_generation/test_helpers.py
new file mode 100644
index 00000000..359654f0
--- /dev/null
+++ b/tests/unit/doc_generation/test_helpers.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+from docs.docs_generation.helpers import build_config_properties, get_env_vars
+
+
+class TestGetEnvVars:
+ def test_returns_dict(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ assert isinstance(result, dict)
+
+ def test_values_are_lists_of_strings(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ for key, values in result.items():
+ assert isinstance(key, str)
+ assert isinstance(values, list)
+ for v in values:
+ assert isinstance(v, str)
+
+ def test_env_vars_are_upper_case(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ for values in result.values():
+ for v in values:
+ assert v == v.upper()
+
+ def test_address_field_has_env_var(self) -> None:
+ # Act
+ result = get_env_vars()
+
+ # Assert
+ assert "address" in result
+ assert len(result["address"]) > 0
+
+
+class TestBuildConfigProperties:
+ def test_returns_list(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ assert isinstance(result, list)
+ assert len(result) > 0
+
+ def test_each_property_has_required_keys(self) -> None:
+ # Arrange
+ required_keys = {"name", "description", "type", "choices", "default", "env_vars"}
+
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ for prop in result:
+ assert required_keys.issubset(prop.keys())
+
+ def test_address_property_exists(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ names = [p["name"] for p in result]
+ assert "address" in names
+
+ def test_address_has_env_vars(self) -> None:
+ # Act
+ result = build_config_properties()
+
+ # Assert
+ address_prop = next(p for p in result if p["name"] == "address")
+ assert len(address_prop["env_vars"]) > 0
diff --git a/tests/unit/sdk/dummy_template.py b/tests/unit/sdk/dummy_template.py
new file mode 100644
index 00000000..81c66310
--- /dev/null
+++ b/tests/unit/sdk/dummy_template.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from typing import Any
+
+from infrahub_sdk.template.base import ATemplate
+
+
+class DummyTemplate(ATemplate):
+ """Test double that returns fixed content.
+
+ Args:
+ content: The string returned by ``render()``.
+
+    Unlike ``Jinja2Template`` (which receives ``template=`` and
+    ``template_directory=``), ``__init__`` accepts only ``content``.
+ """
+
+ def __init__(self, content: str) -> None:
+ self._content = content
+
+ async def render(self, variables: dict[str, Any]) -> str:
+ return self._content
diff --git a/uv.lock b/uv.lock
index 7563b99c..c258ba70 100644
--- a/uv.lock
+++ b/uv.lock
@@ -717,6 +717,7 @@ ctl = [
{ name = "ariadne-codegen" },
{ name = "click" },
{ name = "jinja2" },
+ { name = "mdxify" },
{ name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
{ name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
{ name = "pyarrow" },
@@ -783,6 +784,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.20" },
{ name = "jinja2", marker = "extra == 'all'", specifier = ">=3" },
{ name = "jinja2", marker = "extra == 'ctl'", specifier = ">=3" },
+ { name = "mdxify", marker = "python_full_version >= '3.10' and extra == 'ctl'", specifier = ">=0.2.23" },
{ name = "netutils", specifier = ">=1.0.0" },
{ name = "numpy", marker = "python_full_version >= '3.12' and extra == 'all'", specifier = ">=1.26.2" },
{ name = "numpy", marker = "python_full_version >= '3.12' and extra == 'ctl'", specifier = ">=1.26.2" },
@@ -1149,6 +1151,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
+[[package]]
+name = "mdxify"
+version = "0.2.36"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/57/8b/eec3cc2f5b9e15a1d5d1a7399cf68b420bbd7ab8c363c789cfb14f783a09/mdxify-0.2.36.tar.gz", hash = "sha256:bd8afc3036b8258b13cd6d44413f1805088a9959b1b2d63eae9160cc037ee8e4", size = 1250127, upload-time = "2026-02-06T17:58:19.542Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/aa/b4/3ad6aac18dbd5913201cd3bbf19a896a59fd418c7e87a5abf18575fb339a/mdxify-0.2.36-py3-none-any.whl", hash = "sha256:9dbe9b3e608ad1b9d5d95f95fcc66788d0d737a52eadd8bdb1244e628dc6d98c", size = 24552, upload-time = "2026-02-06T17:58:18.542Z" },
+]
+
[[package]]
name = "mypy"
version = "1.11.2"