From ceb7e611b190995a2e0744dc9ae05cbb5e4b2378 Mon Sep 17 00:00:00 2001
From: Antoine Delannoy
Date: Wed, 23 Jul 2025 10:07:36 +0200
Subject: [PATCH 01/25] docs: fix docusaurus icon interpretation

IHS-198
---
 docs/package-lock.json          | 22 ++++++++++++++++++++++
 docs/package.json               |  1 +
 docs/src/theme/MDXComponents.js | 10 ++++++++++
 3 files changed, 33 insertions(+)
 create mode 100644 docs/src/theme/MDXComponents.js

diff --git a/docs/package-lock.json b/docs/package-lock.json
index 6a594bda..348ea86e 100644
--- a/docs/package-lock.json
+++ b/docs/package-lock.json
@@ -10,6 +10,7 @@
     "dependencies": {
       "@docusaurus/core": "^3.8.1",
       "@docusaurus/preset-classic": "^3.8.1",
+      "@iconify/react": "^6.0.0",
       "@mdx-js/react": "^3.0.0",
       "clsx": "^2.0.0",
       "prism-react-renderer": "^2.3.0",
@@ -4020,6 +4021,27 @@
         "@hapi/hoek": "^9.0.0"
       }
     },
+    "node_modules/@iconify/react": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/@iconify/react/-/react-6.0.0.tgz",
+      "integrity": "sha512-eqNscABVZS8eCpZLU/L5F5UokMS9mnCf56iS1nM9YYHdH8ZxqZL9zyjSwW60IOQFsXZkilbBiv+1paMXBhSQnw==",
+      "license": "MIT",
+      "dependencies": {
+        "@iconify/types": "^2.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/cyberalien"
+      },
+      "peerDependencies": {
+        "react": ">=16"
+      }
+    },
+    "node_modules/@iconify/types": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz",
+      "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==",
+      "license": "MIT"
+    },
     "node_modules/@jest/schemas": {
       "version": "29.6.3",
       "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz",
diff --git a/docs/package.json b/docs/package.json
index 0dc1e714..0816be34 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -17,6 +17,7 @@
   "dependencies": {
     "@docusaurus/core": "^3.8.1",
     "@docusaurus/preset-classic": "^3.8.1",
+    "@iconify/react": "^6.0.0",
    "@mdx-js/react": "^3.0.0",
     "clsx": "^2.0.0",
     "prism-react-renderer": "^2.3.0",
diff --git a/docs/src/theme/MDXComponents.js b/docs/src/theme/MDXComponents.js
new file mode 100644
index 00000000..cea81a13
--- /dev/null
+++ b/docs/src/theme/MDXComponents.js
@@ -0,0 +1,10 @@
+import React from 'react';
+// Import the original mapper
+import MDXComponents from '@theme-original/MDXComponents';
+import { Icon } from '@iconify/react'; // Import the entire Iconify library.
+
+export default {
+  // Re-use the default mapping
+  ...MDXComponents,
+  Icon: Icon, // Make the iconify Icon component available in MDX as `<Icon />`.
+};
\ No newline at end of file

From 8175d228d5066fa28d35c8625b0dfc85be310cd6 Mon Sep 17 00:00:00 2001
From: Antoine Delannoy
Date: Wed, 23 Jul 2025 10:08:02 +0200
Subject: [PATCH 02/25] docs: Docusaurus sidebar configuration -> new sdk
 reference section

IHS-198
---
 docs/sidebars-python-sdk.ts | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/docs/sidebars-python-sdk.ts b/docs/sidebars-python-sdk.ts
index e2fc932c..49d7a606 100644
--- a/docs/sidebars-python-sdk.ts
+++ b/docs/sidebars-python-sdk.ts
@@ -1,4 +1,4 @@
-import type {SidebarsConfig} from '@docusaurus/plugin-content-docs';
+import type { SidebarsConfig } from '@docusaurus/plugin-content-docs';
 
 const sidebars: SidebarsConfig = {
   pythonSdkSidebar: [
@@ -39,6 +39,16 @@ const sidebars: SidebarsConfig = {
       type: 'category',
       label: 'Reference',
       items: [
+        {
+          type: 'category',
+          label: 'Python SDK API',
+          items: [
+            {
+              type: 'autogenerated',
+              dirName: 'sdk_ref',
+            },
+          ],
+        },
         'reference/config',
         'reference/templating',
       ],
From 4a8818eaaa7cb14b5299b15a51b3691df6428aa8 Mon Sep 17 00:00:00 2001
From: Antoine Delannoy
Date: Wed, 23 Jul 2025 11:14:14 +0200
Subject: [PATCH 03/25] feat: new invoke command generate-sdk-api-docs

IHS-196
---
 tasks.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 48 insertions(+)

diff --git a/tasks.py b/tasks.py
index b2434df0..475c9152 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import json
 import sys
@@ -281,3 +283,49 @@ def generate_repository_jsonschema(context: Context) -> None:
     repository_jsonschema.parent.mkdir(parents=True, exist_ok=True)
     repository_jsonschema.write_text(schema)
     print(f"Wrote to {repository_jsonschema}")
+
+
+@task(name="generate-sdk-api-docs")
+def generate_sdk_api_docs(context: Context, output: str | None = None) -> None:
+    """Generate API documentation for the Python SDK."""
+
+    # This is the list of code modules to generate documentation for.
+    MODULES_LIST = [
+        "infrahub_sdk.client",
+        "infrahub_sdk.node.node",
+    ]
+
+    import operator
+    import shutil
+    import tempfile
+    from functools import reduce
+
+    output_dir = Path(output) if output else DOCUMENTATION_DIRECTORY / "docs" / "python-sdk" / "sdk_ref"
+
+    # Create a temporary directory to store the generated documentation
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        # Generate the API documentation using mdxify and get flat file structure
+        exec_cmd = f"mdxify {' '.join(MODULES_LIST)} --output-dir {tmp_dir}"
+        context.run(exec_cmd, pty=True)
+
+        # Remove current obsolete documentation file structure
+        if (output_dir / "infrahub_sdk").exists():
+            shutil.rmtree(output_dir / "infrahub_sdk")
+
+        # Get all .mdx files in the generated doc folder and apply filters
+        filters = ["__init__"]
+        filtered_files = [
+            file
+            for file in list(Path(tmp_dir).glob("*.mdx"))
+            if all(filter.lower() not in file.name for filter in filters)
+        ]
+
+        # Reorganize the generated relevant files into the desired structure
+        for mdx_file in filtered_files:
+            target_path = output_dir / reduce(operator.truediv, (Path(part) for part in mdx_file.name.split("-")))
+
+            # Create the future parent directory if it doesn't exist
+            target_path.parent.mkdir(parents=True, exist_ok=True)
+
+            # Move the file to the new location
+            shutil.move(mdx_file, target_path)
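mdxify writes flat file names such as `infrahub_sdk-node-node.mdx`, and the `reduce(operator.truediv, ...)` expression in the task above turns each dash-separated name into a nested path under the output directory. A standalone sketch of that mapping, with illustrative file names:

```python
import operator
from functools import reduce
from pathlib import Path


def nested_target(flat_name: str, output_dir: Path) -> Path:
    """Map a flat mdxify file name to a nested path, splitting on dashes."""
    # "infrahub_sdk-node-node.mdx" -> output_dir / "infrahub_sdk" / "node" / "node.mdx"
    return output_dir / reduce(operator.truediv, (Path(part) for part in flat_name.split("-")))


assert nested_target("infrahub_sdk-client.mdx", Path("sdk_ref")) == Path("sdk_ref/infrahub_sdk/client.mdx")
assert nested_target("infrahub_sdk-node-node.mdx", Path("sdk_ref")) == Path("sdk_ref/infrahub_sdk/node/node.mdx")
```

Note that this mapping assumes module names themselves never contain dashes, which holds for Python module names.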
From 673fd757b0ddeb299ebbcd822e8fd074a434a12a Mon Sep 17 00:00:00 2001
From: Antoine Delannoy
Date: Wed, 23 Jul 2025 11:31:03 +0200
Subject: [PATCH 04/25] chore: add mdxify package

IHS-196
---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index c2e471cd..6233ce07 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,6 +51,7 @@ ctl = [
     "typer>=0.12.5",
     "click==8.1.*",
     "ariadne-codegen==0.15.3",
+    "mdxify>=0.2.23; python_version>='3.10'",
 ]
 
 all = [

From a6d8d05b61487f801489d10ae3d90e30fbcbb5a9 Mon Sep 17 00:00:00 2001
From: Antoine Delannoy
Date: Wed, 23 Jul 2025 14:54:51 +0200
Subject: [PATCH 05/25] feat: new ci check to ensure sdk api documentation is
 up to date

IHS-197
---
 .github/workflows/ci.yml | 48 ++++++++++++++++++++++++++++++++++++++--
 1 file changed, 46 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 67b452db..d19bf953 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -173,7 +173,7 @@
         uses: actions/setup-node@v5
         with:
           node-version: 20
-          cache: 'npm'
+          cache: "npm"
           cache-dependency-path: docs/package-lock.json
       - name: "Install dependencies"
         run: npm install
@@ -217,6 +217,50 @@
       - name: Validate generated documentation
         run: uv run invoke docs-validate
 
+  check-api-documentation-obsolescence:
+    if: |
+      always() && !cancelled() &&
+      !contains(needs.*.result, 'failure') &&
+      !contains(needs.*.result, 'cancelled') &&
+      ((needs.files-changed.outputs.python == 'true') || (needs.files-changed.outputs.documentation_generated == 'true'))
+    needs: ["prepare-environment", "files-changed", "yaml-lint", "python-lint"]
+    runs-on: "ubuntu-22.04"
+    env:
+      DOCS_COMMAND: "poetry run invoke generate-sdk-api-docs"
+      SDK_API_DOCS_DIR: "docs/docs/python-sdk/sdk_ref"
+    timeout-minutes: 5
+    steps:
+      - name: "Check out repository code"
+        uses: "actions/checkout@v4"
+        with:
+          submodules: true
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+      - name: "Setup Python environment"
+        run: |
+          pipx install poetry==${{ needs.prepare-environment.outputs.POETRY_VERSION }}
+          poetry config virtualenvs.create true --local
+          poetry env use 3.12
+      - name: "Install dependencies"
+        run: "poetry install --no-interaction --no-ansi --extras ctl"
+      - name: "Setup environment"
+        run: "pip install invoke toml"
+      - name: Install Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          cache: "npm"
+          cache-dependency-path: "**/package-lock.json"
+      - name: Install markdown linter
+        run: npm install -g markdownlint-cli2
+      - name: "Generate SDK API documentation"
+        run: ${{ env.DOCS_COMMAND }}
+      - name: "Check if SDK API documentation needs to be refreshed"
+        run: |
+          git diff --quiet ${SDK_API_DOCS_DIR}
+
   validate-documentation-style:
     if: |
       always() && !cancelled() &&
@@ -240,7 +284,7 @@
         env:
           VALE_VERSION: ${{ env.VALE_VERSION }}
       - name: "Validate documentation style"
-        run: ./vale $(find ./docs -type f \( -name "*.mdx" -o -name "*.md" \) )
+        run: ./vale $(find ./docs -type d -name sdk_ref -prune -false -o -type f \( -name "*.mdx" -o -name "*.md" \) )
 
   unit-tests:
     env:
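The freshness gate in the job above relies on `git diff --quiet`, which exits non-zero when the regenerated files under `SDK_API_DOCS_DIR` differ from the committed ones, failing the step. A rough local equivalent of that check in Python, with the directory path and messages chosen here for illustration:

```python
import subprocess
import sys


def docs_are_current(docs_dir: str = "docs/docs/python-sdk/sdk_ref") -> bool:
    """Return True when the working tree has no changes under docs_dir."""
    # git diff --quiet exits 0 when clean and 1 when there are unstaged changes.
    result = subprocess.run(["git", "diff", "--quiet", docs_dir], check=False)
    return result.returncode == 0


if __name__ == "__main__":
    if not docs_are_current():
        sys.exit("SDK API documentation is stale; run `invoke generate-sdk-api-docs` and commit the result.")
```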
""" config_path: Path = getattr(self.session, _infrahub_config_path_attribute) From a7e5c630d33ebfc0e6fe99567f3834684ce397b1 Mon Sep 17 00:00:00 2001 From: Antoine Delannoy Date: Fri, 25 Jul 2025 10:21:51 +0200 Subject: [PATCH 07/25] chore: add towncrier changelog fragment for #201 --- changelog/201.added.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/201.added.md diff --git a/changelog/201.added.md b/changelog/201.added.md new file mode 100644 index 00000000..b64cb2fa --- /dev/null +++ b/changelog/201.added.md @@ -0,0 +1 @@ +Add support for automatic Python SDK API from docstrings in the code. \ No newline at end of file From 6983fa5c12ad507d46b0ae0bb215db1293deb701 Mon Sep 17 00:00:00 2001 From: Pol Michel Date: Tue, 10 Feb 2026 10:36:52 +0100 Subject: [PATCH 08/25] docs: generation of documentation after legacy work IHS-199 --- docs/docs/python-sdk/reference/config.mdx | 46 - .../sdk_ref/infrahub_sdk/client.mdx | 905 ++++++++++++++++++ .../sdk_ref/infrahub_sdk/node/node.mdx | 393 ++++++++ 3 files changed, 1298 insertions(+), 46 deletions(-) create mode 100644 docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx create mode 100644 docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx diff --git a/docs/docs/python-sdk/reference/config.mdx b/docs/docs/python-sdk/reference/config.mdx index 320aebb4..1b525389 100644 --- a/docs/docs/python-sdk/reference/config.mdx +++ b/docs/docs/python-sdk/reference/config.mdx @@ -30,8 +30,6 @@ The Python SDK (Async or Sync) client can be configured using an instance of the The following settings can be defined in the `Config` class ## address - -**Property**: address
From 6983fa5c12ad507d46b0ae0bb215db1293deb701 Mon Sep 17 00:00:00 2001
From: Pol Michel
Date: Tue, 10 Feb 2026 10:36:52 +0100
Subject: [PATCH 08/25] docs: generation of documentation after legacy work

IHS-199
---
 docs/docs/python-sdk/reference/config.mdx |  46 -
 .../sdk_ref/infrahub_sdk/client.mdx       | 905 ++++++++++++++++++
 .../sdk_ref/infrahub_sdk/node/node.mdx    | 393 ++++++++
 3 files changed, 1298 insertions(+), 46 deletions(-)
 create mode 100644 docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
 create mode 100644 docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx

diff --git a/docs/docs/python-sdk/reference/config.mdx b/docs/docs/python-sdk/reference/config.mdx
index 320aebb4..1b525389 100644
--- a/docs/docs/python-sdk/reference/config.mdx
+++ b/docs/docs/python-sdk/reference/config.mdx
@@ -30,8 +30,6 @@ The Python SDK (Async or Sync) client can be configured using an instance of the
 The following settings can be defined in the `Config` class
 
 ## address
-
-**Property**: address<br />
 **Description**: The URL to use when connecting to Infrahub.<br />
 **Type**: `string`<br />
 **Default value**: http://localhost:8000<br />
 **Environment variable**: `INFRAHUB_ADDRESS`<br />
 
 ## api_token
-
-**Property**: api_token<br />
 **Description**: API token for authentication against Infrahub.<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_API_TOKEN`<br />
 
 ## echo_graphql_queries
-
-**Property**: echo_graphql_queries<br />
 **Description**: If set the GraphQL query and variables will be echoed to the screen<br />
 **Type**: `boolean`<br />
 **Default value**: False<br />
 **Environment variable**: `INFRAHUB_ECHO_GRAPHQL_QUERIES`<br />
 
 ## username
-
-**Property**: username<br />
 **Description**: Username for accessing Infrahub<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_USERNAME`<br />
 
 ## password
-
-**Property**: password<br />
 **Description**: Password for accessing Infrahub<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_PASSWORD`<br />
 
 ## default_branch
-
-**Property**: default_branch<br />
 **Description**: Default branch to target if not specified for each request.<br />
 **Type**: `string`<br />
 **Default value**: main<br />
 **Environment variable**: `INFRAHUB_DEFAULT_BRANCH`<br />
 
 ## default_branch_from_git
-
-**Property**: default_branch_from_git<br />
 **Description**: Indicates if the default Infrahub branch to target should come from the active branch in the local Git repository.<br />
 **Type**: `boolean`<br />
 **Default value**: False<br />
 **Environment variable**: `INFRAHUB_DEFAULT_BRANCH_FROM_GIT`<br />
 
 ## identifier
-
-**Property**: identifier<br />
 **Description**: Tracker identifier<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_IDENTIFIER`<br />
 
 ## insert_tracker
-
-**Property**: insert_tracker<br />
 **Description**: Insert a tracker on queries to the server<br />
 **Type**: `boolean`<br />
 **Default value**: False<br />
 **Environment variable**: `INFRAHUB_INSERT_TRACKER`<br />
 
 ## max_concurrent_execution
-
-**Property**: max_concurrent_execution<br />
 **Description**: Max concurrent execution in batch mode<br />
 **Type**: `integer`<br />
 **Default value**: 5<br />
 **Environment variable**: `INFRAHUB_MAX_CONCURRENT_EXECUTION`<br />
 
 ## mode
-
-**Property**: mode<br />
 **Description**: Default mode for the client<br />
 **Type**: `object`<br />
 **Environment variable**: `INFRAHUB_MODE`<br />
 
 ## pagination_size
-
-**Property**: pagination_size<br />
 **Description**: Page size for queries to the server<br />
 **Type**: `integer`<br />
 **Default value**: 50<br />
 **Environment variable**: `INFRAHUB_PAGINATION_SIZE`<br />
 
 ## retry_delay
-
-**Property**: retry_delay<br />
 **Description**: Number of seconds to wait until attempting a retry.<br />
 **Type**: `integer`<br />
 **Default value**: 5<br />
 **Environment variable**: `INFRAHUB_RETRY_DELAY`<br />
 
 ## retry_on_failure
-
-**Property**: retry_on_failure<br />
 **Description**: Retry operation in case of failure<br />
 **Type**: `boolean`<br />
 **Default value**: False<br />
 **Environment variable**: `INFRAHUB_RETRY_ON_FAILURE`<br />
 
 ## max_retry_duration
-
-**Property**: max_retry_duration<br />
 **Description**: Maximum duration until we stop attempting to retry if enabled.<br />
 **Type**: `integer`<br />
 **Default value**: 300<br />
 **Environment variable**: `INFRAHUB_MAX_RETRY_DURATION`<br />
 
 ## schema_converge_timeout
-
-**Property**: schema_converge_timeout<br />
 **Description**: Number of seconds to wait for schema to have converged<br />
 **Type**: `integer`<br />
 **Default value**: 30<br />
 **Environment variable**: `INFRAHUB_SCHEMA_CONVERGE_TIMEOUT`<br />
 
 ## timeout
-
-**Property**: timeout<br />
 **Description**: Default connection timeout in seconds<br />
 **Type**: `integer`<br />
 **Default value**: 60<br />
 **Environment variable**: `INFRAHUB_TIMEOUT`<br />
 
 ## transport
-
-**Property**: transport<br />
 **Description**: Set an alternate transport using a predefined option<br />
 **Type**: `object`<br />
 **Environment variable**: `INFRAHUB_TRANSPORT`<br />
 
 ## proxy
-
-**Property**: proxy<br />
 **Description**: Proxy address<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_PROXY`<br />
 
 ## proxy_mounts
-
-**Property**: proxy_mounts<br />
 **Description**: Proxy mounts configuration<br />
 **Type**: `object`<br />
 **Environment variable**: `INFRAHUB_PROXY_MOUNTS`<br />
 
 ## update_group_context
-
-**Property**: update_group_context<br />
 **Description**: Update GraphQL query groups<br />
 **Type**: `boolean`<br />
 **Default value**: False<br />
@@ -211,8 +169,6 @@ The following settings can be defined in the `Config` class
 **Environment variable**: `INFRAHUB_UPDATE_GROUP_CONTEXT`<br />
 
 ## tls_insecure
-
-**Property**: tls_insecure<br />
 **Description**: Indicates if TLS certificates are verified.
@@ -223,8 +179,6 @@ The following settings can be defined in the `Config` class
 **Environment variable**: `INFRAHUB_TLS_INSECURE`<br />
 
 ## tls_ca_file
-
-**Property**: tls_ca_file<br />
 **Description**: File path to CA cert or bundle in PEM format<br />
 **Type**: `string`<br />
 **Environment variable**: `INFRAHUB_TLS_CA_FILE`<br />
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
new file mode 100644
index 00000000..53b0ac20
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/client.mdx
@@ -0,0 +1,905 @@
+---
+title: client
+sidebarTitle: client
+---
+
+# `infrahub_sdk.client`
+
+## Functions
+
+### `handle_relogin`
+
+```python
+handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]) -> Callable[..., Coroutine[Any, Any, httpx.Response]]
+```
+
+### `handle_relogin_sync`
+
+```python
+handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]
+```
+
+### `raise_for_error_deprecation_warning`
+
+```python
+raise_for_error_deprecation_warning(value: bool | None) -> None
+```
+
+## Classes
+
+### `ProcessRelationsNode`
+
+### `ProxyConfig`
+
+### `ProxyConfigSync`
+
+### `ProcessRelationsNodeSync`
+
+### `BaseClient`
+
+Base class for InfrahubClient and InfrahubClientSync
+
+**Methods:**
+
+#### `request_context`
+
+```python
+request_context(self) -> RequestContext | None
+```
+
+#### `request_context`
+
+```python
+request_context(self, request_context: RequestContext) -> None
+```
+
+#### `start_tracking`
+
+```python
+start_tracking(self, identifier: str | None = None, params: dict[str, Any] | None = None, delete_unused_nodes: bool = False, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> Self
+```
+
+#### `set_context_properties`
+
+```python
+set_context_properties(self, identifier: str, params: dict[str, str] | None = None, delete_unused_nodes: bool = True, reset: bool = True, group_type: str | None = None, group_params: dict[str, Any] | None = None, branch: str | None = None) -> None
+```
+
+### `InfrahubClient`
+
+GraphQL Client to interact with Infrahub.
+
+**Methods:**
+
+#### `get_version`
+
+```python
+get_version(self) -> str
+```
+
+Return the Infrahub version.
+
+#### `get_user`
+
+```python
+get_user(self) -> dict
+```
+
+Return user information
+
+#### `get_user_permissions`
+
+```python
+get_user_permissions(self) -> dict
+```
+
+Return user permissions
+
+#### `create`
+
+```python
+create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `create`
+
+```python
+create(self, kind: type[SchemaType], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `create`
+
+```python
+create(self, kind: str | type[SchemaType], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNode | SchemaType
+```
+
+#### `delete`
+
+```python
+delete(self, kind: str | type[SchemaType], id: str, branch: str | None = None) -> None
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType | None
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: type[SchemaType], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaType
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode | None
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode
+```
+
+#### `get`
+
+```python
+get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None
+```
+
+#### `count`
+
+```python
+count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int
+```
+
+Return the number of nodes of a given kind.
+
+#### `all`
+
+```python
+all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType]
+```
+
+#### `all`
+
+```python
+all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode]
+```
+
+#### `all`
+
+```python
+all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve all nodes of a given kind
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performance.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- list\[InfrahubNode]: List of Nodes
+
+#### `filters`
+
+```python
+filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode]
+```
+
+#### `filters`
+
+```python
+filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType]
+```
+
+Retrieve nodes of a given kind based on provided filters.
+
+**Args:**
+- `kind`: kind of the nodes to query
+- `at`: Time of the query. Defaults to Now.
+- `branch`: Name of the branch to query from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes.
+- `offset`: The offset for pagination.
+- `limit`: The limit for pagination.
+- `include`: List of attributes or relationships to include in the query.
+- `exclude`: List of attributes or relationships to exclude from the query.
+- `fragment`: Flag to use GraphQL fragments for generic schemas.
+- `prefetch_relationships`: Flag to indicate whether to prefetch related node data.
+- `partial_match`: Allow partial match of filter criteria for the query.
+- `parallel`: Whether to use parallel processing for the query.
+- `order`: Ordering related options. Setting `disable=True` enhances performance.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+- `**kwargs`: Additional filter criteria for the query.
+
+**Returns:**
+- list\[InfrahubNode]: List of Nodes that match the given filters.
+
+#### `clone`
+
+```python
+clone(self, branch: str | None = None) -> InfrahubClient
+```
+
+Return a cloned version of the client using the same configuration
+
+#### `execute_graphql`
+
+```python
+execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict
+```
+
+Execute a GraphQL query (or mutation).
+If retry_on_failure is True, the query will retry until the server becomes reachable.
+
+**Args:**
+- `query`: GraphQL Query to execute, can be a query or a mutation
+- `variables`: Variables to pass along with the GraphQL query. Defaults to None.
+- `branch_name`: Name of the branch on which the query will be executed. Defaults to None.
+- `at`: Time when the query should be executed. Defaults to None.
+- `timeout`: Timeout in seconds for the query. Defaults to None.
+- `raise_for_error`: Deprecated. Controls only HTTP status handling.
+- None (default) or True\: HTTP errors raise via resp.raise_for_status().
+- False\: HTTP errors are not automatically raised. Defaults to None.
+
+**Raises:**
+- `GraphQLError`: When the GraphQL response contains errors.
+
+**Returns:**
+- The GraphQL data payload (response["data"]).
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `query_gql_query`
+
+```python
+query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict
+```
+
+#### `create_diff`
+
+```python
+create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str
+```
+
+#### `get_diff_summary`
+
+```python
+get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff]
+```
+
+#### `get_diff_tree`
+
+```python
+get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None
+```
+
+Get complete diff tree with metadata and nodes.
+
+Returns None if no diff exists.
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_address`
+
+```python
+allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP address by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to set on the address to allocate.
+- `address_type`: Kind of the address to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated address.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to attach to the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNode: Node corresponding to the allocated resource.
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map to use to set attributes values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds.
+- `tracker`: Tracker identifier to attach to the query.
+- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX.
+
+**Returns:**
+- InfrahubNode: Node corresponding to the allocated resource.
+
+#### `create_batch`
+
+```python
+create_batch(self, return_exceptions: bool = False) -> InfrahubBatch
+```
+
+#### `get_list_repositories`
+
+```python
+get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData]
+```
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target field names
+and their values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
include_metadata: bool = ..., **kwargs: Any) -> InfrahubNode +``` + +#### `get` + +```python +get(self, kind: str | type[SchemaType], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNode | SchemaType | None +``` + +#### `count` + +```python +count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int +``` + +Return the number of nodes of a given kind. + + +#### `all` + +```python +all(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaType] +``` + +#### `all` + +```python +all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNode] +``` + +#### `all` + +```python +all(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNode] | list[SchemaType] +``` + +Retrieve all nodes of a given kind + +**Args:** +- `kind`: kind of the nodes to query +- `at`: Time of the query. Defaults to Now. +- `branch`: Name of the branch to query from. Defaults to default_branch. +- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes. +- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds. +- `offset`: The offset for pagination. +- `limit`: The limit for pagination. +- `include`: List of attributes or relationships to include in the query. +- `exclude`: List of attributes or relationships to exclude from the query. +- `fragment`: Flag to use GraphQL fragments for generic schemas. +- `prefetch_relationships`: Flag to indicate whether to prefetch related node data. +- `parallel`: Whether to use parallel processing for the query. +- `order`: Ordering related options. Setting `disable=True` enhances performances. +- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query. 
+ +**Returns:** +- list\[InfrahubNode]: List of Nodes + + +#### `filters` + +```python +filters(self, kind: type[SchemaType], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaType] +``` + +#### `filters` + +```python +filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNode] +``` + +#### `filters` + +```python +filters(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNode] | list[SchemaType] +``` + +Retrieve nodes of a given kind based on provided filters. + +**Args:** +- `kind`: kind of the nodes to query +- `at`: Time of the query. Defaults to Now. +- `branch`: Name of the branch to query from. Defaults to default_branch. +- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds. +- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes. +- `offset`: The offset for pagination. +- `limit`: The limit for pagination. +- `include`: List of attributes or relationships to include in the query. +- `exclude`: List of attributes or relationships to exclude from the query. +- `fragment`: Flag to use GraphQL fragments for generic schemas. +- `prefetch_relationships`: Flag to indicate whether to prefetch related node data. +- `partial_match`: Allow partial match of filter criteria for the query. +- `parallel`: Whether to use parallel processing for the query. +- `order`: Ordering related options. Setting `disable=True` enhances performances. +- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query. +- `**kwargs`: Additional filter criteria for the query. + +**Returns:** +- list\[InfrahubNodeSync]: List of Nodes that match the given filters. + + +#### `clone` + +```python +clone(self, branch: str | None = None) -> InfrahubClient +``` + +Return a cloned version of the client using the same configuration + + +#### `execute_graphql` + +```python +execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict +``` + +Execute a GraphQL query (or mutation). +If retry_on_failure is True, the query will retry until the server becomes reacheable. 
+ +**Args:** +- `query`: GraphQL Query to execute, can be a query or a mutation +- `variables`: Variables to pass along with the GraphQL query. Defaults to None. +- `branch_name`: Name of the branch on which the query will be executed. Defaults to None. +- `at`: Time when the query should be executed. Defaults to None. +- `timeout`: Timeout in second for the query. Defaults to None. +- `raise_for_error`: Deprecated. Controls only HTTP status handling. +- None (default) or True\: HTTP errors raise via resp.raise_for_status(). +- False\: HTTP errors are not automatically raised. Defaults to None. + +**Raises:** +- `GraphQLError`: When the GraphQL response contains errors. + +**Returns:** +- The GraphQL data payload (response["data"]). + + +#### `refresh_login` + +```python +refresh_login(self) -> None +``` + +#### `login` + +```python +login(self, refresh: bool = False) -> None +``` + +#### `query_gql_query` + +```python +query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict +``` + +#### `create_diff` + +```python +create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str +``` + +#### `get_diff_summary` + +```python +get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] +``` + +#### `get_diff_tree` + +```python +get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None +``` + +Get complete diff tree with metadata and nodes. + +Returns None if no diff exists. + + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) 
-> SchemaType +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None +``` + +Allocate a new IP address by using the provided resource pool. + +**Args:** +- `resource_pool`: Node corresponding to the pool to allocate resources from. +- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier. +- `prefix_length`: Length of the prefix to set on the address to allocate. +- `address_type`: Kind of the address to allocate. +- `data`: A key/value map to use to set attributes values on the allocated address. +- `branch`: Name of the branch to allocate from. Defaults to default_branch. +- `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. +- `tracker`: The offset for pagination. +- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX. + +Returns: + InfrahubNode: Node corresponding to the allocated resource. 
+ + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaType +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaType | None +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaType +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNode +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNode | None +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNode | None +``` + +#### `allocate_next_ip_prefix` + +```python +allocate_next_ip_prefix(self, resource_pool: CoreNode, kind: type[SchemaType] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNode | SchemaType | None +``` + +Allocate a new IP prefix by using the provided resource pool. + +**Args:** +- `resource_pool`: Node corresponding to the pool to allocate resources from. +- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier. +- `prefix_length`: Length of the prefix to allocate. +- `member_type`: Member type of the prefix to allocate. +- `prefix_type`: Kind of the prefix to allocate. +- `data`: A key/value map to use to set attributes values on the allocated prefix. 
+- `branch`: Name of the branch to allocate from. Defaults to default_branch. +- `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. +- `tracker`: The offset for pagination. +- `raise_for_error`: Deprecated, raise an error if the HTTP status is not 2XX. + +Returns: + InfrahubNode: Node corresponding to the allocated resource. + + +#### `create_batch` + +```python +create_batch(self, return_exceptions: bool = False) -> InfrahubBatch +``` + +#### `get_list_repositories` + +```python +get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] +``` + +#### `repository_update_commit` + +```python +repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool +``` + +#### `convert_object_type` + +```python +convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNode +``` + +Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names +and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field +in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type +for more information. + + +### `InfrahubClientSync` + +**Methods:** + +#### `get_version` + +```python +get_version(self) -> str +``` + +Return the Infrahub version. + + +#### `get_user` + +```python +get_user(self) -> dict +``` + +Return user information + + +#### `get_user_permissions` + +```python +get_user_permissions(self) -> dict +``` + +Return user permissions + + +#### `create` + +```python +create(self, kind: str, data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> InfrahubNodeSync +``` + +#### `create` + +```python +create(self, kind: type[SchemaTypeSync], data: dict | None = ..., branch: str | None = ..., **kwargs: Any) -> SchemaTypeSync +``` + +#### `create` + +```python +create(self, kind: str | type[SchemaTypeSync], data: dict | None = None, branch: str | None = None, timeout: int | None = None, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync +``` + +#### `delete` + +```python +delete(self, kind: str | type[SchemaTypeSync], id: str, branch: str | None = None) -> None +``` + +#### `clone` + +```python +clone(self, branch: str | None = None) -> InfrahubClientSync +``` + +Return a cloned version of the client using the same configuration + + +#### `execute_graphql` + +```python +execute_graphql(self, query: str, variables: dict | None = None, branch_name: str | None = None, at: str | Timestamp | None = None, timeout: int | None = None, raise_for_error: bool | None = None, tracker: str | None = None) -> dict +``` + +Execute a GraphQL query (or mutation). +If retry_on_failure is True, the query will retry until the server becomes reacheable. + +**Args:** +- `query`: GraphQL Query to execute, can be a query or a mutation +- `variables`: Variables to pass along with the GraphQL query. Defaults to None. +- `branch_name`: Name of the branch on which the query will be executed. Defaults to None. +- `at`: Time when the query should be executed. Defaults to None. +- `timeout`: Timeout in second for the query. Defaults to None. +- `raise_for_error`: Deprecated. Controls only HTTP status handling. +- None (default) or True\: HTTP errors raise via `resp.raise_for_status()`. 
+- False\: HTTP errors are not automatically raised. +GraphQL errors always raise `GraphQLError`. Defaults to None. + +**Raises:** +- `GraphQLError`: When the GraphQL response contains errors. + +**Returns:** +- The GraphQL data payload (`response["data"]`). + + +#### `count` + +```python +count(self, kind: str | type[SchemaType], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, partial_match: bool = False, **kwargs: Any) -> int +``` + +Return the number of nodes of a given kind. + + +#### `all` + +```python +all(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[SchemaTypeSync] +``` + +#### `all` + +```python +all(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ...) -> list[InfrahubNodeSync] +``` + +#### `all` + +```python +all(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False) -> list[InfrahubNodeSync] | list[SchemaTypeSync] +``` + +Retrieve all nodes of a given kind + +**Args:** +- `kind`: kind of the nodes to query +- `at`: Time of the query. Defaults to Now. +- `branch`: Name of the branch to query from. Defaults to default_branch. +- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds. +- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes. +- `offset`: The offset for pagination. +- `limit`: The limit for pagination. +- `include`: List of attributes or relationships to include in the query. +- `exclude`: List of attributes or relationships to exclude from the query. +- `fragment`: Flag to use GraphQL fragments for generic schemas. +- `prefetch_relationships`: Flag to indicate whether to prefetch related node data. +- `parallel`: Whether to use parallel processing for the query. +- `order`: Ordering related options. Setting `disable=True` enhances performances. +- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query. 
+ +**Returns:** +- list\[InfrahubNodeSync]: List of Nodes + + +#### `filters` + +```python +filters(self, kind: type[SchemaTypeSync], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[SchemaTypeSync] +``` + +#### `filters` + +```python +filters(self, kind: str, at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., populate_store: bool = ..., offset: int | None = ..., limit: int | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., fragment: bool = ..., prefetch_relationships: bool = ..., partial_match: bool = ..., property: bool = ..., parallel: bool = ..., order: Order | None = ..., include_metadata: bool = ..., **kwargs: Any) -> list[InfrahubNodeSync] +``` + +#### `filters` + +```python +filters(self, kind: str | type[SchemaTypeSync], at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, populate_store: bool = True, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, parallel: bool = False, order: Order | None = None, include_metadata: bool = False, **kwargs: Any) -> list[InfrahubNodeSync] | list[SchemaTypeSync] +``` + +Retrieve nodes of a given kind based on provided filters. + +**Args:** +- `kind`: kind of the nodes to query +- `at`: Time of the query. Defaults to Now. +- `branch`: Name of the branch to query from. Defaults to default_branch. +- `timeout`: Overrides default timeout used when querying the GraphQL API. Specified in seconds. +- `populate_store`: Flag to indicate whether to populate the store with the retrieved nodes. +- `offset`: The offset for pagination. +- `limit`: The limit for pagination. +- `include`: List of attributes or relationships to include in the query. +- `exclude`: List of attributes or relationships to exclude from the query. +- `fragment`: Flag to use GraphQL fragments for generic schemas. +- `prefetch_relationships`: Flag to indicate whether to prefetch related node data. +- `partial_match`: Allow partial match of filter criteria for the query. +- `parallel`: Whether to use parallel processing for the query. +- `order`: Ordering related options. Setting `disable=True` enhances performances. +- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query. +- `**kwargs`: Additional filter criteria for the query. + +**Returns:** +- list\[InfrahubNodeSync]: List of Nodes that match the given filters. 
+ + +#### `get` + +```python +get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync | None +``` + +#### `get` + +```python +get(self, kind: type[SchemaTypeSync], raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync +``` + +#### `get` + +```python +get(self, kind: type[SchemaTypeSync], raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> SchemaTypeSync +``` + +#### `get` + +```python +get(self, kind: str, raise_when_missing: Literal[False], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync | None +``` + +#### `get` + +```python +get(self, kind: str, raise_when_missing: Literal[True], at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync +``` + +#### `get` + +```python +get(self, kind: str, raise_when_missing: bool = ..., at: Timestamp | None = ..., branch: str | None = ..., timeout: int | None = ..., id: str | None = ..., hfid: list[str] | None = ..., include: list[str] | None = ..., exclude: list[str] | None = ..., populate_store: bool = ..., fragment: bool = ..., prefetch_relationships: bool = ..., property: bool = ..., include_metadata: bool = ..., **kwargs: Any) -> InfrahubNodeSync +``` + +#### `get` + +```python +get(self, kind: str | type[SchemaTypeSync], raise_when_missing: bool = True, at: Timestamp | None = None, branch: str | None = None, timeout: int | None = None, id: str | None = None, hfid: list[str] | None = None, include: list[str] | None = None, exclude: list[str] | None = None, populate_store: bool = True, fragment: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False, **kwargs: Any) -> InfrahubNodeSync | SchemaTypeSync | None +``` + +#### `create_batch` + +```python +create_batch(self, return_exceptions: bool = False) -> InfrahubBatchSync +``` + +Create a batch to execute multiple queries concurrently. 
+ +Executing the batch will be performed using a thread pool, meaning it cannot guarantee the execution order. It is not recommended to use such +batch to manipulate objects that depend on each others. + + +#### `get_list_repositories` + +```python +get_list_repositories(self, branches: dict[str, BranchData] | None = None, kind: str = 'CoreGenericRepository') -> dict[str, RepositoryData] +``` + +#### `query_gql_query` + +```python +query_gql_query(self, name: str, variables: dict | None = None, update_group: bool = False, subscribers: list[str] | None = None, params: dict | None = None, branch_name: str | None = None, at: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> dict +``` + +#### `create_diff` + +```python +create_diff(self, branch: str, name: str, from_time: datetime, to_time: datetime, wait_until_completion: bool = True) -> bool | str +``` + +#### `get_diff_summary` + +```python +get_diff_summary(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> list[NodeDiff] +``` + +#### `get_diff_tree` + +```python +get_diff_tree(self, branch: str, name: str | None = None, from_time: datetime | None = None, to_time: datetime | None = None, timeout: int | None = None, tracker: str | None = None) -> DiffTreeData | None +``` + +Get complete diff tree with metadata and nodes. + +Returns None if no diff exists. + + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) 
-> SchemaTypeSync +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., address_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None +``` + +#### `allocate_next_ip_address` + +```python +allocate_next_ip_address(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, address_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None +``` + +Allocate a new IP address by using the provided resource pool. + +**Args:** +- `resource_pool`: Node corresponding to the pool to allocate resources from. +- `identifier`: Value to perform idempotent allocation, the same resource will be returned for a given identifier. +- `prefix_length`: Length of the prefix to set on the address to allocate. +- `address_type`: Kind of the address to allocate. +- `data`: A key/value map to use to set attributes values on the allocated address. +- `branch`: Name of the branch to allocate from. Defaults to default_branch. +- `timeout`: Flag to indicate whether to populate the store with the retrieved nodes. +- `tracker`: The offset for pagination. +- `raise_for_error`: The limit for pagination. + +Returns: + InfrahubNodeSync: Node corresponding to the allocated resource. 
+
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> SchemaTypeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync], identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> SchemaTypeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[True] = True) -> CoreNodeSync
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: Literal[False] = False) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: None = ..., identifier: str | None = ..., prefix_length: int | None = ..., member_type: str | None = ..., prefix_type: str | None = ..., data: dict[str, Any] | None = ..., branch: str | None = ..., timeout: int | None = ..., tracker: str | None = ..., raise_for_error: bool | None = ...) -> CoreNodeSync | None
+```
+
+#### `allocate_next_ip_prefix`
+
+```python
+allocate_next_ip_prefix(self, resource_pool: CoreNodeSync, kind: type[SchemaTypeSync] | None = None, identifier: str | None = None, prefix_length: int | None = None, member_type: str | None = None, prefix_type: str | None = None, data: dict[str, Any] | None = None, branch: str | None = None, timeout: int | None = None, tracker: str | None = None, raise_for_error: bool | None = None) -> CoreNodeSync | SchemaTypeSync | None
+```
+
+Allocate a new IP prefix by using the provided resource pool.
+
+**Args:**
+- `resource_pool`: Node corresponding to the pool to allocate resources from.
+- `identifier`: Value used to perform idempotent allocation; the same resource is returned for a given identifier.
+- `prefix_length`: Length of the prefix to allocate.
+- `member_type`: Member type of the prefix to allocate.
+- `prefix_type`: Kind of the prefix to allocate.
+- `data`: A key/value map used to set attribute values on the allocated prefix.
+- `branch`: Name of the branch to allocate from. Defaults to default_branch.
+- `timeout`: Timeout in seconds for the request.
+- `tracker`: String used to identify and track the query.
+- `raise_for_error`: Whether to raise an exception if the allocation fails; if False, None is returned instead.
+
+**Returns:**
+- InfrahubNodeSync: Node corresponding to the allocated resource.
+
+
+#### `repository_update_commit`
+
+```python
+repository_update_commit(self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False) -> bool
+```
+
+#### `refresh_login`
+
+```python
+refresh_login(self) -> None
+```
+
+#### `login`
+
+```python
+login(self, refresh: bool = False) -> None
+```
+
+#### `convert_object_type`
+
+```python
+convert_object_type(self, node_id: str, target_kind: str, branch: str | None = None, fields_mapping: dict[str, ConversionFieldInput] | None = None) -> InfrahubNodeSync
+```
+
+Convert a given node to another kind on a given branch. `fields_mapping` keys are target field names
+and their values indicate how to fill in these fields. Any mandatory field without an equivalent field
+in the source kind must be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+for more information.
+
diff --git a/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
new file mode 100644
index 00000000..9a4ec036
--- /dev/null
+++ b/docs/docs/python-sdk/sdk_ref/infrahub_sdk/node/node.mdx
@@ -0,0 +1,393 @@
+---
+title: node
+sidebarTitle: node
+---
+
+# `infrahub_sdk.node.node`
+
+## Classes
+
+### `InfrahubNodeBase`
+
+
+Base class for InfrahubNode and InfrahubNodeSync
+
+
+**Methods:**
+
+#### `get_branch`
+
+```python
+get_branch(self) -> str
+```
+
+#### `get_path_value`
+
+```python
+get_path_value(self, path: str) -> Any
+```
+
+#### `get_human_friendly_id`
+
+```python
+get_human_friendly_id(self) -> list[str] | None
+```
+
+#### `get_human_friendly_id_as_string`
+
+```python
+get_human_friendly_id_as_string(self, include_kind: bool = False) -> str | None
+```
+
+#### `hfid`
+
+```python
+hfid(self) -> list[str] | None
+```
+
+#### `hfid_str`
+
+```python
+hfid_str(self) -> str | None
+```
+
+#### `get_node_metadata`
+
+```python
+get_node_metadata(self) -> NodeMetadata | None
+```
+
+Returns the node metadata (created_at, created_by, updated_at, updated_by) if fetched.
+
+
+#### `get_kind`
+
+```python
+get_kind(self) -> str
+```
+
+#### `get_all_kinds`
+
+```python
+get_all_kinds(self) -> list[str]
+```
+
+#### `is_ip_prefix`
+
+```python
+is_ip_prefix(self) -> bool
+```
+
+#### `is_ip_address`
+
+```python
+is_ip_address(self) -> bool
+```
+
+#### `is_resource_pool`
+
+```python
+is_resource_pool(self) -> bool
+```
+
+#### `get_raw_graphql_data`
+
+```python
+get_raw_graphql_data(self) -> dict | None
+```
+
+#### `generate_query_data_init`
+
+```python
+generate_query_data_init(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, partial_match: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+### `InfrahubNode`
+
+
+Represents an Infrahub node in an asynchronous context.
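+
+Before the method list, here is a minimal create-and-save sketch for this class. The kind and attribute names are
+hypothetical and depend on your schema, and a reachable Infrahub instance is assumed:
+
+```python
+import asyncio
+
+from infrahub_sdk import InfrahubClient
+
+
+async def main() -> None:
+    client = InfrahubClient(address="http://localhost:8000")
+    # "InfraDevice" and its "name" attribute are schema-dependent examples.
+    device = await client.create(kind="InfraDevice", name="atl1-edge1")
+    await device.save(allow_upsert=True)  # allow_upsert keeps the call idempotent
+    print(device.get_kind(), device.get_branch())
+
+
+asyncio.run(main())
+```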
+
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClient, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+    Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+
+**Returns:**
+- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format
+
+
+#### `add_relationships`
+
+```python
+add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `remove_relationships`
+
+```python
+remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None
+```
+
+#### `create`
+
+```python
+create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `update`
+
+```python
+update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `get_pool_allocated_resources`
+
+```python
+get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]
+```
+
+Fetch all nodes that were allocated for the pool and a given resource.
+
+**Args:**
+- `resource`: The resource from which the nodes were allocated.
+
+**Returns:**
+- list\[InfrahubNode]: The allocated nodes.
+
+
+#### `get_pool_resources_utilization`
+
+```python
+get_pool_resources_utilization(self) -> list[dict[str, Any]]
+```
+
+Fetch the utilization of each resource for the pool.
+
+**Returns:**
+- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool.
+
+
+#### `get_flat_value`
+
+```python
+get_flat_value(self, key: str, separator: str = '__') -> Any
+```
+
+Query recursively a value defined in a flat notation (string), on a hierarchy of objects
+
+**Examples:**
+
+name__value
+module.object.value
+
+
+#### `extract`
+
+```python
+extract(self, params: dict[str, str]) -> dict[str, Any]
+```
+
+Extract some datapoints defined in a flat notation.
+
+
+### `InfrahubNodeSync`
+
+
+Represents an Infrahub node in a synchronous context.
+
+
+**Methods:**
+
+#### `from_graphql`
+
+```python
+from_graphql(cls, client: InfrahubClientSync, branch: str, data: dict, schema: MainSchemaTypesAPI | None = None, timeout: int | None = None) -> Self
+```
+
+#### `generate`
+
+```python
+generate(self, nodes: list[str] | None = None) -> None
+```
+
+#### `artifact_generate`
+
+```python
+artifact_generate(self, name: str) -> None
+```
+
+#### `artifact_fetch`
+
+```python
+artifact_fetch(self, name: str) -> str | dict[str, Any]
+```
+
+#### `delete`
+
+```python
+delete(self, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `save`
+
+```python
+save(self, allow_upsert: bool = False, update_group_context: bool | None = None, timeout: int | None = None, request_context: RequestContext | None = None) -> None
+```
+
+#### `generate_query_data`
+
+```python
+generate_query_data(self, filters: dict[str, Any] | None = None, offset: int | None = None, limit: int | None = None, include: list[str] | None = None, exclude: list[str] | None = None, fragment: bool = False, prefetch_relationships: bool = False, partial_match: bool = False, property: bool = False, order: Order | None = None, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+#### `generate_query_data_node`
+
+```python
+generate_query_data_node(self, include: list[str] | None = None, exclude: list[str] | None = None, inherited: bool = True, insert_alias: bool = False, prefetch_relationships: bool = False, property: bool = False, include_metadata: bool = False) -> dict[str, Any | dict]
+```
+
+Generate the node part of a GraphQL Query with attributes and nodes.
+
+**Args:**
+- `include`: List of attributes or relationships to include. Defaults to None.
+- `exclude`: List of attributes or relationships to exclude. Defaults to None.
+- `inherited`: Indicates whether the attributes and relationships inherited from generics should be included as well.
+    Defaults to True.
+- `insert_alias`: If True, inserts aliases in the query for each attribute or relationship.
+- `prefetch_relationships`: If True, pre-fetches relationship data as part of the query.
+- `include_metadata`: If True, includes node_metadata and relationship_metadata in the query.
+ +**Returns:** +- dict\[str, Union\[Any, Dict]]: GraphQL query in dictionary format + + +#### `add_relationships` + +```python +add_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None +``` + +#### `remove_relationships` + +```python +remove_relationships(self, relation_to_update: str, related_nodes: list[str]) -> None +``` + +#### `create` + +```python +create(self, allow_upsert: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None +``` + +#### `update` + +```python +update(self, do_full_update: bool = False, timeout: int | None = None, request_context: RequestContext | None = None) -> None +``` + +#### `get_pool_allocated_resources` + +```python +get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync] +``` + +Fetch all nodes that were allocated for the pool and a given resource. + +**Args:** +- `resource`: The resource from which the nodes were allocated. + +**Returns:** +- list\[InfrahubNodeSync]: The allocated nodes. + + +#### `get_pool_resources_utilization` + +```python +get_pool_resources_utilization(self) -> list[dict[str, Any]] +``` + +Fetch the utilization of each resource for the pool. + +**Returns:** +- list\[dict\[str, Any]]: A list containing the allocation numbers for each resource of the pool. + + +#### `get_flat_value` + +```python +get_flat_value(self, key: str, separator: str = '__') -> Any +``` + +Query recursively a value defined in a flat notation (string), on a hierarchy of objects + +**Examples:** + +name__value +module.object.value + + +#### `extract` + +```python +extract(self, params: dict[str, str]) -> dict[str, Any] +``` + +Extract some datapoints defined in a flat notation. + From 93eeff907a7dab6fe09dcfc678b801ae4b96c50d Mon Sep 17 00:00:00 2001 From: Pol Michel Date: Tue, 10 Feb 2026 10:37:47 +0100 Subject: [PATCH 09/25] feat: Adapted mdxify commit to uv, as poetry was used before IHS-195 --- uv.lock | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/uv.lock b/uv.lock index 7563b99c..c258ba70 100644 --- a/uv.lock +++ b/uv.lock @@ -717,6 +717,7 @@ ctl = [ { name = "ariadne-codegen" }, { name = "click" }, { name = "jinja2" }, + { name = "mdxify" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "numpy", version = "2.3.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pyarrow" }, @@ -783,6 +784,7 @@ requires-dist = [ { name = "httpx", specifier = ">=0.20" }, { name = "jinja2", marker = "extra == 'all'", specifier = ">=3" }, { name = "jinja2", marker = "extra == 'ctl'", specifier = ">=3" }, + { name = "mdxify", marker = "python_full_version >= '3.10' and extra == 'ctl'", specifier = ">=0.2.23" }, { name = "netutils", specifier = ">=1.0.0" }, { name = "numpy", marker = "python_full_version >= '3.12' and extra == 'all'", specifier = ">=1.26.2" }, { name = "numpy", marker = "python_full_version >= '3.12' and extra == 'ctl'", specifier = ">=1.26.2" }, @@ -1149,6 +1151,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "mdxify" +version = "0.2.36" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"griffe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/8b/eec3cc2f5b9e15a1d5d1a7399cf68b420bbd7ab8c363c789cfb14f783a09/mdxify-0.2.36.tar.gz", hash = "sha256:bd8afc3036b8258b13cd6d44413f1805088a9959b1b2d63eae9160cc037ee8e4", size = 1250127, upload-time = "2026-02-06T17:58:19.542Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/b4/3ad6aac18dbd5913201cd3bbf19a896a59fd418c7e87a5abf18575fb339a/mdxify-0.2.36-py3-none-any.whl", hash = "sha256:9dbe9b3e608ad1b9d5d95f95fcc66788d0d737a52eadd8bdb1244e628dc6d98c", size = 24552, upload-time = "2026-02-06T17:58:18.542Z" }, +] + [[package]] name = "mypy" version = "1.11.2" From 03fa4027d11cf34704351dcbf92330bfe908be42 Mon Sep 17 00:00:00 2001 From: Pol Michel Date: Tue, 10 Feb 2026 12:57:35 +0100 Subject: [PATCH 10/25] refactor: made the Jinja2Template extends the ATemplate class, allowing the future documentation generation method to be tested in isolation IHS-201 --- infrahub_sdk/template/__init__.py | 229 ++------------------- infrahub_sdk/template/base.py | 11 + infrahub_sdk/template/exceptions.py | 54 ++--- infrahub_sdk/template/filters.py | 155 +------------- infrahub_sdk/template/jinja2/__init__.py | 218 ++++++++++++++++++++ infrahub_sdk/template/jinja2/exceptions.py | 41 ++++ infrahub_sdk/template/jinja2/filters.py | 151 ++++++++++++++ infrahub_sdk/template/jinja2/models.py | 10 + infrahub_sdk/template/models.py | 13 +- pyproject.toml | 2 +- 10 files changed, 479 insertions(+), 405 deletions(-) create mode 100644 infrahub_sdk/template/base.py create mode 100644 infrahub_sdk/template/jinja2/__init__.py create mode 100644 infrahub_sdk/template/jinja2/exceptions.py create mode 100644 infrahub_sdk/template/jinja2/filters.py create mode 100644 infrahub_sdk/template/jinja2/models.py diff --git a/infrahub_sdk/template/__init__.py b/infrahub_sdk/template/__init__.py index ff866ecd..c5607043 100644 --- a/infrahub_sdk/template/__init__.py +++ b/infrahub_sdk/template/__init__.py @@ -1,217 +1,28 @@ from __future__ import annotations -import linecache -from collections.abc import Callable -from pathlib import Path -from typing import Any, NoReturn - -import jinja2 -from jinja2 import meta, nodes -from jinja2.sandbox import SandboxedEnvironment -from netutils.utils import jinja2_convenience_function -from rich.syntax import Syntax -from rich.traceback import Traceback - -from .exceptions import ( +from .base import ATemplate +from .jinja2 import Jinja2Template +from .jinja2.exceptions import ( JinjaTemplateError, JinjaTemplateNotFoundError, JinjaTemplateOperationViolationError, JinjaTemplateSyntaxError, JinjaTemplateUndefinedError, ) -from .filters import AVAILABLE_FILTERS -from .models import UndefinedJinja2Error - -netutils_filters = jinja2_convenience_function() - - -class Jinja2Template: - def __init__( - self, - template: str | Path, - template_directory: Path | None = None, - filters: dict[str, Callable] | None = None, - ) -> None: - self.is_string_based = isinstance(template, str) - self.is_file_based = isinstance(template, Path) - self._template = str(template) - self._template_directory = template_directory - self._environment: jinja2.Environment | None = None - - self._available_filters = [filter_definition.name for filter_definition in AVAILABLE_FILTERS] - self._trusted_filters = [ - filter_definition.name for filter_definition in AVAILABLE_FILTERS if filter_definition.trusted - ] - - self._filters = filters or {} - for user_filter in self._filters: - self._available_filters.append(user_filter) - 
self._trusted_filters.append(user_filter) - - self._template_definition: jinja2.Template | None = None - - def get_environment(self) -> jinja2.Environment: - if self._environment: - return self._environment - - if self.is_string_based: - return self._get_string_based_environment() - - return self._get_file_based_environment() - - def get_template(self) -> jinja2.Template: - if self._template_definition: - return self._template_definition - - try: - if self.is_string_based: - template = self._get_string_based_template() - else: - template = self._get_file_based_template() - except jinja2.TemplateSyntaxError as exc: - self._raise_template_syntax_error(error=exc) - except jinja2.TemplateNotFound as exc: - raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name)) - - return template - - def get_variables(self) -> list[str]: - env = self.get_environment() - - template_source = self._template - if self.is_file_based and env.loader: - template_source = env.loader.get_source(env, self._template)[0] - - try: - template = env.parse(template_source) - except jinja2.TemplateSyntaxError as exc: - self._raise_template_syntax_error(error=exc) - - return sorted(meta.find_undeclared_variables(template)) - - def validate(self, restricted: bool = True) -> None: - allowed_list = self._available_filters - if restricted: - allowed_list = self._trusted_filters - - env = self.get_environment() - template_source = self._template - if self.is_file_based and env.loader: - template_source = env.loader.get_source(env, self._template)[0] - - try: - template = env.parse(template_source) - except jinja2.TemplateSyntaxError as exc: - self._raise_template_syntax_error(error=exc) - - for node in template.find_all(nodes.Filter): - if node.name not in allowed_list: - raise JinjaTemplateOperationViolationError(f"The '{node.name}' filter isn't allowed to be used") - - forbidden_operations = ["Call", "Import", "Include"] - if self.is_string_based and any(node.__class__.__name__ in forbidden_operations for node in template.body): - raise JinjaTemplateOperationViolationError( - f"These operations are forbidden for string based templates: {forbidden_operations}" - ) - - async def render(self, variables: dict[str, Any]) -> str: - template = self.get_template() - try: - output = await template.render_async(variables) - except jinja2.exceptions.TemplateNotFound as exc: - raise JinjaTemplateNotFoundError(message=exc.message, filename=str(exc.name), base_template=template.name) - except jinja2.TemplateSyntaxError as exc: - self._raise_template_syntax_error(error=exc) - except jinja2.UndefinedError as exc: - traceback = Traceback(show_locals=False) - errors = _identify_faulty_jinja_code(traceback=traceback) - raise JinjaTemplateUndefinedError(message=exc.message, errors=errors) - except Exception as exc: - if error_message := getattr(exc, "message", None): - message = error_message - else: - message = str(exc) - raise JinjaTemplateError(message=message or "Unknown template error") - - return output - - def _get_string_based_environment(self) -> jinja2.Environment: - env = SandboxedEnvironment(enable_async=True, undefined=jinja2.StrictUndefined) - self._set_filters(env=env) - self._environment = env - return self._environment - - def _get_file_based_environment(self) -> jinja2.Environment: - template_loader = jinja2.FileSystemLoader(searchpath=str(self._template_directory)) - env = jinja2.Environment( - loader=template_loader, - trim_blocks=True, - lstrip_blocks=True, - enable_async=True, - ) - 
self._set_filters(env=env) - self._environment = env - return self._environment - - def _set_filters(self, env: jinja2.Environment) -> None: - for default_filter in list(env.filters.keys()): - if default_filter not in self._available_filters: - del env.filters[default_filter] - - # Add filters from netutils - env.filters.update( - {name: jinja_filter for name, jinja_filter in netutils_filters.items() if name in self._available_filters} - ) - # Add user supplied filters - env.filters.update(self._filters) - - def _get_string_based_template(self) -> jinja2.Template: - env = self.get_environment() - self._template_definition = env.from_string(self._template) - return self._template_definition - - def _get_file_based_template(self) -> jinja2.Template: - env = self.get_environment() - self._template_definition = env.get_template(self._template) - return self._template_definition - - def _raise_template_syntax_error(self, error: jinja2.TemplateSyntaxError) -> NoReturn: - filename: str | None = None - if error.filename and self._template_directory: - filename = error.filename - if error.filename.startswith(str(self._template_directory)): - filename = error.filename[len(str(self._template_directory)) :] - - raise JinjaTemplateSyntaxError(message=error.message, filename=filename, lineno=error.lineno) - - -def _identify_faulty_jinja_code(traceback: Traceback, nbr_context_lines: int = 3) -> list[UndefinedJinja2Error]: - """This function identifies the faulty Jinja2 code and beautify it to provide meaningful information to the user. - - We use the rich's Traceback to parse the complete stack trace and extract Frames for each exception found in the trace. - """ - response = [] - - # Extract only the Jinja related exception - for frame in [frame for frame in traceback.trace.stacks[0].frames if not frame.filename.endswith(".py")]: - code = "".join(linecache.getlines(frame.filename)) - if frame.filename == "