From 6784f951b1c0a1c97de810364703355209e948b4 Mon Sep 17 00:00:00 2001 From: Gabor Feher Date: Mon, 9 Mar 2026 13:18:04 +0000 Subject: [PATCH 1/4] feat: Implement a vertex based task store for the 1.0 branch --- pyproject.toml | 2 + src/a2a/contrib/tasks/__init__.py | 0 .../contrib/tasks/vertex_task_converter.py | 158 ++++++ src/a2a/contrib/tasks/vertex_task_store.py | 229 ++++++++ tck/sut_agent_with_vertex_task_store.py | 54 ++ tests/contrib/tasks/fake_vertex_client.py | 137 +++++ tests/contrib/tasks/run_vertex_tests.sh | 17 + .../tasks/test_vertex_task_converter.py | 391 ++++++++++++++ tests/contrib/tasks/test_vertex_task_store.py | 503 ++++++++++++++++++ uv.lock | 337 +++++++++++- 10 files changed, 1821 insertions(+), 7 deletions(-) create mode 100644 src/a2a/contrib/tasks/__init__.py create mode 100644 src/a2a/contrib/tasks/vertex_task_converter.py create mode 100644 src/a2a/contrib/tasks/vertex_task_store.py create mode 100644 tck/sut_agent_with_vertex_task_store.py create mode 100644 tests/contrib/tasks/fake_vertex_client.py create mode 100755 tests/contrib/tasks/run_vertex_tests.sh create mode 100644 tests/contrib/tasks/test_vertex_task_converter.py create mode 100644 tests/contrib/tasks/test_vertex_task_store.py diff --git a/pyproject.toml b/pyproject.toml index 0814a70e5..a4f8506c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ mysql = ["sqlalchemy[asyncio,aiomysql]>=2.0.0"] signing = ["PyJWT>=2.0.0"] sqlite = ["sqlalchemy[asyncio,aiosqlite]>=2.0.0"] db-cli = ["alembic>=1.14.0"] +vertex = ["google-cloud-aiplatform>=1.140.0"] sql = ["a2a-sdk[postgresql,mysql,sqlite]"] @@ -51,6 +52,7 @@ all = [ "a2a-sdk[telemetry]", "a2a-sdk[signing]", "a2a-sdk[db-cli]", + "a2a-sdk[vertex]", ] [project.urls] diff --git a/src/a2a/contrib/tasks/__init__.py b/src/a2a/contrib/tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/a2a/contrib/tasks/vertex_task_converter.py b/src/a2a/contrib/tasks/vertex_task_converter.py new file 
# --- reconstructed from patch hunk: src/a2a/contrib/tasks/vertex_task_converter.py ---
"""Converters between SDK (compat v0.3) task types and Vertex AI stored types."""

try:
    from vertexai import types as vertexai_types
except ImportError as e:
    raise ImportError(
        'vertex_task_converter requires vertexai. '
        'Install with: '
        "'pip install a2a-sdk[vertex]'"
    ) from e

import base64
import json

from a2a.compat.v0_3.types import (
    Artifact,
    DataPart,
    FilePart,
    FileWithBytes,
    FileWithUri,
    Part,
    Task,
    TaskState,
    TaskStatus,
    TextPart,
)


# One-to-one mapping between Vertex stored task states and SDK task states;
# the reverse map below is derived from it so the two can never drift apart.
_TO_SDK_TASK_STATE = {
    vertexai_types.State.STATE_UNSPECIFIED: TaskState.unknown,
    vertexai_types.State.SUBMITTED: TaskState.submitted,
    vertexai_types.State.WORKING: TaskState.working,
    vertexai_types.State.COMPLETED: TaskState.completed,
    vertexai_types.State.CANCELLED: TaskState.canceled,
    vertexai_types.State.FAILED: TaskState.failed,
    vertexai_types.State.REJECTED: TaskState.rejected,
    vertexai_types.State.INPUT_REQUIRED: TaskState.input_required,
    vertexai_types.State.AUTH_REQUIRED: TaskState.auth_required,
}

_SDK_TO_STORED_TASK_STATE = {v: k for k, v in _TO_SDK_TASK_STATE.items()}


def to_sdk_task_state(stored_state: vertexai_types.State) -> TaskState:
    """Converts a proto A2aTask.State to a TaskState enum.

    Unknown stored states degrade to TaskState.unknown instead of raising.
    """
    return _TO_SDK_TASK_STATE.get(stored_state, TaskState.unknown)


def to_stored_task_state(task_state: TaskState) -> vertexai_types.State:
    """Converts a TaskState enum to a proto A2aTask.State enum value.

    Unknown SDK states degrade to STATE_UNSPECIFIED instead of raising.
    """
    return _SDK_TO_STORED_TASK_STATE.get(
        task_state, vertexai_types.State.STATE_UNSPECIFIED
    )


def to_stored_part(part: Part) -> vertexai_types.Part:
    """Converts a SDK Part to a proto Part.

    DataParts are JSON-serialized into an inline blob with
    mime_type='application/json'; this loses the DataPart/FilePart
    distinction on the way back (see to_sdk_part).

    Raises:
        ValueError: If the part's root type is not supported.
    """
    if isinstance(part.root, TextPart):
        return vertexai_types.Part(text=part.root.text)
    if isinstance(part.root, DataPart):
        data_bytes = json.dumps(part.root.data).encode('utf-8')
        return vertexai_types.Part(
            inline_data=vertexai_types.Blob(
                mime_type='application/json', data=data_bytes
            )
        )
    if isinstance(part.root, FilePart):
        file_content = part.root.file
        if isinstance(file_content, FileWithBytes):
            # SDK FileWithBytes carries base64 text; the stored Blob wants raw bytes.
            decoded_bytes = base64.b64decode(file_content.bytes)
            return vertexai_types.Part(
                inline_data=vertexai_types.Blob(
                    mime_type=file_content.mime_type or '', data=decoded_bytes
                )
            )
        if isinstance(file_content, FileWithUri):
            return vertexai_types.Part(
                file_data=vertexai_types.FileData(
                    mime_type=file_content.mime_type or '',
                    file_uri=file_content.uri,
                )
            )
    raise ValueError(f'Unsupported part type: {type(part.root)}')


def to_sdk_part(stored_part: vertexai_types.Part) -> Part:
    """Converts a proto Part to a SDK Part.

    Raises:
        ValueError: If none of text/inline_data/file_data is set.
    """
    # BUGFIX: use explicit `is not None` checks instead of truthiness so that
    # an empty TextPart ('') written by to_stored_part still round-trips
    # instead of falling through to the ValueError below. (vertexai types are
    # pydantic models, so unset optional fields are None, not ''.)
    if stored_part.text is not None:
        return Part(root=TextPart(text=stored_part.text))
    if stored_part.inline_data is not None:
        encoded_bytes = base64.b64encode(stored_part.inline_data.data).decode(
            'utf-8'
        )
        return Part(
            root=FilePart(
                file=FileWithBytes(
                    mime_type=stored_part.inline_data.mime_type,
                    bytes=encoded_bytes,
                )
            )
        )
    if stored_part.file_data is not None:
        return Part(
            root=FilePart(
                file=FileWithUri(
                    mime_type=stored_part.file_data.mime_type,
                    uri=stored_part.file_data.file_uri,
                )
            )
        )

    raise ValueError(f'Unsupported part: {stored_part}')


def to_stored_artifact(artifact: Artifact) -> vertexai_types.TaskArtifact:
    """Converts a SDK Artifact to a proto TaskArtifact."""
    return vertexai_types.TaskArtifact(
        artifact_id=artifact.artifact_id,
        parts=[to_stored_part(part) for part in artifact.parts],
    )


def to_sdk_artifact(stored_artifact: vertexai_types.TaskArtifact) -> Artifact:
    """Converts a proto TaskArtifact to a SDK Artifact."""
    return Artifact(
        artifact_id=stored_artifact.artifact_id,
        parts=[to_sdk_part(part) for part in stored_artifact.parts],
    )


def to_stored_task(task: Task) -> vertexai_types.A2aTask:
    """Converts a SDK Task to a proto A2aTask.

    NOTE: only the state is kept from task.status, and history is not
    stored (history is expected to be reconstructed from events later).
    """
    return vertexai_types.A2aTask(
        context_id=task.context_id,
        metadata=task.metadata,
        state=to_stored_task_state(task.status.state),
        output=vertexai_types.TaskOutput(
            artifacts=[
                to_stored_artifact(artifact)
                for artifact in task.artifacts or []
            ]
        ),
    )


def to_sdk_task(a2a_task: vertexai_types.A2aTask) -> Task:
    """Converts a proto A2aTask to a SDK Task.

    The SDK task ID is the last segment of the fully qualified resource name.
    """
    return Task(
        id=a2a_task.name.split('/')[-1],
        context_id=a2a_task.context_id,
        status=TaskStatus(state=to_sdk_task_state(a2a_task.state)),
        metadata=a2a_task.metadata or {},
        artifacts=[
            to_sdk_artifact(artifact)
            for artifact in a2a_task.output.artifacts or []
        ]
        if a2a_task.output
        else [],
        history=[],
    )


# --- reconstructed from patch hunk: src/a2a/contrib/tasks/vertex_task_store.py (module header) ---
import logging


try:
    import vertexai

    from google.genai import errors as genai_errors
    from vertexai import types as vertexai_types
except ImportError as e:
    raise ImportError(
        'VertexTaskStore requires vertexai. '
        'Install with: '
        "'pip install a2a-sdk[vertex]'"
    ) from e

from a2a.compat.v0_3.conversions import to_compat_task, to_core_task
from a2a.compat.v0_3.types import Task as CompatTask
from a2a.contrib.tasks import vertex_task_converter
from a2a.server.context import ServerCallContext
from a2a.server.tasks.task_store import TaskStore
from a2a.types.a2a_pb2 import ListTasksRequest, ListTasksResponse, Task


logger = logging.getLogger(__name__)
+ """ + self._client = client + self._agent_engine_resource_id = agent_engine_resource_id + + async def save( + self, task: Task, context: ServerCallContext | None = None + ) -> None: + """Saves or updates a task in the store.""" + compat_task = to_compat_task(task) + previous_task = await self._get_stored_task(compat_task.id) + if previous_task is None: + await self._create(compat_task) + else: + await self._update(previous_task, compat_task) + + async def _create(self, sdk_task: CompatTask) -> None: + stored_task = vertex_task_converter.to_stored_task(sdk_task) + await self._client.aio.agent_engines.a2a_tasks.create( + name=self._agent_engine_resource_id, + a2a_task_id=sdk_task.id, + config=vertexai_types.CreateAgentEngineTaskConfig( + context_id=stored_task.context_id, + metadata=stored_task.metadata, + output=stored_task.output, + ), + ) + + def _get_status_change_event( + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, + ) -> vertexai_types.TaskEvent | None: + if task.status.state != previous_task.status.state: + return vertexai_types.TaskEvent( + event_data=vertexai_types.TaskEventData( + state_change=vertexai_types.TaskStateChange( + new_state=vertex_task_converter.to_stored_task_state( + task.status.state + ), + ), + ), + event_sequence_number=event_sequence_number, + ) + return None + + def _get_metadata_change_event( + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, + ) -> vertexai_types.TaskEvent | None: + if task.metadata != previous_task.metadata: + return vertexai_types.TaskEvent( + event_data=vertexai_types.TaskEventData( + metadata_change=vertexai_types.TaskMetadataChange( + new_metadata=task.metadata, + ) + ), + event_sequence_number=event_sequence_number, + ) + return None + + def _get_artifacts_change_event( + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, + ) -> vertexai_types.TaskEvent | None: + if task.artifacts != 
previous_task.artifacts: + task_artifact_change = vertexai_types.TaskArtifactChange() + event = vertexai_types.TaskEvent( + event_data=vertexai_types.TaskEventData( + output_change=vertexai_types.TaskOutputChange( + task_artifact_change=task_artifact_change + ) + ), + event_sequence_number=event_sequence_number, + ) + task_artifacts = ( + {artifact.artifact_id: artifact for artifact in task.artifacts} + if task.artifacts + else {} + ) + previous_task_artifacts = ( + { + artifact.artifact_id: artifact + for artifact in previous_task.artifacts + } + if previous_task.artifacts + else {} + ) + for artifact in previous_task_artifacts.values(): + if artifact.artifact_id not in task_artifacts: + if not task_artifact_change.deleted_artifact_ids: + task_artifact_change.deleted_artifact_ids = [] + task_artifact_change.deleted_artifact_ids.append( + artifact.artifact_id + ) + for artifact in task_artifacts.values(): + if artifact.artifact_id not in previous_task_artifacts: + if not task_artifact_change.added_artifacts: + task_artifact_change.added_artifacts = [] + task_artifact_change.added_artifacts.append( + vertex_task_converter.to_stored_artifact(artifact) + ) + elif artifact != previous_task_artifacts[artifact.artifact_id]: + if not task_artifact_change.updated_artifacts: + task_artifact_change.updated_artifacts = [] + task_artifact_change.updated_artifacts.append( + vertex_task_converter.to_stored_artifact(artifact) + ) + if task_artifact_change != vertexai_types.TaskArtifactChange(): + return event + return None + + async def _update( + self, previous_stored_task: vertexai_types.A2aTask, task: CompatTask + ) -> None: + previous_task = vertex_task_converter.to_sdk_task(previous_stored_task) + events = [] + event_sequence_number = previous_stored_task.next_event_sequence_number + + status_event = self._get_status_change_event( + previous_task, task, event_sequence_number + ) + if status_event: + events.append(status_event) + event_sequence_number += 1 + + metadata_event 
= self._get_metadata_change_event( + previous_task, task, event_sequence_number + ) + if metadata_event: + events.append(metadata_event) + event_sequence_number += 1 + + artifacts_event = self._get_artifacts_change_event( + previous_task, task, event_sequence_number + ) + if artifacts_event: + events.append(artifacts_event) + event_sequence_number += 1 + + if not events: + return + await self._client.aio.agent_engines.a2a_tasks.events.append( + name=self._agent_engine_resource_id + '/a2aTasks/' + task.id, + task_events=events, + ) + + async def _get_stored_task( + self, task_id: str + ) -> vertexai_types.A2aTask | None: + try: + a2a_task = await self._client.aio.agent_engines.a2a_tasks.get( + name=self._agent_engine_resource_id + '/a2aTasks/' + task_id, + ) + except genai_errors.APIError as e: + if e.status == 'NOT_FOUND': + logger.debug('Task %s not found in store.', task_id) + return None + raise + return a2a_task + + async def get( + self, task_id: str, context: ServerCallContext | None = None + ) -> Task | None: + """Retrieves a task from the database by ID.""" + a2a_task = await self._get_stored_task(task_id) + if a2a_task is None: + return None + return to_core_task(vertex_task_converter.to_sdk_task(a2a_task)) + + async def list( + self, + params: ListTasksRequest, + context: ServerCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves a list of tasks from the store.""" + raise NotImplementedError + + async def delete( + self, task_id: str, context: ServerCallContext | None = None + ) -> None: + """The backend doesn't support deleting tasks, so this is not implemented.""" + raise NotImplementedError diff --git a/tck/sut_agent_with_vertex_task_store.py b/tck/sut_agent_with_vertex_task_store.py new file mode 100644 index 000000000..0fadcdd94 --- /dev/null +++ b/tck/sut_agent_with_vertex_task_store.py @@ -0,0 +1,54 @@ +import os + +import sut_agent + + +try: + import vertexai +except ImportError as e: + raise ImportError( + 'VertexTaskStore requires 
# --- continuation of tck/sut_agent_with_vertex_task_store.py ---


def main() -> None:
    """Main entrypoint: builds a Vertex client from env vars and serves the SUT agent."""
    project = os.environ.get('VERTEX_PROJECT')
    location = os.environ.get('VERTEX_LOCATION')
    base_url = os.environ.get('VERTEX_BASE_URL')
    api_version = os.environ.get('VERTEX_API_VERSION')
    agent_engine_resource_id = os.environ.get('AGENT_ENGINE_RESOURCE_ID')

    if (
        not project
        or not location
        or not base_url
        or not api_version
        or not agent_engine_resource_id
    ):
        raise ValueError(
            'Environment variables VERTEX_PROJECT, VERTEX_LOCATION, '
            'VERTEX_BASE_URL, VERTEX_API_VERSION, and '
            'AGENT_ENGINE_RESOURCE_ID must be defined'
        )

    client = vertexai.Client(
        project=project,
        location=location,
        http_options={'base_url': base_url, 'api_version': api_version},
    )

    sut_agent.serve(
        VertexTaskStore(
            client=client,
            agent_engine_resource_id=agent_engine_resource_id,
        )
    )


if __name__ == '__main__':
    main()


# --- reconstructed from patch hunk: tests/contrib/tasks/fake_vertex_client.py ---
"""Fake Vertex AI Client implementations for testing."""

import copy

from google.genai import errors as genai_errors
from vertexai import types as vertexai_types


class FakeAgentEnginesA2aTasksEventsClient:
    """In-memory stand-in for the `agent_engines.a2a_tasks.events` surface."""

    def __init__(self, parent_client):
        self.parent_client = parent_client

    async def append(
        self, name: str, task_events: list[vertexai_types.TaskEvent]
    ) -> None:
        """Applies state/metadata/artifact change events to a stored task."""
        task = self.parent_client.tasks.get(name)
        if not task:
            raise genai_errors.APIError(
                code=404,
                response_json={
                    'error': {
                        'status': 'NOT_FOUND',
                        'message': 'Task not found',
                    }
                },
            )

        # Work on a copy so a failure mid-way doesn't corrupt the stored task.
        task = copy.deepcopy(task)
        if (
            not hasattr(task, 'next_event_sequence_number')
            or not task.next_event_sequence_number
        ):
            task.next_event_sequence_number = 0

        for event in task_events:
            data = event.event_data
            if getattr(data, 'state_change', None):
                task.state = getattr(data.state_change, 'new_state', task.state)
            if getattr(data, 'metadata_change', None):
                task.metadata = getattr(
                    data.metadata_change, 'new_metadata', task.metadata
                )
            if getattr(data, 'output_change', None):
                change = getattr(
                    data.output_change, 'task_artifact_change', None
                )
                if not change:
                    continue
                if not getattr(task, 'output', None):
                    task.output = vertexai_types.TaskOutput()

                current_artifacts = (
                    list(task.output.artifacts)
                    if getattr(task.output, 'artifacts', None)
                    else []
                )

                deleted_ids = getattr(change, 'deleted_artifact_ids', []) or []
                if deleted_ids:
                    current_artifacts = [
                        a
                        for a in current_artifacts
                        if a.artifact_id not in deleted_ids
                    ]

                added = getattr(change, 'added_artifacts', []) or []
                if added:
                    current_artifacts.extend(added)

                updated = getattr(change, 'updated_artifacts', []) or []
                if updated:
                    updated_map = {a.artifact_id: a for a in updated}
                    current_artifacts = [
                        updated_map.get(a.artifact_id, a)
                        for a in current_artifacts
                    ]

                try:
                    del task.output.artifacts[:]
                    task.output.artifacts.extend(current_artifacts)
                except Exception:
                    # Some type versions expose artifacts as a plain attribute.
                    task.output.artifacts = current_artifacts
            # NOTE(review): incremented once per applied event, matching the
            # real store's per-event sequence numbering — confirm indentation
            # against the original patch (line structure was mangled).
            task.next_event_sequence_number += 1

        self.parent_client.tasks[name] = task


class FakeAgentEnginesA2aTasksClient:
    """In-memory stand-in for the `agent_engines.a2a_tasks` surface."""

    def __init__(self):
        self.tasks: dict[str, vertexai_types.A2aTask] = {}
        self.events = FakeAgentEnginesA2aTasksEventsClient(self)

    async def create(
        self,
        name: str,
        a2a_task_id: str,
        config: vertexai_types.CreateAgentEngineTaskConfig,
    ) -> vertexai_types.A2aTask:
        """Creates and stores a new A2aTask in SUBMITTED state."""
        full_name = f'{name}/a2aTasks/{a2a_task_id}'
        task = vertexai_types.A2aTask(
            name=full_name,
            context_id=config.context_id,
            metadata=config.metadata,
            output=config.output,
            state=vertexai_types.State.SUBMITTED,
        )
        task.next_event_sequence_number = 1
        self.tasks[full_name] = task
        return task

    async def get(self, name: str) -> vertexai_types.A2aTask:
        """Returns a deep copy of the stored task, or raises a 404 APIError."""
        if name not in self.tasks:
            raise genai_errors.APIError(
                code=404,
                response_json={
                    'error': {
                        'status': 'NOT_FOUND',
                        'message': 'Task not found',
                    }
                },
            )
        return copy.deepcopy(self.tasks[name])


class FakeAgentEnginesClient:
    def __init__(self):
        self.a2a_tasks = FakeAgentEnginesA2aTasksClient()


class FakeAioClient:
    def __init__(self):
        self.agent_engines = FakeAgentEnginesClient()


class FakeVertexClient:
    """Top-level fake mirroring the `vertexai.Client(...).aio` surface."""

    def __init__(self):
        self.aio = FakeAioClient()


# --- reconstructed from patch: tests/contrib/tasks/run_vertex_tests.sh (bash) ---
# #!/bin/bash
# set -e
#
# for var in VERTEX_PROJECT VERTEX_LOCATION VERTEX_BASE_URL VERTEX_API_VERSION; do
#     if [ -z "${!var}" ]; then
#         echo "Error: Environment variable $var is undefined or empty." >&2
#         exit 1
#     fi
# done
#
# PYTEST_ARGS=("$@")
#
# echo "Running Vertex tests..."
#
# # FIX: quote the command substitution so a repo path containing spaces
# # does not word-split (original had: cd $(git rev-parse --show-toplevel)).
# cd "$(git rev-parse --show-toplevel)"
#
# uv run pytest -v "${PYTEST_ARGS[@]}" tests/contrib/tasks/test_vertex_task_store.py tests/contrib/tasks/test_vertex_task_converter.py
# --- reconstructed from patch hunk: tests/contrib/tasks/test_vertex_task_converter.py ---
import base64

import pytest


pytest.importorskip(
    'vertexai', reason='Vertex Task Converter tests require vertexai'
)
from vertexai import types as vertexai_types

from a2a.compat.v0_3.types import (
    Artifact,
    DataPart,
    FilePart,
    FileWithBytes,
    FileWithUri,
    Part,
    Task,
    TaskState,
    TaskStatus,
    TextPart,
)
from a2a.contrib.tasks.vertex_task_converter import (
    to_sdk_artifact,
    to_sdk_part,
    to_sdk_task,
    to_sdk_task_state,
    to_stored_artifact,
    to_stored_part,
    to_stored_task,
    to_stored_task_state,
)


# Data-driven table of (stored, sdk) state pairs; both mapping tests below
# assert exactly the pairs the original per-assert tests checked.
_STATE_PAIRS = [
    (vertexai_types.State.STATE_UNSPECIFIED, TaskState.unknown),
    (vertexai_types.State.SUBMITTED, TaskState.submitted),
    (vertexai_types.State.WORKING, TaskState.working),
    (vertexai_types.State.COMPLETED, TaskState.completed),
    (vertexai_types.State.CANCELLED, TaskState.canceled),
    (vertexai_types.State.FAILED, TaskState.failed),
    (vertexai_types.State.REJECTED, TaskState.rejected),
    (vertexai_types.State.INPUT_REQUIRED, TaskState.input_required),
    (vertexai_types.State.AUTH_REQUIRED, TaskState.auth_required),
]


def test_to_sdk_task_state() -> None:
    for stored, sdk in _STATE_PAIRS:
        assert to_sdk_task_state(stored) == sdk
    # Unknown values degrade to TaskState.unknown instead of raising.
    assert to_sdk_task_state(999) == TaskState.unknown  # type: ignore


def test_to_stored_task_state() -> None:
    for stored, sdk in _STATE_PAIRS:
        assert to_stored_task_state(sdk) == stored


def test_to_stored_part_text() -> None:
    sdk_part = Part(root=TextPart(text='hello world'))
    stored_part = to_stored_part(sdk_part)
    assert stored_part.text == 'hello world'
    assert not stored_part.inline_data
    assert not stored_part.file_data


def test_to_stored_part_data() -> None:
    sdk_part = Part(root=DataPart(data={'key': 'value'}))
    stored_part = to_stored_part(sdk_part)
    assert stored_part.inline_data is not None
    assert stored_part.inline_data.mime_type == 'application/json'
    assert stored_part.inline_data.data == b'{"key": "value"}'


def test_to_stored_part_file_bytes() -> None:
    encoded_b64 = base64.b64encode(b'test data').decode('utf-8')
    sdk_part = Part(
        root=FilePart(
            file=FileWithBytes(
                bytes=encoded_b64,
                mime_type='text/plain',
            )
        )
    )
    stored_part = to_stored_part(sdk_part)
    assert stored_part.inline_data is not None
    assert stored_part.inline_data.mime_type == 'text/plain'
    assert stored_part.inline_data.data == b'test data'


def test_to_stored_part_file_uri() -> None:
    sdk_part = Part(
        root=FilePart(
            file=FileWithUri(
                uri='gs://test-bucket/file.txt',
                mime_type='text/plain',
            )
        )
    )
    stored_part = to_stored_part(sdk_part)
    assert stored_part.file_data is not None
    assert stored_part.file_data.mime_type == 'text/plain'
    assert stored_part.file_data.file_uri == 'gs://test-bucket/file.txt'


def test_to_stored_part_unsupported() -> None:
    class BadPart:
        pass

    part = Part(root=TextPart(text='t'))
    part.root = BadPart()  # type: ignore
    with pytest.raises(ValueError, match='Unsupported part type'):
        to_stored_part(part)


def test_to_sdk_part_text() -> None:
    stored_part = vertexai_types.Part(text='hello back')
    sdk_part = to_sdk_part(stored_part)
    assert isinstance(sdk_part.root, TextPart)
    assert sdk_part.root.text == 'hello back'


def test_to_sdk_part_inline_data() -> None:
    stored_part = vertexai_types.Part(
        inline_data=vertexai_types.Blob(
            mime_type='application/json',
            data=b'{"key": "val"}',
        )
    )
    sdk_part = to_sdk_part(stored_part)
    assert isinstance(sdk_part.root, FilePart)
    assert isinstance(sdk_part.root.file, FileWithBytes)
    expected_b64 = base64.b64encode(b'{"key": "val"}').decode('utf-8')
    assert sdk_part.root.file.mime_type == 'application/json'
    assert sdk_part.root.file.bytes == expected_b64


def test_to_sdk_part_file_data() -> None:
    stored_part = vertexai_types.Part(
        file_data=vertexai_types.FileData(
            mime_type='image/jpeg',
            file_uri='gs://bucket/image.jpg',
        )
    )
    sdk_part = to_sdk_part(stored_part)
    assert isinstance(sdk_part.root, FilePart)
    assert isinstance(sdk_part.root.file, FileWithUri)
    assert sdk_part.root.file.mime_type == 'image/jpeg'
    assert sdk_part.root.file.uri == 'gs://bucket/image.jpg'


def test_to_sdk_part_unsupported() -> None:
    stored_part = vertexai_types.Part()
    with pytest.raises(ValueError, match='Unsupported part:'):
        to_sdk_part(stored_part)


def test_to_stored_artifact() -> None:
    sdk_artifact = Artifact(
        artifact_id='art-123',
        parts=[Part(root=TextPart(text='part_1'))],
    )
    stored_artifact = to_stored_artifact(sdk_artifact)
    assert stored_artifact.artifact_id == 'art-123'
    assert len(stored_artifact.parts) == 1
    assert stored_artifact.parts[0].text == 'part_1'


def test_to_sdk_artifact() -> None:
    stored_artifact = vertexai_types.TaskArtifact(
        artifact_id='art-456',
        parts=[vertexai_types.Part(text='part_2')],
    )
    sdk_artifact = to_sdk_artifact(stored_artifact)
    assert sdk_artifact.artifact_id == 'art-456'
    assert len(sdk_artifact.parts) == 1
    assert isinstance(sdk_artifact.parts[0].root, TextPart)
    assert sdk_artifact.parts[0].root.text == 'part_2'


def test_to_stored_task() -> None:
    sdk_task = Task(
        id='task-1',
        context_id='ctx-1',
        status=TaskStatus(state=TaskState.working),
        metadata={'foo': 'bar'},
        artifacts=[
            Artifact(
                artifact_id='art-1',
                parts=[Part(root=TextPart(text='stuff'))],
            )
        ],
        history=[],
    )
    stored_task = to_stored_task(sdk_task)
    assert stored_task.context_id == 'ctx-1'
    assert stored_task.metadata == {'foo': 'bar'}
    assert stored_task.state == vertexai_types.State.WORKING
    assert stored_task.output is not None
    assert stored_task.output.artifacts is not None
    assert len(stored_task.output.artifacts) == 1
    assert stored_task.output.artifacts[0].artifact_id == 'art-1'


def test_to_sdk_task() -> None:
    stored_task = vertexai_types.A2aTask(
        name='projects/123/locations/us-central1/agentEngines/456/tasks/task-2',
        context_id='ctx-2',
        state=vertexai_types.State.COMPLETED,
        metadata={'a': 'b'},
        output=vertexai_types.TaskOutput(
            artifacts=[
                vertexai_types.TaskArtifact(
                    artifact_id='art-2',
                    parts=[vertexai_types.Part(text='result')],
                )
            ]
        ),
    )
    sdk_task = to_sdk_task(stored_task)
    assert sdk_task.id == 'task-2'
    assert sdk_task.context_id == 'ctx-2'
    assert sdk_task.status.state == TaskState.completed
    assert sdk_task.metadata == {'a': 'b'}
    assert sdk_task.history == []
    assert sdk_task.artifacts is not None
    assert len(sdk_task.artifacts) == 1
    assert sdk_task.artifacts[0].artifact_id == 'art-2'
    assert isinstance(sdk_task.artifacts[0].parts[0].root, TextPart)
    assert sdk_task.artifacts[0].parts[0].root.text == 'result'


def test_to_sdk_task_no_output() -> None:
    stored_task = vertexai_types.A2aTask(
        name='tasks/task-3',
        context_id='ctx-3',
        state=vertexai_types.State.SUBMITTED,
        metadata=None,
    )
    sdk_task = to_sdk_task(stored_task)
    assert sdk_task.id == 'task-3'
    assert sdk_task.metadata == {}
    assert sdk_task.artifacts == []


def test_sdk_task_state_conversion_round_trip() -> None:
    for state in TaskState:
        stored_state = to_stored_task_state(state)
        round_trip_state = to_sdk_task_state(stored_state)
        assert round_trip_state == state


def test_sdk_part_text_conversion_round_trip() -> None:
    sdk_part = Part(root=TextPart(text='hello world'))
    stored_part = to_stored_part(sdk_part)
    round_trip_sdk_part = to_sdk_part(stored_part)
    assert round_trip_sdk_part == sdk_part


def test_sdk_part_data_conversion_round_trip() -> None:
    # A DataPart is converted to `inline_data` in Vertex AI, which lacks the
    # original `DataPart` vs `FilePart` distinction. When reading it back from
    # the stored format, it becomes a `FilePart` with base64-encoded
    # `FileWithBytes` and `mime_type="application/json"`.
    sdk_part = Part(root=DataPart(data={'key': 'value'}))
    stored_part = to_stored_part(sdk_part)
    round_trip_sdk_part = to_sdk_part(stored_part)

    expected_b64 = base64.b64encode(b'{"key": "value"}').decode('utf-8')
    assert round_trip_sdk_part == Part(
        root=FilePart(
            file=FileWithBytes(
                bytes=expected_b64,
                mime_type='application/json',
            )
        )
    )


def test_sdk_part_file_bytes_conversion_round_trip() -> None:
    encoded_b64 = base64.b64encode(b'test data').decode('utf-8')
    sdk_part = Part(
        root=FilePart(
            file=FileWithBytes(
                bytes=encoded_b64,
                mime_type='text/plain',
            )
        )
    )
    stored_part = to_stored_part(sdk_part)
    round_trip_sdk_part = to_sdk_part(stored_part)
    assert round_trip_sdk_part == sdk_part


def test_sdk_part_file_uri_conversion_round_trip() -> None:
    sdk_part = Part(
        root=FilePart(
            file=FileWithUri(
                uri='gs://test-bucket/file.txt',
                mime_type='text/plain',
            )
        )
    )
    stored_part = to_stored_part(sdk_part)
    round_trip_sdk_part = to_sdk_part(stored_part)
    assert round_trip_sdk_part == sdk_part


def test_sdk_artifact_conversion_round_trip() -> None:
    sdk_artifact = Artifact(
        artifact_id='art-123',
        parts=[Part(root=TextPart(text='part_1'))],
    )
    stored_artifact = to_stored_artifact(sdk_artifact)
    round_trip_sdk_artifact = to_sdk_artifact(stored_artifact)
    assert round_trip_sdk_artifact == sdk_artifact


def test_sdk_task_conversion_round_trip() -> None:
    sdk_task = Task(
        id='task-1',
        context_id='ctx-1',
        status=TaskStatus(state=TaskState.working),
        metadata={'foo': 'bar'},
        artifacts=[
            Artifact(
                artifact_id='art-1',
                parts=[Part(root=TextPart(text='stuff'))],
            )
        ],
        history=[
            # History is not yet implemented and later will be supported
            # via events.
        ],
    )
    stored_task = to_stored_task(sdk_task)
    # Simulate Vertex storing the ID in the fully qualified resource name.
    # The task ID during creation gets appended to the parent name.
    stored_task.name = (
        f'projects/p/locations/l/agentEngines/e/tasks/{sdk_task.id}'
    )

    round_trip_sdk_task = to_sdk_task(stored_task)

    assert round_trip_sdk_task.id == sdk_task.id
    assert round_trip_sdk_task.context_id == sdk_task.context_id
    assert round_trip_sdk_task.status == sdk_task.status
    assert round_trip_sdk_task.metadata == sdk_task.metadata
    assert round_trip_sdk_task.artifacts == sdk_task.artifacts
    assert round_trip_sdk_task.history == []


# --- reconstructed from patch hunk: tests/contrib/tasks/test_vertex_task_store.py (header) ---
"""Tests for the VertexTaskStore.

These tests can be run with a real or fake Vertex AI Agent Engine as a
backend. The real ones are skipped by default unless the necessary
environment variables are set, which prevents them from failing in GitHub
Actions.

To run these tests locally, you can use the provided script:
    ./run_vertex_tests.sh

The following environment variables are required for the real backend:
    VERTEX_PROJECT, VERTEX_LOCATION, VERTEX_BASE_URL, VERTEX_API_VERSION
"""

import os
import sys

from collections.abc import AsyncGenerator

import pytest
import pytest_asyncio


# Skip the entire test module if vertexai is not installed.
pytest.importorskip(
    'vertexai', reason='Vertex Task Store tests require vertexai'
)
import vertexai


# Real-backend tests are skipped unless every env var is present and non-empty.
missing_env_vars = not all(
    os.environ.get(var)
    for var in [
        'VERTEX_PROJECT',
        'VERTEX_LOCATION',
        'VERTEX_BASE_URL',
        'VERTEX_API_VERSION',
    ]
)


@pytest.fixture(
    scope='module',
    params=[
        'fake',
        pytest.param(
            'real',
            marks=pytest.mark.skipif(
                missing_env_vars,
                reason='Missing required environment variables for real Vertex Task Store.',
            ),
        ),
    ],
)
def backend_type(request) -> str:
    """Parametrizes each test against the fake and (optionally) real backend."""
    return request.param
real Vertex Task Store.', + ), + ), + ], +) +def backend_type(request) -> str: + return request.param + + +from a2a.contrib.tasks.vertex_task_store import VertexTaskStore +from a2a.types.a2a_pb2 import ( + Artifact, + Part, + Task, + TaskState, + TaskStatus, +) + + +# Minimal Task object for testing +MINIMAL_TASK_OBJ = Task( + id='task-abc', + context_id='session-xyz', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), +) +MINIMAL_TASK_OBJ.metadata['test_key'] = 'test_value' + + +from collections.abc import Generator + + +@pytest.fixture(scope='module') +def agent_engine_resource_id(backend_type: str) -> Generator[str, None, None]: + """ + Module-scoped fixture that creates and deletes a single Agent Engine + for all the tests. For fake backend, it yields a mock resource. + """ + if backend_type == 'fake': + yield 'projects/mock-project/locations/mock-location/agentEngines/mock-engine' + return + + project = os.environ.get('VERTEX_PROJECT') + location = os.environ.get('VERTEX_LOCATION') + base_url = os.environ.get('VERTEX_BASE_URL') + + client = vertexai.Client(project=project, location=location) + client._api_client._http_options.base_url = base_url + + agent_engine = client.agent_engines.create() + yield agent_engine.api_resource.name + agent_engine.delete() + + +@pytest_asyncio.fixture +async def vertex_store( + backend_type: str, + agent_engine_resource_id: str, +) -> AsyncGenerator[VertexTaskStore, None]: + """ + Function-scoped fixture providing a fresh VertexTaskStore per test, + reusing the module-scoped engine. Uses fake client for 'fake' backend. 
+ """ + if backend_type == 'fake': + sys.path.append(os.path.dirname(__file__)) + from fake_vertex_client import FakeVertexClient + + client = FakeVertexClient() + else: + project = os.environ.get('VERTEX_PROJECT') + location = os.environ.get('VERTEX_LOCATION') + base_url = os.environ.get('VERTEX_BASE_URL') + api_version = os.environ.get('VERTEX_API_VERSION') + + client = vertexai.Client(project=project, location=location) + client._api_client._http_options.base_url = base_url + client._api_client._http_options.api_version = api_version + + store = VertexTaskStore( + client=client, # type: ignore + agent_engine_resource_id=agent_engine_resource_id, + ) + yield store + + +@pytest.mark.asyncio +async def test_save_task(vertex_store: VertexTaskStore) -> None: + """Test saving a task to the VertexTaskStore.""" + # Ensure unique ID for parameterized tests if needed, or rely on table isolation + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) + task_to_save.id = 'save-test-task-2' + await vertex_store.save(task_to_save) + + retrieved_task = await vertex_store.get(task_to_save.id) + assert retrieved_task is not None + assert retrieved_task.id == task_to_save.id + + assert retrieved_task == task_to_save + + +@pytest.mark.asyncio +async def test_get_task(vertex_store: VertexTaskStore) -> None: + """Test retrieving a task from the VertexTaskStore.""" + task_id = 'get-test-task-1' + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) + task_to_save.id = task_id + await vertex_store.save(task_to_save) + + retrieved_task = await vertex_store.get(task_to_save.id) + assert retrieved_task is not None + assert retrieved_task.id == task_to_save.id + assert retrieved_task.context_id == task_to_save.context_id + assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED + + +@pytest.mark.asyncio +async def test_get_nonexistent_task( + vertex_store: VertexTaskStore, +) -> None: + """Test retrieving a nonexistent task.""" + retrieved_task = await 
vertex_store.get('nonexistent-task-id') + assert retrieved_task is None + + +@pytest.mark.asyncio +async def test_save_and_get_detailed_task( + vertex_store: VertexTaskStore, +) -> None: + """Test saving and retrieving a task with more fields populated.""" + task_id = 'detailed-task-test-vertex' + test_task = Task( + id=task_id, + context_id='test-session-1', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + ), + artifacts=[ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ) + ], + ) + test_task.metadata['key1'] = 'value1' + test_task.metadata['key2'] = 123 + + await vertex_store.save(test_task) + retrieved_task = await vertex_store.get(test_task.id) + + assert retrieved_task is not None + assert retrieved_task.id == test_task.id + assert retrieved_task.context_id == test_task.context_id + assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED + assert retrieved_task.metadata['key1'] == 'value1' + assert retrieved_task.metadata['key2'] == 123 + + # Pydantic models handle their own serialization for comparison if model_dump is used + assert retrieved_task.artifacts == test_task.artifacts + + +@pytest.mark.asyncio +async def test_update_task_status_and_metadata( + vertex_store: VertexTaskStore, +) -> None: + """Test updating an existing task.""" + task_id = 'update-test-task-1' + original_task = Task( + id=task_id, + context_id='session-update', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + artifacts=[], + history=[], + ) + await vertex_store.save(original_task) + + retrieved_before_update = await vertex_store.get(task_id) + assert retrieved_before_update is not None + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + assert retrieved_before_update.metadata == {} + + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_COMPLETED + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + 
updated_task.metadata.update({'update_key': 'update_value'}) + + await vertex_store.save(updated_task) + + retrieved_after_update = await vertex_store.get(task_id) + assert retrieved_after_update is not None + assert retrieved_after_update.status.state == TaskState.TASK_STATE_COMPLETED + assert retrieved_after_update.metadata == {'update_key': 'update_value'} + + +@pytest.mark.asyncio +async def test_update_task_add_artifact(vertex_store: VertexTaskStore) -> None: + """Test updating an existing task by adding an artifact.""" + task_id = 'update-test-task-2' + original_task = Task( + id=task_id, + context_id='session-update', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + artifacts=[ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ) + ], + history=[], + ) + await vertex_store.save(original_task) + + retrieved_before_update = await vertex_store.get(task_id) + assert retrieved_before_update is not None + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + assert retrieved_before_update.metadata == {} + + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + + updated_task.artifacts.append( + Artifact( + artifact_id='artifact-2', + parts=[Part(text='world')], + ) + ) + + await vertex_store.save(updated_task) + + retrieved_after_update = await vertex_store.get(task_id) + assert retrieved_after_update is not None + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING + + assert retrieved_after_update.artifacts == [ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ), + Artifact( + artifact_id='artifact-2', + parts=[Part(text='world')], + ), + ] + + +@pytest.mark.asyncio +async def test_update_task_update_artifact( + vertex_store: VertexTaskStore, +) -> None: + """Test updating an existing task by changing an 
artifact.""" + task_id = 'update-test-task-3' + original_task = Task( + id=task_id, + context_id='session-update', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + artifacts=[ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ), + Artifact( + artifact_id='artifact-2', + parts=[Part(text='world')], + ), + ], + history=[], + ) + await vertex_store.save(original_task) + + retrieved_before_update = await vertex_store.get(task_id) + assert retrieved_before_update is not None + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + assert retrieved_before_update.metadata == {} + + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + + updated_task.artifacts[0].parts[0].text = 'ahoy' + + await vertex_store.save(updated_task) + + retrieved_after_update = await vertex_store.get(task_id) + assert retrieved_after_update is not None + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING + + assert retrieved_after_update.artifacts == [ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='ahoy')], + ), + Artifact( + artifact_id='artifact-2', + parts=[Part(text='world')], + ), + ] + + +@pytest.mark.asyncio +async def test_update_task_delete_artifact( + vertex_store: VertexTaskStore, +) -> None: + """Test updating an existing task by deleting an artifact.""" + task_id = 'update-test-task-4' + original_task = Task( + id=task_id, + context_id='session-update', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + artifacts=[ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ), + Artifact( + artifact_id='artifact-2', + parts=[Part(text='world')], + ), + ], + history=[], + ) + await vertex_store.save(original_task) + + retrieved_before_update = await vertex_store.get(task_id) + assert retrieved_before_update is not None + 
assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + assert retrieved_before_update.metadata == {} + + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + + del updated_task.artifacts[1] + + await vertex_store.save(updated_task) + + retrieved_after_update = await vertex_store.get(task_id) + assert retrieved_after_update is not None + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING + + assert retrieved_after_update.artifacts == [ + Artifact( + artifact_id='artifact-1', + parts=[Part(text='hello')], + ) + ] + + +@pytest.mark.asyncio +async def test_metadata_field_mapping( + vertex_store: VertexTaskStore, +) -> None: + """Test that metadata field is correctly mapped between Pydantic and SQLAlchemy. + + This test verifies: + 1. Metadata can be None + 2. Metadata can be a simple dict + 3. Metadata can contain nested structures + 4. Metadata is correctly saved and retrieved + 5. 
The mapping between task.metadata and task_metadata column works + """ + # Test 1: Task with no metadata (None) + task_no_metadata = Task( + id='task-metadata-test-1', + context_id='session-meta-1', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + await vertex_store.save(task_no_metadata) + retrieved_no_metadata = await vertex_store.get('task-metadata-test-1') + assert retrieved_no_metadata is not None + assert retrieved_no_metadata.metadata == {} + + # Test 2: Task with simple metadata + simple_metadata = {'key': 'value', 'number': 42, 'boolean': True} + task_simple_metadata = Task( + id='task-metadata-test-2', + context_id='session-meta-2', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + metadata=simple_metadata, + ) + await vertex_store.save(task_simple_metadata) + retrieved_simple = await vertex_store.get('task-metadata-test-2') + assert retrieved_simple is not None + assert retrieved_simple.metadata == simple_metadata + + # Test 3: Task with complex nested metadata + complex_metadata = { + 'level1': { + 'level2': { + 'level3': ['a', 'b', 'c'], + 'numeric': 3.14159, + }, + 'array': [1, 2, {'nested': 'value'}], + }, + 'special_chars': 'Hello\nWorld\t!', + 'unicode': '🚀 Unicode test 你好', + 'null_value': None, + } + task_complex_metadata = Task( + id='task-metadata-test-3', + context_id='session-meta-3', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + metadata=complex_metadata, + ) + await vertex_store.save(task_complex_metadata) + retrieved_complex = await vertex_store.get('task-metadata-test-3') + assert retrieved_complex is not None + assert retrieved_complex.metadata == complex_metadata + + # Test 4: Update metadata from None to dict + task_update_metadata = Task( + id='task-metadata-test-4', + context_id='session-meta-4', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + await vertex_store.save(task_update_metadata) + + # Update metadata + task_update_metadata.metadata.Clear() + 
task_update_metadata.metadata.update( + {'updated': True, 'timestamp': '2024-01-01'} + ) + await vertex_store.save(task_update_metadata) + + retrieved_updated = await vertex_store.get('task-metadata-test-4') + assert retrieved_updated is not None + assert retrieved_updated.metadata == { + 'updated': True, + 'timestamp': '2024-01-01', + } + + # Test 5: Update metadata from dict to None + task_update_metadata.metadata.Clear() + await vertex_store.save(task_update_metadata) + + retrieved_none = await vertex_store.get('task-metadata-test-4') + assert retrieved_none is not None + assert retrieved_none.metadata == {} diff --git a/uv.lock b/uv.lock index 8c7dfb31c..0f3890680 100644 --- a/uv.lock +++ b/uv.lock @@ -4,7 +4,8 @@ requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.14'", "python_full_version == '3.13.*'", - "python_full_version < '3.13'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version < '3.11'", ] [[package]] @@ -21,13 +22,11 @@ dependencies = [ ] [package.optional-dependencies] -db-cli = [ - { name = "alembic" }, -] all = [ { name = "alembic" }, { name = "cryptography" }, { name = "fastapi" }, + { name = "google-cloud-aiplatform" }, { name = "grpcio" }, { name = "grpcio-reflection" }, { name = "grpcio-tools" }, @@ -38,6 +37,9 @@ all = [ { name = "sse-starlette" }, { name = "starlette" }, ] +db-cli = [ + { name = "alembic" }, +] encryption = [ { name = "cryptography" }, ] @@ -70,6 +72,9 @@ telemetry = [ { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, ] +vertex = [ + { name = "google-cloud-aiplatform" }, +] [package.dev-dependencies] dev = [ @@ -97,13 +102,15 @@ dev = [ [package.metadata] requires-dist = [ - { name = "alembic", marker = "extra == 'db-cli'", specifier = ">=1.14.0" }, { name = "alembic", marker = "extra == 'all'", specifier = ">=1.14.0" }, + { name = "alembic", marker = "extra == 'db-cli'", specifier = ">=1.14.0" }, { name = "cryptography", marker = "extra == 
'all'", specifier = ">=43.0.0" }, { name = "cryptography", marker = "extra == 'encryption'", specifier = ">=43.0.0" }, { name = "fastapi", marker = "extra == 'all'", specifier = ">=0.115.2" }, { name = "fastapi", marker = "extra == 'http-server'", specifier = ">=0.115.2" }, { name = "google-api-core", specifier = ">=1.26.0" }, + { name = "google-cloud-aiplatform", marker = "extra == 'all'", specifier = ">=1.140.0" }, + { name = "google-cloud-aiplatform", marker = "extra == 'vertex'", specifier = ">=1.140.0" }, { name = "googleapis-common-protos", specifier = ">=1.70.0" }, { name = "grpcio", marker = "extra == 'all'", specifier = ">=1.60" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.60" }, @@ -136,7 +143,7 @@ requires-dist = [ { name = "starlette", marker = "extra == 'all'" }, { name = "starlette", marker = "extra == 'http-server'" }, ] -provides-extras = ["db-cli", "all", "encryption", "grpc", "http-server", "mysql", "postgresql", "signing", "sql", "sqlite", "telemetry"] +provides-extras = ["all", "db-cli", "encryption", "grpc", "http-server", "mysql", "postgresql", "signing", "sql", "sqlite", "telemetry", "vertex"] [package.metadata.requires-dev] dev = [ @@ -726,6 +733,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + [[package]] name = "dunamai" version = "1.26.0" @@ -743,7 +768,7 @@ name = "exceptiongroup" version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ @@ -800,6 +825,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/b6/85c4d21067220b9a78cfb81f516f9725ea6befc1544ec9bd2c1acd97c324/google_api_core-2.29.0-py3-none-any.whl", hash = "sha256:d30bc60980daa36e314b5d5a3e5958b0200cb44ca8fa1be2b614e932b75a3ea9", size = 173906, upload-time = "2026-01-08T22:21:36.093Z" }, ] 
+[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + [[package]] name = "google-auth" version = "2.48.0" @@ -814,6 +845,167 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" }, ] +[package.optional-dependencies] +requests = [ + { name = "requests" }, +] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.140.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docstring-parser" }, + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-resource-manager" }, + { name = "google-cloud-storage" }, + { name = "google-genai" }, + { name = "packaging" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/14/1c223faf986afffdd61c994a10c30a04985ed5ba072201058af2c6e1e572/google_cloud_aiplatform-1.140.0.tar.gz", hash = "sha256:ea7eb1870b4cf600f8c2472102e21c3a1bcaf723d6e49f00ed51bc6b88d54fff", size = 10146640, upload-time = "2026-03-04T00:56:38.95Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/5c/bb64aee2da24895d57611eed00fac54739bfa34f98ab344020a6605875bf/google_cloud_aiplatform-1.140.0-py2.py3-none-any.whl", hash = "sha256:e94493a2682b9d17efa7146a53bb3665bf1595c3394fd3d0f45d18f71623fddc", size = 8355660, upload-time = "2026-03-04T00:56:34.441Z" }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.40.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = 
"google-resumable-media" }, + { name = "packaging" }, + { name = "python-dateutil" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/0c/153ee546c288949fcc6794d58811ab5420f3ecad5fa7f9e73f78d9512a6e/google_cloud_bigquery-3.40.1.tar.gz", hash = "sha256:75afcfb6e007238fe1deefb2182105249321145ff921784fe7b1de2b4ba24506", size = 511761, upload-time = "2026-02-12T18:44:18.958Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/f5/081cf5b90adfe524ae0d671781b0d497a75a0f2601d075af518828e22d8f/google_cloud_bigquery-3.40.1-py3-none-any.whl", hash = "sha256:9082a6b8193aba87bed6a2c79cf1152b524c99bb7e7ac33a785e333c09eac868", size = 262018, upload-time = "2026-02-12T18:44:16.913Z" }, +] + +[[package]] +name = "google-cloud-core" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027, upload-time = "2025-10-29T23:17:39.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469, upload-time = "2025-10-29T23:17:38.548Z" }, +] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "grpcio" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4e/7f/db00b2820475793a52958dc55fe9ec2eb8e863546e05fcece9b921f86ebe/google_cloud_resource_manager-1.16.0.tar.gz", hash = "sha256:cc938f87cc36c2672f062b1e541650629e0d954c405a4dac35ceedee70c267c3", size = 459840, upload-time = "2026-01-15T13:04:07.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/ff/4b28bcc791d9d7e4ac8fea00fbd90ccb236afda56746a3b4564d2ae45df3/google_cloud_resource_manager-1.16.0-py3-none-any.whl", hash = "sha256:fb9a2ad2b5053c508e1c407ac31abfd1a22e91c32876c1892830724195819a28", size = 400218, upload-time = "2026-01-15T13:02:47.378Z" }, +] + +[[package]] +name = "google-cloud-storage" +version = "3.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/b1/4f0798e88285b50dfc60ed3a7de071def538b358db2da468c2e0deecbb40/google_cloud_storage-3.9.0.tar.gz", hash = "sha256:f2d8ca7db2f652be757e92573b2196e10fbc09649b5c016f8b422ad593c641cc", size = 17298544, upload-time = "2026-02-02T13:36:34.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/0b/816a6ae3c9fd096937d2e5f9670558908811d57d59ddf69dd4b83b326fd1/google_cloud_storage-3.9.0-py3-none-any.whl", hash = "sha256:2dce75a9e8b3387078cbbdad44757d410ecdb916101f8ba308abf202b6968066", size = 321324, upload-time = "2026-02-02T13:36:32.271Z" }, +] + +[[package]] +name = "google-crc32c" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192, upload-time = "2025-12-16T00:35:25.142Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/95/ac/6f7bc93886a823ab545948c2dd48143027b2355ad1944c7cf852b338dc91/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0470b8c3d73b5f4e3300165498e4cf25221c7eb37f1159e221d1825b6df8a7ff", size = 31296, upload-time = "2025-12-16T00:19:07.261Z" }, + { url = "https://files.pythonhosted.org/packages/f7/97/a5accde175dee985311d949cfcb1249dcbb290f5ec83c994ea733311948f/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:119fcd90c57c89f30040b47c211acee231b25a45d225e3225294386f5d258288", size = 30870, upload-time = "2025-12-16T00:29:17.669Z" }, + { url = "https://files.pythonhosted.org/packages/3d/63/bec827e70b7a0d4094e7476f863c0dbd6b5f0f1f91d9c9b32b76dcdfeb4e/google_crc32c-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6f35aaffc8ccd81ba3162443fabb920e65b1f20ab1952a31b13173a67811467d", size = 33214, upload-time = "2025-12-16T00:40:19.618Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/11b70614df04c289128d782efc084b9035ef8466b3d0a8757c1b6f5cf7ac/google_crc32c-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:864abafe7d6e2c4c66395c1eb0fe12dc891879769b52a3d56499612ca93b6092", size = 33589, upload-time = "2025-12-16T00:40:20.7Z" }, + { url = "https://files.pythonhosted.org/packages/3e/00/a08a4bc24f1261cc5b0f47312d8aebfbe4b53c2e6307f1b595605eed246b/google_crc32c-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:db3fe8eaf0612fc8b20fa21a5f25bd785bc3cd5be69f8f3412b0ac2ffd49e733", size = 34437, upload-time = "2025-12-16T00:35:19.437Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ef/21ccfaab3d5078d41efe8612e0ed0bfc9ce22475de074162a91a25f7980d/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:014a7e68d623e9a4222d663931febc3033c5c7c9730785727de2a81f87d5bab8", size = 31298, upload-time = "2025-12-16T00:20:32.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/b8/f8413d3f4b676136e965e764ceedec904fe38ae8de0cdc52a12d8eb1096e/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:86cfc00fe45a0ac7359e5214a1704e51a99e757d0272554874f419f79838c5f7", size = 30872, upload-time = "2025-12-16T00:33:58.785Z" }, + { url = "https://files.pythonhosted.org/packages/f6/fd/33aa4ec62b290477181c55bb1c9302c9698c58c0ce9a6ab4874abc8b0d60/google_crc32c-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:19b40d637a54cb71e0829179f6cb41835f0fbd9e8eb60552152a8b52c36cbe15", size = 33243, upload-time = "2025-12-16T00:40:21.46Z" }, + { url = "https://files.pythonhosted.org/packages/71/03/4820b3bd99c9653d1a5210cb32f9ba4da9681619b4d35b6a052432df4773/google_crc32c-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:17446feb05abddc187e5441a45971b8394ea4c1b6efd88ab0af393fd9e0a156a", size = 33608, upload-time = "2025-12-16T00:40:22.204Z" }, + { url = "https://files.pythonhosted.org/packages/7c/43/acf61476a11437bf9733fb2f70599b1ced11ec7ed9ea760fdd9a77d0c619/google_crc32c-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:71734788a88f551fbd6a97be9668a0020698e07b2bf5b3aa26a36c10cdfb27b2", size = 34439, upload-time = "2025-12-16T00:35:20.458Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300, upload-time = "2025-12-16T00:21:56.723Z" }, + { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867, upload-time = "2025-12-16T00:38:31.302Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364, upload-time = "2025-12-16T00:40:22.96Z" }, + { url = "https://files.pythonhosted.org/packages/21/3f/3457ea803db0198c9aaca2dd373750972ce28a26f00544b6b85088811939/google_crc32c-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cb5c869c2923d56cb0c8e6bcdd73c009c36ae39b652dbe46a05eb4ef0ad01454", size = 33740, upload-time = "2025-12-16T00:40:23.96Z" }, + { url = "https://files.pythonhosted.org/packages/df/c0/87c2073e0c72515bb8733d4eef7b21548e8d189f094b5dad20b0ecaf64f6/google_crc32c-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:3cc0c8912038065eafa603b238abf252e204accab2a704c63b9e14837a854962", size = 34437, upload-time = "2025-12-16T00:35:21.395Z" }, + { url = "https://files.pythonhosted.org/packages/d1/db/000f15b41724589b0e7bc24bc7a8967898d8d3bc8caf64c513d91ef1f6c0/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:3ebb04528e83b2634857f43f9bb8ef5b2bbe7f10f140daeb01b58f972d04736b", size = 31297, upload-time = "2025-12-16T00:23:20.709Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/8ebed0c39c53a7e838e2a486da8abb0e52de135f1b376ae2f0b160eb4c1a/google_crc32c-1.8.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:450dc98429d3e33ed2926fc99ee81001928d63460f8538f21a5d6060912a8e27", size = 30867, upload-time = "2025-12-16T00:43:14.628Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/b468aec74a0354b34c8cbf748db20d6e350a68a2b0912e128cabee49806c/google_crc32c-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3b9776774b24ba76831609ffbabce8cdf6fa2bd5e9df37b594221c7e333a81fa", size = 33344, upload-time = 
"2025-12-16T00:40:24.742Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e8/b33784d6fc77fb5062a8a7854e43e1e618b87d5ddf610a88025e4de6226e/google_crc32c-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:89c17d53d75562edfff86679244830599ee0a48efc216200691de8b02ab6b2b8", size = 33694, upload-time = "2025-12-16T00:40:25.505Z" }, + { url = "https://files.pythonhosted.org/packages/92/b1/d3cbd4d988afb3d8e4db94ca953df429ed6db7282ed0e700d25e6c7bfc8d/google_crc32c-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:57a50a9035b75643996fbf224d6661e386c7162d1dfdab9bc4ca790947d1007f", size = 34435, upload-time = "2025-12-16T00:35:22.107Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/8ecf3c2b864a490b9e7010c84fd203ec8cf3b280651106a3a74dd1b0ca72/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:e6584b12cb06796d285d09e33f63309a09368b9d806a551d8036a4207ea43697", size = 31301, upload-time = "2025-12-16T00:24:48.527Z" }, + { url = "https://files.pythonhosted.org/packages/36/c6/f7ff6c11f5ca215d9f43d3629163727a272eabc356e5c9b2853df2bfe965/google_crc32c-1.8.0-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:f4b51844ef67d6cf2e9425983274da75f18b1597bb2c998e1c0a0e8d46f8f651", size = 30868, upload-time = "2025-12-16T00:48:12.163Z" }, + { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381, upload-time = "2025-12-16T00:40:26.268Z" }, + { url = "https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734, upload-time = 
"2025-12-16T00:40:27.028Z" }, + { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878, upload-time = "2025-12-16T00:35:23.142Z" }, + { url = "https://files.pythonhosted.org/packages/52/c5/c171e4d8c44fec1422d801a6d2e5d7ddabd733eeda505c79730ee9607f07/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:87fa445064e7db928226b2e6f0d5304ab4cd0339e664a4e9a25029f384d9bb93", size = 28615, upload-time = "2025-12-16T00:40:29.298Z" }, + { url = "https://files.pythonhosted.org/packages/9c/97/7d75fe37a7a6ed171a2cf17117177e7aab7e6e0d115858741b41e9dd4254/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f639065ea2042d5c034bf258a9f085eaa7af0cd250667c0635a3118e8f92c69c", size = 28800, upload-time = "2025-12-16T00:40:30.322Z" }, +] + +[[package]] +name = "google-genai" +version = "1.66.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "google-auth", extra = ["requests"] }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "sniffio" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/ba/0b343b0770d4710ad2979fd9301d7caa56c940174d5361ed4a7cc4979241/google_genai-1.66.0.tar.gz", hash = "sha256:ffc01647b65046bca6387320057aa51db0ad64bcc72c8e3e914062acfa5f7c49", size = 504386, upload-time = "2026-03-04T22:15:28.156Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/dd/403949d922d4e261b08b64aaa132af4e456c3b15c8e2a2d9e6ef693f66e2/google_genai-1.66.0-py3-none-any.whl", hash = 
"sha256:7f127a39cf695277104ce4091bb26e417c59bb46e952ff3699c3a982d9c474ee", size = 732174, upload-time = "2026-03-04T22:15:26.63Z" }, +] + +[[package]] +name = "google-resumable-media" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/d7/520b62a35b23038ff005e334dba3ffc75fcf583bee26723f1fd8fd4b6919/google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae", size = 2163265, upload-time = "2025-11-17T15:38:06.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582", size = 81340, upload-time = "2025-11-17T15:38:05.594Z" }, +] + [[package]] name = "googleapis-common-protos" version = "1.72.0" @@ -826,6 +1018,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, ] +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, +] + [[package]] name = "greenlet" version = "3.3.1" @@ -886,6 +1083,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/2b/98c7f93e6db9977aaee07eb1e51ca63bd5f779b900d362791d3252e60558/greenlet-3.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:301860987846c24cb8964bdec0e31a96ad4a2a801b41b4ef40963c1b44f33451", size = 233181, upload-time = "2026-01-23T15:33:00.29Z" }, ] +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", extra = ["grpc"] }, + { name = 
"grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/1e/1011451679a983f2f5c6771a1682542ecb027776762ad031fd0d7129164b/grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389", size = 23745, upload-time = "2025-10-15T21:14:53.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6", size = 32690, upload-time = "2025-10-15T21:14:51.72Z" }, +] + [[package]] name = "grpcio" version = "1.78.0" @@ -960,6 +1171,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/6d/4d095d27ccd049865ecdafc467754e9e47ad0f677a30dda969c3590f6582/grpcio_reflection-1.78.0-py3-none-any.whl", hash = "sha256:06fcfde9e6888cdd12e9dd1cf6dc7c440c2e9acf420f696ccbe008672ed05b60", size = 22800, upload-time = "2026-02-06T10:01:33.822Z" }, ] +[[package]] +name = "grpcio-status" +version = "1.78.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/cd/89ce482a931b543b92cdd9b2888805518c4620e0094409acb8c81dd4610a/grpcio_status-1.78.0.tar.gz", hash = "sha256:a34cfd28101bfea84b5aa0f936b4b423019e9213882907166af6b3bddc59e189", size = 13808, upload-time = "2026-02-06T10:01:48.034Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/8a/1241ec22c41028bddd4a052ae9369267b4475265ad0ce7140974548dc3fa/grpcio_status-1.78.0-py3-none-any.whl", hash = "sha256:b492b693d4bf27b47a6c32590701724f1d3b9444b36491878fb71f6208857f34", size = 14523, upload-time = "2026-02-06T10:01:32.584Z" }, +] + [[package]] name = "grpcio-tools" version = "1.78.0" @@ -1884,6 +2109,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, ] +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + [[package]] name = "pyupgrade" version = "3.21.2" @@ -2057,6 +2294,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, ] +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", 
hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -2175,6 +2421,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] +[[package]] +name = "tenacity" +version = "9.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" }, +] + [[package]] name = "tokenize-rt" version = "6.2.0" @@ -2369,6 +2624,74 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/b4/8268da45f26f4fe84f6eae80a6ca1485ffb490a926afecff75fc48f61979/virtualenv-20.39.0-py3-none-any.whl", hash = "sha256:44888bba3775990a152ea1f73f8e5f566d49f11bbd1de61d426fd7732770043e", size = 5839121, upload-time = "2026-02-23T18:09:11.173Z" }, ] +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a", size = 177343, upload-time = "2026-01-10T09:22:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0", size = 175021, upload-time = "2026-01-10T09:22:22.696Z" }, + { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957", size = 175320, upload-time = "2026-01-10T09:22:23.94Z" }, + { url = "https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72", size = 183815, upload-time = "2026-01-10T09:22:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde", size = 185054, upload-time = "2026-01-10T09:22:27.101Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3", size = 184565, upload-time = "2026-01-10T09:22:28.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3", size = 183848, upload-time = "2026-01-10T09:22:30.394Z" }, + { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", hash = "sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9", size = 178249, upload-time = "2026-01-10T09:22:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35", size = 178685, upload-time = "2026-01-10T09:22:33.345Z" }, + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + [[package]] name = "zipp" version 
= "3.23.0" From ce4f61bc0dfe7478d237887827f2cd12b55fe3d2 Mon Sep 17 00:00:00 2001 From: Gabor Feher Date: Tue, 10 Mar 2026 07:43:29 +0000 Subject: [PATCH 2/4] Fix a pyright error and bump the pyright action to v3 --- .github/workflows/linter.yaml | 2 +- src/a2a/contrib/tasks/vertex_task_store.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/linter.yaml b/.github/workflows/linter.yaml index 584d68bd1..4069a4616 100644 --- a/.github/workflows/linter.yaml +++ b/.github/workflows/linter.yaml @@ -45,7 +45,7 @@ jobs: - name: Run Pyright (Pylance equivalent) id: pyright continue-on-error: true - uses: jakebailey/pyright-action@v2 + uses: jakebailey/pyright-action@v3 with: pylance-version: latest-release diff --git a/src/a2a/contrib/tasks/vertex_task_store.py b/src/a2a/contrib/tasks/vertex_task_store.py index 3b87c824d..1b5d852da 100644 --- a/src/a2a/contrib/tasks/vertex_task_store.py +++ b/src/a2a/contrib/tasks/vertex_task_store.py @@ -32,7 +32,7 @@ class VertexTaskStore(TaskStore): def __init__( self, - client: vertexai.Client, + client: vertexai.Client, # type: ignore agent_engine_resource_id: str, ) -> None: """Initializes the VertexTaskStore. 
From f58560705b8b90d6589ee540551b8514a9b9441a Mon Sep 17 00:00:00 2001 From: Gabor Feher Date: Tue, 10 Mar 2026 14:45:39 +0000 Subject: [PATCH 3/4] comment fixes --- tests/contrib/tasks/test_vertex_task_store.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/contrib/tasks/test_vertex_task_store.py b/tests/contrib/tasks/test_vertex_task_store.py index 1f80be3bd..936c7bbf5 100644 --- a/tests/contrib/tasks/test_vertex_task_store.py +++ b/tests/contrib/tasks/test_vertex_task_store.py @@ -207,8 +207,6 @@ async def test_save_and_get_detailed_task( assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED assert retrieved_task.metadata['key1'] == 'value1' assert retrieved_task.metadata['key2'] == 123 - - # Pydantic models handle their own serialization for comparison if model_dump is used assert retrieved_task.artifacts == test_task.artifacts @@ -415,7 +413,7 @@ async def test_update_task_delete_artifact( async def test_metadata_field_mapping( vertex_store: VertexTaskStore, ) -> None: - """Test that metadata field is correctly mapped between Pydantic and SQLAlchemy. + """Test that metadata field is correctly mapped between the core types and vertex. This test verifies: 1. 
Metadata can be None From d96aea13361afb5f6e3daddc337085b522904337 Mon Sep 17 00:00:00 2001 From: Gabor Feher Date: Tue, 10 Mar 2026 15:41:52 +0000 Subject: [PATCH 4/4] test import tweaks --- tests/contrib/tasks/__init__.py | 0 tests/contrib/tasks/test_vertex_task_store.py | 6 ++---- 2 files changed, 2 insertions(+), 4 deletions(-) create mode 100644 tests/contrib/tasks/__init__.py diff --git a/tests/contrib/tasks/__init__.py b/tests/contrib/tasks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/tasks/test_vertex_task_store.py b/tests/contrib/tasks/test_vertex_task_store.py index 936c7bbf5..96037c697 100644 --- a/tests/contrib/tasks/test_vertex_task_store.py +++ b/tests/contrib/tasks/test_vertex_task_store.py @@ -22,6 +22,8 @@ import pytest import pytest_asyncio +from .fake_vertex_client import FakeVertexClient + # Skip the entire test module if vertexai is not installed pytest.importorskip( @@ -40,7 +42,6 @@ 'VERTEX_API_VERSION', ] ) -import sys @pytest.fixture( @@ -114,9 +115,6 @@ async def vertex_store( reusing the module-scoped engine. Uses fake client for 'fake' backend. """ if backend_type == 'fake': - sys.path.append(os.path.dirname(__file__)) - from fake_vertex_client import FakeVertexClient - client = FakeVertexClient() else: project = os.environ.get('VERTEX_PROJECT')