Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
c6ecf79
feat(server): add v0.3 legacy compatibility for database models
sokoliva Mar 6, 2026
1c7e552
refactor: remove PLW
sokoliva Mar 6, 2026
61c8106
Merge branch '1.0-dev' into database-compatibility-1-0
sokoliva Mar 6, 2026
b52c8d4
refactor: remove global variable
sokoliva Mar 6, 2026
e5de61f
Merge branch 'database-compatibility-1-0' of https://github.com/sokol…
sokoliva Mar 6, 2026
dca38ab
Merge branch '1.0-dev' into database-compatibility-1-0
sokoliva Mar 9, 2026
3d98303
WIP
sokoliva Mar 9, 2026
3e44044
Merge branch 'database-compatibility-1-0' of https://github.com/sokol…
sokoliva Mar 9, 2026
0f8f9a9
Merge branch '1.0-dev' of https://github.com/a2aproject/a2a-python in…
sokoliva Mar 10, 2026
4e03b9d
refactor: replace PydanticType/PydanticListType with explicit JSON se…
sokoliva Mar 10, 2026
11598fa
refactor: simplify task deserialization in `_from_orm` by using `Pars…
sokoliva Mar 10, 2026
350539b
refactor: remove unused Any and cast imports from database_task_store.py
sokoliva Mar 10, 2026
b8504cf
Refactor: Move `types_v03` and `sqlalchemy.insert` imports to module …
sokoliva Mar 10, 2026
335ea41
test: clarify legacy data conversion comments in database store tests…
sokoliva Mar 10, 2026
42aada9
refactor: improve SQLAlchemy ORM to Protobuf message mapping for JSON…
sokoliva Mar 10, 2026
ba7a873
Merge branch '1.0-dev' into database-compatibility-1-0
sokoliva Mar 10, 2026
615f608
refactor: use `model_validate` for legacy task conversion to improve …
sokoliva Mar 10, 2026
327f517
Merge branch 'database-compatibility-1-0' of https://github.com/sokol…
sokoliva Mar 10, 2026
5ae5107
Fix comments and legacy conversion
sokoliva Mar 11, 2026
08a1172
Merge branch '1.0-dev' into database-compatibility-1-0
sokoliva Mar 11, 2026
57e0218
TaskStatus is mandatory
sokoliva Mar 11, 2026
64ddd9b
Merge branch 'database-compatibility-1-0' of https://github.com/sokol…
sokoliva Mar 11, 2026
72f4434
Merge branch '1.0-dev' into database-compatibility-1-0
sokoliva Mar 11, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 58 additions & 4 deletions src/a2a/server/models.py
Comment thread
sokoliva marked this conversation as resolved.
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from collections.abc import Callable
from datetime import datetime
from typing import TYPE_CHECKING, Any, Generic, TypeVar

Expand All @@ -11,10 +12,12 @@
return func


from google.protobuf.json_format import MessageToDict, ParseDict
from google.protobuf.json_format import MessageToDict, ParseDict, ParseError
from google.protobuf.message import Message as ProtoMessage
from pydantic import BaseModel
from pydantic import BaseModel, ValidationError

from a2a.compat.v0_3 import conversions
from a2a.compat.v0_3 import types as types_v03
from a2a.types.a2a_pb2 import Artifact, Message, TaskStatus


Expand Down Expand Up @@ -81,7 +84,19 @@
if isinstance(self.pydantic_type, type) and issubclass(
self.pydantic_type, ProtoMessage
):
return ParseDict(value, self.pydantic_type()) # type: ignore[return-value]
try:
return ParseDict(value, self.pydantic_type()) # type: ignore[return-value]
except (ParseError, ValueError):
# Try legacy conversion
legacy_map = _get_legacy_conversions()
if self.pydantic_type in legacy_map:
legacy_type, convert_func = legacy_map[self.pydantic_type]
try:
legacy_instance = legacy_type.model_validate(value)
return convert_func(legacy_instance)
except ValidationError:
pass
raise
# Assume it's a Pydantic model
return self.pydantic_type.model_validate(value) # type: ignore[attr-defined]

Expand Down Expand Up @@ -130,7 +145,24 @@
if isinstance(self.pydantic_type, type) and issubclass(
self.pydantic_type, ProtoMessage
):
return [ParseDict(item, self.pydantic_type()) for item in value] # type: ignore[misc]
result = []
legacy_map = _get_legacy_conversions()
legacy_info = legacy_map.get(self.pydantic_type)

for item in value:
try:
result.append(ParseDict(item, self.pydantic_type()))
except (ParseError, ValueError): # noqa: PERF203
if legacy_info:
legacy_type, convert_func = legacy_info
try:
legacy_instance = legacy_type.model_validate(item)
result.append(convert_func(legacy_instance))
continue
except ValidationError:
pass
raise
return result # type: ignore[return-value]
# Assume it's a Pydantic model
return [self.pydantic_type.model_validate(item) for item in value] # type: ignore[attr-defined]

Expand Down Expand Up @@ -292,3 +324,25 @@
"""Default push notification config model with standard table name."""

__tablename__ = 'push_notification_configs'


# Lazily-populated cache mapping a core Protobuf message type to its legacy
# v0.3 Pydantic model and the conversion function that upgrades it.
# None until _get_legacy_conversions() builds it on first use.
_LEGACY_CONVERSIONS: dict[type, tuple[type[BaseModel], Callable]] | None = None


def _get_legacy_conversions() -> dict[type, tuple[type[BaseModel], Callable]]:
    """Return the legacy v0.3 -> core conversion table, building it once.

    Maps a core Protobuf message type (``TaskStatus``, ``Message``,
    ``Artifact``) to a tuple of (legacy v0.3 Pydantic model type, conversion
    function). Deserialization code falls back to this table when stored JSON
    fails to parse as a modern Protobuf payload.

    Returns:
        The cached mapping; built on first call and reused afterwards.
    """
    # Intentional module-level cache: the table is immutable after first
    # construction, so a plain global avoids rebuilding it per row.
    global _LEGACY_CONVERSIONS  # noqa: PLW0603
    if _LEGACY_CONVERSIONS is None:
        # ``types_v03`` and ``conversions`` are imported at module scope, so
        # building this dict cannot raise ImportError; the table is created
        # directly without a defensive try/except.
        _LEGACY_CONVERSIONS = {
            TaskStatus: (
                types_v03.TaskStatus,
                conversions.to_core_task_status,
            ),
            Message: (types_v03.Message, conversions.to_core_message),
            Artifact: (types_v03.Artifact, conversions.to_core_artifact),
        }
    return _LEGACY_CONVERSIONS
135 changes: 135 additions & 0 deletions tests/server/tasks/test_database_task_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -683,4 +683,139 @@ async def test_owner_resource_scoping(
await task_store.delete('u2-task1', context_user2)


@pytest.mark.asyncio
async def test_get_0_3_task_detailed(
    db_store_parameterized: DatabaseTaskStore,
) -> None:
    """Test retrieving a detailed legacy v0.3 task from the database.

    This test simulates a database that already contains legacy v0.3 JSON data
    (string-based enums, different field names) and verifies that the store
    correctly converts it to the modern Protobuf-based Task model.
    """
    # Local imports: the legacy v0.3 types and the raw SQL insert are only
    # needed by this test, not by the rest of the module.
    from a2a.compat.v0_3 import types as types_v03
    from sqlalchemy import insert

    task_id = 'legacy-detailed-1'
    owner = 'legacy_user'
    context_user = ServerCallContext(user=SampleUser(user_name=owner))

    # 1. Create a detailed legacy Task using v0.3 models
    legacy_task = types_v03.Task(
        id=task_id,
        context_id='legacy-ctx-1',
        status=types_v03.TaskStatus(
            state=types_v03.TaskState.working,
            message=types_v03.Message(
                message_id='msg-status',
                role=types_v03.Role.agent,
                parts=[
                    types_v03.Part(
                        root=types_v03.TextPart(text='Legacy status message')
                    )
                ],
            ),
            timestamp='2023-10-27T10:00:00Z',
        ),
        history=[
            types_v03.Message(
                message_id='msg-1',
                role=types_v03.Role.user,
                parts=[
                    types_v03.Part(root=types_v03.TextPart(text='Hello legacy'))
                ],
            ),
            types_v03.Message(
                message_id='msg-2',
                role=types_v03.Role.agent,
                parts=[
                    types_v03.Part(
                        root=types_v03.DataPart(data={'legacy_key': 'value'})
                    )
                ],
            ),
        ],
        artifacts=[
            types_v03.Artifact(
                artifact_id='art-1',
                name='Legacy Artifact',
                parts=[
                    types_v03.Part(
                        root=types_v03.FilePart(
                            file=types_v03.FileWithUri(
                                uri='https://example.com/legacy.txt',
                                mime_type='text/plain',
                            )
                        )
                    )
                ],
            )
        ],
        metadata={'meta_key': 'meta_val'},
    )

    # 2. Manually insert the legacy data into the database
    # We must bypass the store's save() method because it expects Protobuf objects.
    async with db_store_parameterized.async_session_maker.begin() as session:
        # Pydantic model_dump(mode='json') produces exactly what would be in the legacy DB
        legacy_data = legacy_task.model_dump(mode='json')

        stmt = insert(db_store_parameterized.task_model).values(
            id=task_id,
            context_id=legacy_task.context_id,
            owner=owner,
            status=legacy_data['status'],
            history=legacy_data['history'],
            artifacts=legacy_data['artifacts'],
            task_metadata=legacy_data['metadata'],
            kind='task',
            last_updated=datetime.now(timezone.utc),
        )
        await session.execute(stmt)

    # 3. Retrieve the task using the standard store.get()
    # This will trigger the PydanticType/PydanticListType legacy fallback
    retrieved_task = await db_store_parameterized.get(task_id, context_user)

    # 4. Verify the conversion to modern Protobuf
    assert retrieved_task is not None
    assert retrieved_task.id == task_id
    assert retrieved_task.context_id == 'legacy-ctx-1'

    # Check Status & State (The most critical part: string 'working' -> enum TASK_STATE_WORKING)
    assert retrieved_task.status.state == TaskState.TASK_STATE_WORKING
    assert retrieved_task.status.message.message_id == 'msg-status'
    assert retrieved_task.status.message.role == Role.ROLE_AGENT
    assert (
        retrieved_task.status.message.parts[0].text == 'Legacy status message'
    )

    # Check History
    assert len(retrieved_task.history) == 2
    assert retrieved_task.history[0].message_id == 'msg-1'
    assert retrieved_task.history[0].role == Role.ROLE_USER
    assert retrieved_task.history[0].parts[0].text == 'Hello legacy'

    assert retrieved_task.history[1].message_id == 'msg-2'
    assert retrieved_task.history[1].role == Role.ROLE_AGENT
    # DataPart payloads round-trip through a protobuf Struct, so convert back
    # to a plain dict before comparing.
    assert (
        MessageToDict(retrieved_task.history[1].parts[0].data)['legacy_key']
        == 'value'
    )

    # Check Artifacts
    # NOTE(review): legacy FileWithUri.uri maps to the modern `url` field here
    # — confirm this matches the v0.3 conversion helpers.
    assert len(retrieved_task.artifacts) == 1
    assert retrieved_task.artifacts[0].artifact_id == 'art-1'
    assert retrieved_task.artifacts[0].name == 'Legacy Artifact'
    assert (
        retrieved_task.artifacts[0].parts[0].url
        == 'https://example.com/legacy.txt'
    )

    # Check Metadata
    assert dict(retrieved_task.metadata) == {'meta_key': 'meta_val'}

    # Clean up so the manually-inserted row does not leak into other tests
    # sharing this parameterized database.
    await db_store_parameterized.delete(task_id, context_user)


# Ensure aiosqlite, asyncpg, and aiomysql are installed in the test environment (added to pyproject.toml).
Loading