diff --git a/dapr/aio/clients/grpc/client.py b/dapr/aio/clients/grpc/client.py index 68d032b57..3c9eca5d4 100644 --- a/dapr/aio/clients/grpc/client.py +++ b/dapr/aio/clients/grpc/client.py @@ -82,6 +82,7 @@ InvokeMethodResponse, QueryResponse, QueryResponseItem, + MetadataMCPServer, RegisteredComponents, StateResponse, TopicEventResponse, @@ -1723,6 +1724,9 @@ async def get_metadata(self) -> GetMetadataResponse: for i in response.registered_components ] extended_metadata = dict(response.extended_metadata.items()) + mcp_servers = [ + MetadataMCPServer(name=s.name) for s in response.mcp_servers + ] return GetMetadataResponse( application_id=response.id, @@ -1730,6 +1734,7 @@ async def get_metadata(self) -> GetMetadataResponse: registered_components=registered_components, extended_metadata=extended_metadata, headers=await call.initial_metadata(), + mcp_servers=mcp_servers, ) async def schedule_job_alpha1(self, job: Job, overwrite: bool = False) -> DaprResponse: diff --git a/dapr/clients/grpc/_response.py b/dapr/clients/grpc/_response.py index 2f966d3e8..bbe8efb78 100644 --- a/dapr/clients/grpc/_response.py +++ b/dapr/clients/grpc/_response.py @@ -956,6 +956,7 @@ def __init__( registered_components: Sequence[RegisteredComponents], extended_metadata: Dict[str, str], headers: MetadataTuple = (), + mcp_servers: Optional[Sequence['MetadataMCPServer']] = None, ): """Initializes GetMetadataResponse. @@ -968,12 +969,15 @@ def __init__( extended_metadata (Dict[str, str]): mapping of custom (extended) attributes to their respective values. headers (Tuple, optional): the headers from Dapr gRPC response. + mcp_servers (Sequence[MetadataMCPServer], optional): list of + loaded MCPServer resources. 
""" super().__init__(headers) self._application_id = application_id self._active_actors_count = active_actors_count self._registered_components = registered_components self._extended_metadata = extended_metadata + self._mcp_servers = mcp_servers or [] @property def application_id(self) -> str: @@ -995,6 +999,11 @@ def extended_metadata(self) -> Dict[str, str]: """Mapping of custom (extended) attributes to their respective values.""" return self._extended_metadata + @property + def mcp_servers(self) -> Sequence['MetadataMCPServer']: + """List of loaded MCPServer resources.""" + return self._mcp_servers + class RegisteredComponents(NamedTuple): """Describes a loaded Dapr component.""" @@ -1012,6 +1021,13 @@ class RegisteredComponents(NamedTuple): """Supported capabilities for this component type and version.""" +class MetadataMCPServer(NamedTuple): + """Describes a loaded Dapr MCPServer resource.""" + + name: str + """Name of the MCPServer resource.""" + + class CryptoResponse(DaprResponse, Generic[TCryptoResponse]): """An iterable of cryptography API responses.""" diff --git a/dapr/clients/grpc/client.py b/dapr/clients/grpc/client.py index 5ac02f609..266f67065 100644 --- a/dapr/clients/grpc/client.py +++ b/dapr/clients/grpc/client.py @@ -73,6 +73,7 @@ InvokeMethodResponse, QueryResponse, QueryResponseItem, + MetadataMCPServer, RegisteredComponents, StateResponse, TopicEventResponse, @@ -1828,6 +1829,9 @@ def get_metadata(self) -> GetMetadataResponse: for i in response.registered_components ] extended_metadata = dict(response.extended_metadata.items()) + mcp_servers = [ + MetadataMCPServer(name=s.name) for s in response.mcp_servers + ] return GetMetadataResponse( application_id=response.id, @@ -1835,6 +1839,7 @@ def get_metadata(self) -> GetMetadataResponse: registered_components=registered_components, extended_metadata=extended_metadata, headers=call.initial_metadata(), + mcp_servers=mcp_servers, ) def set_metadata(self, attributeName: str, attributeValue: str) -> 
DaprResponse: diff --git a/dapr/proto/runtime/v1/metadata_pb2.py b/dapr/proto/runtime/v1/metadata_pb2.py index f19bea65d..d3eb414e6 100644 --- a/dapr/proto/runtime/v1/metadata_pb2.py +++ b/dapr/proto/runtime/v1/metadata_pb2.py @@ -24,54 +24,56 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$dapr/proto/runtime/v1/metadata.proto\x12\x15\x64\x61pr.proto.runtime.v1\"\x14\n\x12GetMetadataRequest\"\xd1\x07\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n \x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x12K\n\tscheduler\x18\x0b \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataSchedulerH\x00R\tscheduler\x88\x01\x01\x12K\n\tworkflows\x18\x0c \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataWorkflowsH\x01R\tworkflows\x88\x01\x01\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_schedulerB\x0c\n\n_workflows\"@\n\x11MetadataWorkflows\x12+\n\x11\x63onnected_workers\x18\x01 \x01(\x05R\x10\x63onnectedWorkers\"0\n\x11MetadataScheduler\x12\x1b\n\x13\x63onnected_addresses\x18\x01 
\x03(\t\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 \x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 
\x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x42q\n\nio.dapr.v1B\x12\x44\x61prMetadataProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$dapr/proto/runtime/v1/metadata.proto\x12\x15\x64\x61pr.proto.runtime.v1\"\x14\n\x12GetMetadataRequest\"\x9c\x08\n\x13GetMetadataResponse\x12\n\n\x02id\x18\x01 \x01(\t\x12Q\n\x13\x61\x63tive_actors_count\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountB\x02\x18\x01R\x06\x61\x63tors\x12V\n\x15registered_components\x18\x03 \x03(\x0b\x32+.dapr.proto.runtime.v1.RegisteredComponentsR\ncomponents\x12\x65\n\x11\x65xtended_metadata\x18\x04 \x03(\x0b\x32@.dapr.proto.runtime.v1.GetMetadataResponse.ExtendedMetadataEntryR\x08\x65xtended\x12O\n\rsubscriptions\x18\x05 \x03(\x0b\x32).dapr.proto.runtime.v1.PubsubSubscriptionR\rsubscriptions\x12R\n\x0ehttp_endpoints\x18\x06 \x03(\x0b\x32+.dapr.proto.runtime.v1.MetadataHTTPEndpointR\rhttpEndpoints\x12j\n\x19\x61pp_connection_properties\x18\x07 \x01(\x0b\x32..dapr.proto.runtime.v1.AppConnectionPropertiesR\x17\x61ppConnectionProperties\x12\'\n\x0fruntime_version\x18\x08 \x01(\tR\x0eruntimeVersion\x12)\n\x10\x65nabled_features\x18\t \x03(\tR\x0f\x65nabledFeatures\x12H\n\ractor_runtime\x18\n 
\x01(\x0b\x32#.dapr.proto.runtime.v1.ActorRuntimeR\x0c\x61\x63torRuntime\x12K\n\tscheduler\x18\x0b \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataSchedulerH\x00R\tscheduler\x88\x01\x01\x12K\n\tworkflows\x18\x0c \x01(\x0b\x32(.dapr.proto.runtime.v1.MetadataWorkflowsH\x01R\tworkflows\x88\x01\x01\x12I\n\x0bmcp_servers\x18\r \x03(\x0b\x32(.dapr.proto.runtime.v1.MetadataMCPServerR\nmcpServers\x1a\x37\n\x15\x45xtendedMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0c\n\n_schedulerB\x0c\n\n_workflows\"@\n\x11MetadataWorkflows\x12+\n\x11\x63onnected_workers\x18\x01 \x01(\x05R\x10\x63onnectedWorkers\"0\n\x11MetadataScheduler\x12\x1b\n\x13\x63onnected_addresses\x18\x01 \x03(\t\"\xbc\x02\n\x0c\x41\x63torRuntime\x12]\n\x0eruntime_status\x18\x01 \x01(\x0e\x32\x36.dapr.proto.runtime.v1.ActorRuntime.ActorRuntimeStatusR\rruntimeStatus\x12M\n\ractive_actors\x18\x02 \x03(\x0b\x32(.dapr.proto.runtime.v1.ActiveActorsCountR\x0c\x61\x63tiveActors\x12\x1d\n\nhost_ready\x18\x03 \x01(\x08R\thostReady\x12\x1c\n\tplacement\x18\x04 \x01(\tR\tplacement\"A\n\x12\x41\x63torRuntimeStatus\x12\x10\n\x0cINITIALIZING\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\"0\n\x11\x41\x63tiveActorsCount\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"Y\n\x14RegisteredComponents\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x14\n\x0c\x63\x61pabilities\x18\x04 \x03(\t\"*\n\x14MetadataHTTPEndpoint\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\'\n\x11MetadataMCPServer\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xd1\x01\n\x17\x41ppConnectionProperties\x12\x0c\n\x04port\x18\x01 \x01(\x05\x12\x10\n\x08protocol\x18\x02 \x01(\t\x12\'\n\x0f\x63hannel_address\x18\x03 \x01(\tR\x0e\x63hannelAddress\x12\'\n\x0fmax_concurrency\x18\x04 \x01(\x05R\x0emaxConcurrency\x12\x44\n\x06health\x18\x05 
\x01(\x0b\x32\x34.dapr.proto.runtime.v1.AppConnectionHealthProperties\"\xdc\x01\n\x1d\x41ppConnectionHealthProperties\x12*\n\x11health_check_path\x18\x01 \x01(\tR\x0fhealthCheckPath\x12\x32\n\x15health_probe_interval\x18\x02 \x01(\tR\x13healthProbeInterval\x12\x30\n\x14health_probe_timeout\x18\x03 \x01(\tR\x12healthProbeTimeout\x12)\n\x10health_threshold\x18\x04 \x01(\x05R\x0fhealthThreshold\"\x86\x03\n\x12PubsubSubscription\x12\x1f\n\x0bpubsub_name\x18\x01 \x01(\tR\npubsubname\x12\x14\n\x05topic\x18\x02 \x01(\tR\x05topic\x12S\n\x08metadata\x18\x03 \x03(\x0b\x32\x37.dapr.proto.runtime.v1.PubsubSubscription.MetadataEntryR\x08metadata\x12\x44\n\x05rules\x18\x04 \x01(\x0b\x32..dapr.proto.runtime.v1.PubsubSubscriptionRulesR\x05rules\x12*\n\x11\x64\x65\x61\x64_letter_topic\x18\x05 \x01(\tR\x0f\x64\x65\x61\x64LetterTopic\x12\x41\n\x04type\x18\x06 \x01(\x0e\x32-.dapr.proto.runtime.v1.PubsubSubscriptionTypeR\x04type\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"W\n\x17PubsubSubscriptionRules\x12<\n\x05rules\x18\x01 \x03(\x0b\x32-.dapr.proto.runtime.v1.PubsubSubscriptionRule\"5\n\x16PubsubSubscriptionRule\x12\r\n\x05match\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"0\n\x12SetMetadataRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t*W\n\x16PubsubSubscriptionType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x44\x45\x43LARATIVE\x10\x01\x12\x10\n\x0cPROGRAMMATIC\x10\x02\x12\r\n\tSTREAMING\x10\x03\x42i\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\xaa\x02\x1b\x44\x61pr.Client.Autogen.Grpc.v1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dapr.proto.runtime.v1.metadata_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = 
b'\n\nio.dapr.v1B\022DaprMetadataProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' + _globals['DESCRIPTOR']._serialized_options = b'\n\nio.dapr.v1B\nDaprProtosZ1github.com/dapr/dapr/pkg/proto/runtime/v1;runtime\252\002\033Dapr.Client.Autogen.Grpc.v1' _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._loaded_options = None _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_options = b'8\001' _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._loaded_options = None _globals['_GETMETADATARESPONSE'].fields_by_name['active_actors_count']._serialized_options = b'\030\001' _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._loaded_options = None _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_options = b'8\001' - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=2707 - _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=2794 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_start=2823 + _globals['_PUBSUBSUBSCRIPTIONTYPE']._serialized_end=2910 _globals['_GETMETADATAREQUEST']._serialized_start=63 _globals['_GETMETADATAREQUEST']._serialized_end=83 _globals['_GETMETADATARESPONSE']._serialized_start=86 - _globals['_GETMETADATARESPONSE']._serialized_end=1063 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=980 - _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=1035 - _globals['_METADATAWORKFLOWS']._serialized_start=1065 - _globals['_METADATAWORKFLOWS']._serialized_end=1129 - _globals['_METADATASCHEDULER']._serialized_start=1131 - _globals['_METADATASCHEDULER']._serialized_end=1179 - _globals['_ACTORRUNTIME']._serialized_start=1182 - _globals['_ACTORRUNTIME']._serialized_end=1498 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=1433 - _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=1498 - _globals['_ACTIVEACTORSCOUNT']._serialized_start=1500 - _globals['_ACTIVEACTORSCOUNT']._serialized_end=1548 - 
_globals['_REGISTEREDCOMPONENTS']._serialized_start=1550 - _globals['_REGISTEREDCOMPONENTS']._serialized_end=1639 - _globals['_METADATAHTTPENDPOINT']._serialized_start=1641 - _globals['_METADATAHTTPENDPOINT']._serialized_end=1683 - _globals['_APPCONNECTIONPROPERTIES']._serialized_start=1686 - _globals['_APPCONNECTIONPROPERTIES']._serialized_end=1895 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=1898 - _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=2118 - _globals['_PUBSUBSUBSCRIPTION']._serialized_start=2121 - _globals['_PUBSUBSUBSCRIPTION']._serialized_end=2511 - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_start=2464 - _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_end=2511 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=2513 - _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=2600 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=2602 - _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=2655 - _globals['_SETMETADATAREQUEST']._serialized_start=2657 - _globals['_SETMETADATAREQUEST']._serialized_end=2705 + _globals['_GETMETADATARESPONSE']._serialized_end=1138 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_start=1055 + _globals['_GETMETADATARESPONSE_EXTENDEDMETADATAENTRY']._serialized_end=1110 + _globals['_METADATAWORKFLOWS']._serialized_start=1140 + _globals['_METADATAWORKFLOWS']._serialized_end=1204 + _globals['_METADATASCHEDULER']._serialized_start=1206 + _globals['_METADATASCHEDULER']._serialized_end=1254 + _globals['_ACTORRUNTIME']._serialized_start=1257 + _globals['_ACTORRUNTIME']._serialized_end=1573 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_start=1508 + _globals['_ACTORRUNTIME_ACTORRUNTIMESTATUS']._serialized_end=1573 + _globals['_ACTIVEACTORSCOUNT']._serialized_start=1575 + _globals['_ACTIVEACTORSCOUNT']._serialized_end=1623 + _globals['_REGISTEREDCOMPONENTS']._serialized_start=1625 + _globals['_REGISTEREDCOMPONENTS']._serialized_end=1714 + 
_globals['_METADATAHTTPENDPOINT']._serialized_start=1716 + _globals['_METADATAHTTPENDPOINT']._serialized_end=1758 + _globals['_METADATAMCPSERVER']._serialized_start=1760 + _globals['_METADATAMCPSERVER']._serialized_end=1799 + _globals['_APPCONNECTIONPROPERTIES']._serialized_start=1802 + _globals['_APPCONNECTIONPROPERTIES']._serialized_end=2011 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_start=2014 + _globals['_APPCONNECTIONHEALTHPROPERTIES']._serialized_end=2234 + _globals['_PUBSUBSUBSCRIPTION']._serialized_start=2237 + _globals['_PUBSUBSUBSCRIPTION']._serialized_end=2627 + _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_start=2580 + _globals['_PUBSUBSUBSCRIPTION_METADATAENTRY']._serialized_end=2627 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_start=2629 + _globals['_PUBSUBSUBSCRIPTIONRULES']._serialized_end=2716 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_start=2718 + _globals['_PUBSUBSUBSCRIPTIONRULE']._serialized_end=2771 + _globals['_SETMETADATAREQUEST']._serialized_start=2773 + _globals['_SETMETADATAREQUEST']._serialized_end=2821 # @@protoc_insertion_point(module_scope) diff --git a/dapr/proto/runtime/v1/metadata_pb2.pyi b/dapr/proto/runtime/v1/metadata_pb2.pyi index 82ab0137f..f79863821 100644 --- a/dapr/proto/runtime/v1/metadata_pb2.pyi +++ b/dapr/proto/runtime/v1/metadata_pb2.pyi @@ -112,6 +112,7 @@ class GetMetadataResponse(_message.Message): ACTOR_RUNTIME_FIELD_NUMBER: _builtins.int SCHEDULER_FIELD_NUMBER: _builtins.int WORKFLOWS_FIELD_NUMBER: _builtins.int + MCP_SERVERS_FIELD_NUMBER: _builtins.int id: _builtins.str runtime_version: _builtins.str @_builtins.property @@ -137,6 +138,8 @@ class GetMetadataResponse(_message.Message): def scheduler(self) -> Global___MetadataScheduler: ... @_builtins.property def workflows(self) -> Global___MetadataWorkflows: ... + @_builtins.property + def mcp_servers(self) -> _containers.RepeatedCompositeFieldContainer[Global___MetadataMCPServer]: ... 
def __init__( self, *, @@ -152,10 +155,11 @@ class GetMetadataResponse(_message.Message): actor_runtime: Global___ActorRuntime | None = ..., scheduler: Global___MetadataScheduler | None = ..., workflows: Global___MetadataWorkflows | None = ..., + mcp_servers: _abc.Iterable[Global___MetadataMCPServer] | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "scheduler", b"scheduler", "workflows", b"workflows"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "scheduler", b"scheduler", "subscriptions", b"subscriptions", "workflows", b"workflows"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler", "_workflows", b"_workflows", "active_actors_count", b"active_actors_count", "actor_runtime", b"actor_runtime", "app_connection_properties", b"app_connection_properties", "enabled_features", b"enabled_features", "extended_metadata", b"extended_metadata", "http_endpoints", b"http_endpoints", "id", b"id", "mcp_servers", b"mcp_servers", "registered_components", b"registered_components", "runtime_version", b"runtime_version", "scheduler", b"scheduler", "subscriptions", b"subscriptions", "workflows", b"workflows"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__scheduler: _TypeAlias = _typing.Literal["scheduler"] # noqa: Y015 _WhichOneofArgType__scheduler: _TypeAlias = _typing.Literal["_scheduler", b"_scheduler"] # noqa: Y015 @@ -324,6 +328,22 @@ class MetadataHTTPEndpoint(_message.Message): Global___MetadataHTTPEndpoint: _TypeAlias = MetadataHTTPEndpoint # noqa: Y015 +@_typing.final +class MetadataMCPServer(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + NAME_FIELD_NUMBER: _builtins.int + name: _builtins.str + def __init__( + self, + *, + name: _builtins.str = ..., + ) -> None: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["name", b"name"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + +Global___MetadataMCPServer: _TypeAlias = MetadataMCPServer # noqa: Y015 + @_typing.final class AppConnectionProperties(_message.Message): DESCRIPTOR: _descriptor.Descriptor diff --git a/examples/mcp/README.md b/examples/mcp/README.md new file mode 100644 index 000000000..11205a74c --- /dev/null +++ b/examples/mcp/README.md @@ -0,0 +1,70 @@ +# MCP Examples + +Examples demonstrating how to use the `DaprMCPClient` from the Dapr Python SDK +to discover and invoke MCP tools via Dapr's built-in workflow orchestrations. + +## Prerequisites + +- **Dapr CLI** installed with `dapr init` completed (provides Redis on `localhost:6379`) +- **Python 3.11+** +- Install deps: `pip install -r requirements.txt` + +## Files + +| File | Purpose | +|------|---------| +| `mcp_tool_discovery.py` | The example: discovers tools and runs one in a workflow. | +| `weather_mcp_server.py` | Self-contained MCP server with `get_weather` / `get_forecast` tools (streamable-HTTP on `:8081/mcp`). | +| `resources/weather.yaml` | Dapr `MCPServer` resource pointing the sidecar at the weather server. | +| `resources/statestore.yaml` | Redis state store with `actorStateStore: true` (required by workflows). 
| + +## Run + +In one terminal, start the bundled MCP server: + +```bash +python weather_mcp_server.py +``` + +In another terminal, run the example with Dapr: + +```bash +dapr run \ + --app-id mcp-demo \ + --resources-path ./resources \ + -- python mcp_tool_discovery.py +``` + +The example will: + +1. Connect to the `weather` MCPServer resource via the sidecar. +2. Print each discovered tool's name, description, and workflow name. +3. Schedule a `CallTool` child workflow for the first tool with `{"location": "Seattle"}`. +4. Print the result. + +## Using a different MCP server + +Edit `resources/weather.yaml` to point at any MCP-compatible endpoint. Supported +transports: + +```yaml +spec: + endpoint: + streamableHTTP: + url: http://host:port/mcp +``` + +```yaml +spec: + endpoint: + sse: + url: http://host:port/sse +``` + +```yaml +spec: + endpoint: + stdio: + command: python + args: ["path/to/server.py"] +``` diff --git a/examples/mcp/mcp_tool_discovery.py b/examples/mcp/mcp_tool_discovery.py new file mode 100644 index 000000000..5e1754a7b --- /dev/null +++ b/examples/mcp/mcp_tool_discovery.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +MCP Tool Discovery Example +=========================== + +Demonstrates using DaprMCPClient to discover MCP tools from Dapr +MCPServer resources — without any agent framework dependency. 
+ +This is the SDK-level client that any agent framework can build on top of. + +Prerequisites +------------- +1. A Dapr MCPServer resource named "weather" loaded in the sidecar:: + + apiVersion: dapr.io/v1alpha1 + kind: MCPServer + metadata: + name: weather + spec: + endpoint: + streamableHTTP: + url: http://localhost:8081/mcp + +2. An MCP server running at the configured URL. + +Run +--- +:: + + dapr run --app-id mcp-demo --resources-path ./resources -- python mcp_tool_discovery.py +""" + +from dapr.ext.workflow import ( + DaprMCPClient, + DaprWorkflowClient, + DaprWorkflowContext, + WorkflowActivityContext, + WorkflowRuntime, + create_pydantic_model_from_schema, +) + + +def main(): + # ------------------------------------------------------------------ + # 1. Discover MCP tools from a Dapr MCPServer resource. + # ------------------------------------------------------------------ + print("Connecting to MCPServer 'weather'...") + + client = DaprMCPClient(timeout_in_seconds=30) + client.connect("weather") + + tools = client.get_all_tools() + print(f"\nDiscovered {len(tools)} tool(s):\n") + for tool in tools: + print(f" Name: {tool.name}") + print(f" Description: {tool.description}") + print(f" Server: {tool.server_name}") + print(f" Workflow: {tool.call_tool_workflow}") + if tool.input_schema.get("properties"): + props = list(tool.input_schema["properties"].keys()) + print(f" Parameters: {', '.join(props)}") + print() + + # ------------------------------------------------------------------ + # 2. Use the tool in a Dapr workflow. + # This shows how any framework can use MCPToolDef to schedule + # durable tool calls via child workflows. + # ------------------------------------------------------------------ + if not tools: + print("No tools discovered — exiting.") + return + + tool = tools[0] + print(f"Using tool '{tool.name}' in a workflow...\n") + + # Build a Pydantic model from the tool's JSON Schema for validation. 
+ if tool.input_schema: + ArgsModel = create_pydantic_model_from_schema( + tool.input_schema, f"{tool.name}Args" + ) + print(f" Args model: {ArgsModel.__name__}") + print(f" Fields: {list(ArgsModel.model_fields.keys())}\n") + + # Define a simple workflow that calls the MCP tool. + def call_mcp_tool_workflow(ctx: DaprWorkflowContext, input: dict): + """Workflow that calls an MCP tool as a child workflow.""" + result = yield ctx.call_child_workflow( + workflow=tool.call_tool_workflow, + input={ + "toolName": tool.name, + "arguments": input.get("arguments", {}), + }, + ) + return result + + def print_result(ctx: WorkflowActivityContext, input): + """Activity that prints the tool result.""" + print(f" Tool result: {input}") + + # Register and run the workflow. + wfr = WorkflowRuntime() + wfr.register_workflow(call_mcp_tool_workflow) + wfr.register_activity(print_result) + wfr.start() + + wf_client = DaprWorkflowClient() + instance_id = wf_client.schedule_new_workflow( + workflow=call_mcp_tool_workflow, + input={"arguments": {"location": "Seattle"}}, + ) + print(f" Scheduled workflow: {instance_id}") + + state = wf_client.wait_for_workflow_completion( + instance_id=instance_id, + timeout_in_seconds=30, + fetch_payloads=True, + ) + + if state: + print(f" Status: {state.runtime_status.name}") + print(f" Output: {state.serialized_output}") + else: + print(" Workflow timed out.") + + wfr.shutdown() + print("\nDone.") + + +if __name__ == "__main__": + main() diff --git a/examples/mcp/requirements.txt b/examples/mcp/requirements.txt new file mode 100644 index 000000000..087549b94 --- /dev/null +++ b/examples/mcp/requirements.txt @@ -0,0 +1,2 @@ +dapr-ext-workflow +mcp diff --git a/examples/mcp/resources/statestore.yaml b/examples/mcp/resources/statestore.yaml new file mode 100644 index 000000000..0bbb9d8f9 --- /dev/null +++ b/examples/mcp/resources/statestore.yaml @@ -0,0 +1,14 @@ +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: workflowstatestore +spec: + 
type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" + - name: actorStateStore + value: "true" diff --git a/examples/mcp/resources/weather.yaml b/examples/mcp/resources/weather.yaml new file mode 100644 index 000000000..2cb0710c1 --- /dev/null +++ b/examples/mcp/resources/weather.yaml @@ -0,0 +1,8 @@ +apiVersion: dapr.io/v1alpha1 +kind: MCPServer +metadata: + name: weather +spec: + endpoint: + streamableHTTP: + url: http://localhost:8081/mcp diff --git a/examples/mcp/weather_mcp_server.py b/examples/mcp/weather_mcp_server.py new file mode 100644 index 000000000..b9dcd1ae7 --- /dev/null +++ b/examples/mcp/weather_mcp_server.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Weather MCP Server (streamable-HTTP transport) +============================================== + +Self-contained MCP server used by ``mcp_tool_discovery.py``. Exposes +``get_weather`` and ``get_forecast`` tools over the streamable-HTTP +transport at ``http://:/mcp``. 
+ +Run:: + + python weather_mcp_server.py [--host 0.0.0.0] [--port 8081] +""" + +import argparse +import logging +import random + +from mcp.server.fastmcp import FastMCP + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger("weather-mcp-server") + + +def build_server(host: str, port: int) -> FastMCP: + mcp = FastMCP("WeatherService", host=host, port=port) + + @mcp.tool() + async def get_weather(location: str) -> str: + """Get current weather information for a location. + + Args: + location: City or region name (e.g. 'Seattle', 'London'). + + Returns: + Current temperature and conditions. + """ + temperature = random.randint(32, 105) + conditions = random.choice( + ["sunny", "cloudy", "partly cloudy", "rainy", "windy", "snowy", "foggy"] + ) + humidity = random.randint(20, 95) + return f"{location}: {temperature}F, {conditions}, {humidity}% humidity." + + @mcp.tool() + async def get_forecast(location: str, days: int = 5) -> str: + """Get a multi-day weather forecast for a location. + + Args: + location: City or region name. + days: Number of days to forecast (default 5, max 10). + + Returns: + Multi-line forecast summary. 
+ """ + days = min(max(days, 1), 10) + lines = [f"{location} {days}-day forecast:"] + for i in range(1, days + 1): + high = random.randint(55, 105) + low = high - random.randint(10, 25) + cond = random.choice( + ["sunny", "cloudy", "rainy", "stormy", "clear", "partly cloudy"] + ) + lines.append(f" Day {i}: High {high}F / Low {low}F, {cond}") + return "\n".join(lines) + + return mcp + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Weather MCP server (streamable-HTTP transport)" + ) + parser.add_argument("--host", default="0.0.0.0") + parser.add_argument("--port", type=int, default=8081) + args = parser.parse_args() + + mcp = build_server(args.host, args.port) + logger.info( + "Weather MCP server listening on http://%s:%d/mcp", args.host, args.port + ) + try: + mcp.run(transport="streamable-http") + except (KeyboardInterrupt, SystemExit): + logger.info("Shutting down.") + + +if __name__ == "__main__": + main() diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py index ef8e082e9..aa8f8e7c0 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/__init__.py @@ -22,6 +22,10 @@ from dapr.ext.workflow.workflow_runtime import WorkflowRuntime, alternate_name from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus +# MCP +from dapr.ext.workflow.mcp import DaprMCPClient, MCPToolDef, MCP_WORKFLOW_PREFIX +from dapr.ext.workflow.mcp_schema import create_pydantic_model_from_schema + __all__ = [ 'WorkflowRuntime', 'DaprWorkflowClient', @@ -34,4 +38,9 @@ 'alternate_name', 'RetryPolicy', 'TaskFailedError', + # MCP + 'DaprMCPClient', + 'MCPToolDef', + 'MCP_WORKFLOW_PREFIX', + 'create_pydantic_model_from_schema', ] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py index 
c969a3d2a..d7114434c 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"T\n\x0f\x41\x64\x64\x45ventRequest\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xa4\x03\n\x1f\x43ompleteWorkflowWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12#\n\x08instance\x18\x02 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12\x43\n\x0bnewMessages\x18\x08 \x03(\x0b\x32..durabletask.protos.backend.v1.WorkflowMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\"\n CompleteWorkflowWorkItemResponse\"T\n\x0fWorkflowMessage\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x9c\x01\n\x14\x42\x61\x63kendWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 
\x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 \x01(\x04\"P\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEventJ\x04\x08\x02\x10\x03R\x06policy\"\xca\x03\n\x10WorkflowMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\"\x99\x01\n\x1c\x42\x61\x63kendWorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\x12\x17\n\x0fsignatureLength\x18\x04 \x01(\x04\x12 \n\x18signingCertificateLength\x18\x05 \x01(\x04\")\n\x12SigningCertificate\x12\x13\n\x0b\x63\x65rtificate\x18\x01 \x01(\x0c\"\xc4\x01\n\x10HistorySignature\x12\x17\n\x0fstartEventIndex\x18\x01 \x01(\x04\x12\x12\n\neventCount\x18\x02 \x01(\x04\x12$\n\x17previousSignatureDigest\x18\x03 \x01(\x0cH\x00\x88\x01\x01\x12\x14\n\x0c\x65ventsDigest\x18\x04 \x01(\x0c\x12\x18\n\x10\x63\x65rtificateIndex\x18\x05 \x01(\x04\x12\x11\n\tsignature\x18\x06 \x01(\x0c\x42\x1a\n\x18_previousSignatureDigest\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x15\x62\x61\x63kend_service.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"Y\n\x0f\x41\x64\x64\x45ventRequest\x12(\n\x08instance\x18\x01 \x01(\x0b\x32\x16.OrchestrationInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x12\n\x10\x41\x64\x64\x45ventResponse\"`\n\x1f\x43ompleteActivityWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12$\n\rresponseEvent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\"\n CompleteActivityWorkItemResponse\"\xb2\x03\n$CompleteOrchestrationWorkItemRequest\x12\x17\n\x0f\x63ompletionToken\x18\x01 \x01(\t\x12(\n\x08instance\x18\x02 \x01(\x0b\x32\x16.OrchestrationInstance\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12\x32\n\x0c\x63ustomStatus\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\nnewHistory\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12\x1f\n\x08newTasks\x18\x06 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewTimers\x18\x07 \x03(\x0b\x32\r.HistoryEvent\x12G\n\x0bnewMessages\x18\x08 \x03(\x0b\x32\x32.durabletask.protos.backend.v1.OrchestratorMessage\x12\x37\n\x12numEventsProcessed\x18\t \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\'\n%CompleteOrchestrationWorkItemResponse\"]\n\x13OrchestratorMessage\x12(\n\x08instance\x18\x01 \x01(\x0b\x32\x16.OrchestrationInstance\x12\x1c\n\x05\x65vent\x18\x02 \x01(\x0b\x32\r.HistoryEvent\"\x95\x01\n\rWorkflowState\x12\x1c\n\x05inbox\x18\x01 \x03(\x0b\x32\r.HistoryEvent\x12\x1e\n\x07history\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x12\n\ngeneration\x18\x04 \x01(\x04\"\x7f\n\x1d\x43reateWorkflowInstanceRequest\x12!\n\nstartEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x30\n\x06policy\x18\x02 
\x01(\x0b\x32\x1b.OrchestrationIdReusePolicyH\x00\x88\x01\x01\x42\t\n\x07_policy\"\xcf\x03\n\x15OrchestrationMetadata\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\rruntimeStatus\x18\x03 \x01(\x0e\x32\x14.OrchestrationStatus\x12-\n\tcreatedAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rlastUpdatedAt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\t \x01(\x0b\x32\x13.TaskFailureDetails\x12/\n\x0b\x63ompletedAt\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10parentInstanceId\x18\x0b \x01(\t\"W\n\x15WorkflowStateMetadata\x12\x13\n\x0binboxLength\x18\x01 \x01(\x04\x12\x15\n\rhistoryLength\x18\x02 \x01(\x04\x12\x12\n\ngeneration\x18\x03 \x01(\x04\"E\n\x0c\x44urableTimer\x12!\n\ntimerEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x12\n\ngeneration\x18\x02 \x01(\x04\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -37,31 +37,27 @@ _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_ADDEVENTREQUEST']._serialized_start=164 - _globals['_ADDEVENTREQUEST']._serialized_end=248 - _globals['_ADDEVENTRESPONSE']._serialized_start=250 - _globals['_ADDEVENTRESPONSE']._serialized_end=268 - _globals['_COMPLETEACTIVITYWORKITEMREQUEST']._serialized_start=270 - _globals['_COMPLETEACTIVITYWORKITEMREQUEST']._serialized_end=366 - _globals['_COMPLETEACTIVITYWORKITEMRESPONSE']._serialized_start=368 - 
_globals['_COMPLETEACTIVITYWORKITEMRESPONSE']._serialized_end=402 - _globals['_COMPLETEWORKFLOWWORKITEMREQUEST']._serialized_start=405 - _globals['_COMPLETEWORKFLOWWORKITEMREQUEST']._serialized_end=825 - _globals['_COMPLETEWORKFLOWWORKITEMRESPONSE']._serialized_start=827 - _globals['_COMPLETEWORKFLOWWORKITEMRESPONSE']._serialized_end=861 - _globals['_WORKFLOWMESSAGE']._serialized_start=863 - _globals['_WORKFLOWMESSAGE']._serialized_end=947 - _globals['_BACKENDWORKFLOWSTATE']._serialized_start=950 - _globals['_BACKENDWORKFLOWSTATE']._serialized_end=1106 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1108 - _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1188 - _globals['_WORKFLOWMETADATA']._serialized_start=1191 - _globals['_WORKFLOWMETADATA']._serialized_end=1649 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_start=1652 - _globals['_BACKENDWORKFLOWSTATEMETADATA']._serialized_end=1805 - _globals['_SIGNINGCERTIFICATE']._serialized_start=1807 - _globals['_SIGNINGCERTIFICATE']._serialized_end=1848 - _globals['_HISTORYSIGNATURE']._serialized_start=1851 - _globals['_HISTORYSIGNATURE']._serialized_end=2047 - _globals['_DURABLETIMER']._serialized_start=2049 - _globals['_DURABLETIMER']._serialized_end=2118 + _globals['_ADDEVENTREQUEST']._serialized_end=253 + _globals['_ADDEVENTRESPONSE']._serialized_start=255 + _globals['_ADDEVENTRESPONSE']._serialized_end=273 + _globals['_COMPLETEACTIVITYWORKITEMREQUEST']._serialized_start=275 + _globals['_COMPLETEACTIVITYWORKITEMREQUEST']._serialized_end=371 + _globals['_COMPLETEACTIVITYWORKITEMRESPONSE']._serialized_start=373 + _globals['_COMPLETEACTIVITYWORKITEMRESPONSE']._serialized_end=407 + _globals['_COMPLETEORCHESTRATIONWORKITEMREQUEST']._serialized_start=410 + _globals['_COMPLETEORCHESTRATIONWORKITEMREQUEST']._serialized_end=844 + _globals['_COMPLETEORCHESTRATIONWORKITEMRESPONSE']._serialized_start=846 + _globals['_COMPLETEORCHESTRATIONWORKITEMRESPONSE']._serialized_end=885 + 
_globals['_ORCHESTRATORMESSAGE']._serialized_start=887 + _globals['_ORCHESTRATORMESSAGE']._serialized_end=980 + _globals['_WORKFLOWSTATE']._serialized_start=983 + _globals['_WORKFLOWSTATE']._serialized_end=1132 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_start=1134 + _globals['_CREATEWORKFLOWINSTANCEREQUEST']._serialized_end=1261 + _globals['_ORCHESTRATIONMETADATA']._serialized_start=1264 + _globals['_ORCHESTRATIONMETADATA']._serialized_end=1727 + _globals['_WORKFLOWSTATEMETADATA']._serialized_start=1729 + _globals['_WORKFLOWSTATEMETADATA']._serialized_end=1816 + _globals['_DURABLETIMER']._serialized_start=1818 + _globals['_DURABLETIMER']._serialized_end=1887 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi index 7b1a84ded..da6552493 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2.pyi @@ -17,45 +17,46 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @_typing.final class AddEventRequest(_message.Message): - """Request payload for adding new workflow events.""" + """Request payload for adding new orchestration events.""" DESCRIPTOR: _descriptor.Descriptor INSTANCE_FIELD_NUMBER: _builtins.int EVENT_FIELD_NUMBER: _builtins.int @_builtins.property - def instance(self) -> _orchestration_pb2.WorkflowInstance: - """The ID of the workflow 
to send an event to.""" + def instance(self) -> _orchestration_pb2.OrchestrationInstance: + """The ID of the orchestration to send an event to.""" @_builtins.property def event(self) -> _history_events_pb2.HistoryEvent: - """The event to send to the workflow.""" + """The event to send to the orchestration.""" def __init__( self, *, - instance: _orchestration_pb2.WorkflowInstance | None = ..., + instance: _orchestration_pb2.OrchestrationInstance | None = ..., event: _history_events_pb2.HistoryEvent | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___AddEventRequest: _TypeAlias = AddEventRequest # noqa: Y015 @_typing.final class AddEventResponse(_message.Message): - """Response payload for adding new workflow events. + """Response payload for adding new orchestration events. No fields """ @@ -64,6 +65,11 @@ class AddEventResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___AddEventResponse: _TypeAlias = AddEventResponse # noqa: Y015 @@ -79,7 +85,7 @@ class CompleteActivityWorkItemRequest(_message.Message): """The completion token that was provided when the work item was fetched.""" @_builtins.property def responseEvent(self) -> _history_events_pb2.HistoryEvent: - """The response event that will be sent to the workflow. 
+ """The response event that will be sent to the orchestrator. This must be either a TaskCompleted event or a TaskFailed event. """ @@ -93,6 +99,7 @@ class CompleteActivityWorkItemRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "responseEvent", b"responseEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteActivityWorkItemRequest: _TypeAlias = CompleteActivityWorkItemRequest # noqa: Y015 @@ -107,12 +114,17 @@ class CompleteActivityWorkItemResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteActivityWorkItemResponse: _TypeAlias = CompleteActivityWorkItemResponse # noqa: Y015 @_typing.final -class CompleteWorkflowWorkItemRequest(_message.Message): - """Request payload for completing a workflow work item.""" +class CompleteOrchestrationWorkItemRequest(_message.Message): + """Request payload for completing an orchestration work item.""" DESCRIPTOR: _descriptor.Descriptor @@ -129,7 +141,7 @@ class CompleteWorkflowWorkItemRequest(_message.Message): """The completion token that was provided when the work item was fetched.""" runtimeStatus: _orchestration_pb2.OrchestrationStatus.ValueType @_builtins.property - def instance(self) -> _orchestration_pb2.WorkflowInstance: ... + def instance(self) -> _orchestration_pb2.OrchestrationInstance: ... @_builtins.property def customStatus(self) -> _wrappers_pb2.StringValue: ... 
@_builtins.property @@ -139,36 +151,37 @@ class CompleteWorkflowWorkItemRequest(_message.Message): @_builtins.property def newTimers(self) -> _containers.RepeatedCompositeFieldContainer[_history_events_pb2.HistoryEvent]: ... @_builtins.property - def newMessages(self) -> _containers.RepeatedCompositeFieldContainer[Global___WorkflowMessage]: ... + def newMessages(self) -> _containers.RepeatedCompositeFieldContainer[Global___OrchestratorMessage]: ... @_builtins.property def numEventsProcessed(self) -> _wrappers_pb2.Int32Value: - """The number of work item events that were processed by the workflow. - This field is optional. If not set, the service should assume that the workflow processed all events. + """The number of work item events that were processed by the orchestrator. + This field is optional. If not set, the service should assume that the orchestrator processed all events. """ def __init__( self, *, completionToken: _builtins.str = ..., - instance: _orchestration_pb2.WorkflowInstance | None = ..., + instance: _orchestration_pb2.OrchestrationInstance | None = ..., runtimeStatus: _orchestration_pb2.OrchestrationStatus.ValueType = ..., customStatus: _wrappers_pb2.StringValue | None = ..., newHistory: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., newTasks: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., newTimers: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., - newMessages: _abc.Iterable[Global___WorkflowMessage] | None = ..., + newMessages: _abc.Iterable[Global___OrchestratorMessage] | None = ..., numEventsProcessed: _wrappers_pb2.Int32Value | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["customStatus", b"customStatus", "instance", b"instance", "numEventsProcessed", b"numEventsProcessed"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "customStatus", b"customStatus", "instance", b"instance", "newHistory", b"newHistory", "newMessages", b"newMessages", "newTasks", b"newTasks", "newTimers", b"newTimers", "numEventsProcessed", b"numEventsProcessed", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___CompleteWorkflowWorkItemRequest: _TypeAlias = CompleteWorkflowWorkItemRequest # noqa: Y015 +Global___CompleteOrchestrationWorkItemRequest: _TypeAlias = CompleteOrchestrationWorkItemRequest # noqa: Y015 @_typing.final -class CompleteWorkflowWorkItemResponse(_message.Message): - """Response payload for completing a workflow work item. +class CompleteOrchestrationWorkItemResponse(_message.Message): + """Response payload for completing an orchestration work item. No fields """ @@ -177,40 +190,46 @@ class CompleteWorkflowWorkItemResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___CompleteWorkflowWorkItemResponse: _TypeAlias = CompleteWorkflowWorkItemResponse # noqa: Y015 +Global___CompleteOrchestrationWorkItemResponse: _TypeAlias = CompleteOrchestrationWorkItemResponse # noqa: Y015 @_typing.final -class WorkflowMessage(_message.Message): - """A message to be delivered to a workflow by the backend.""" +class OrchestratorMessage(_message.Message): + """A message to be delivered to an orchestration by the backend.""" DESCRIPTOR: _descriptor.Descriptor INSTANCE_FIELD_NUMBER: _builtins.int EVENT_FIELD_NUMBER: _builtins.int @_builtins.property - def instance(self) -> _orchestration_pb2.WorkflowInstance: - """The ID of the workflow instance to receive the message.""" + def instance(self) -> _orchestration_pb2.OrchestrationInstance: + """The ID of the orchestration instance to receive the message.""" @_builtins.property def event(self) -> _history_events_pb2.HistoryEvent: - """The event payload to be received by the target workflow.""" + """The event payload to be received by the target orchestration.""" def __init__( self, *, - instance: _orchestration_pb2.WorkflowInstance | None = ..., + instance: _orchestration_pb2.OrchestrationInstance | None = ..., event: _history_events_pb2.HistoryEvent | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["event", b"event", "instance", b"instance"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___WorkflowMessage: _TypeAlias = WorkflowMessage # noqa: Y015 +Global___OrchestratorMessage: _TypeAlias = OrchestratorMessage # noqa: Y015 @_typing.final -class BackendWorkflowState(_message.Message): +class WorkflowState(_message.Message): DESCRIPTOR: _descriptor.Descriptor INBOX_FIELD_NUMBER: _builtins.int @@ -236,30 +255,38 @@ class BackendWorkflowState(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["customStatus", b"customStatus", "generation", b"generation", "history", b"history", "inbox", b"inbox"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___BackendWorkflowState: _TypeAlias = BackendWorkflowState # noqa: Y015 +Global___WorkflowState: _TypeAlias = WorkflowState # noqa: Y015 @_typing.final class CreateWorkflowInstanceRequest(_message.Message): DESCRIPTOR: _descriptor.Descriptor STARTEVENT_FIELD_NUMBER: _builtins.int + POLICY_FIELD_NUMBER: _builtins.int @_builtins.property def startEvent(self) -> _history_events_pb2.HistoryEvent: ... + @_builtins.property + def policy(self) -> _orchestration_pb2.OrchestrationIdReusePolicy: ... def __init__( self, *, startEvent: _history_events_pb2.HistoryEvent | None = ..., + policy: _orchestration_pb2.OrchestrationIdReusePolicy | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_policy", b"_policy", "policy", b"policy", "startEvent", b"startEvent"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["startEvent", b"startEvent"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_policy", b"_policy", "policy", b"policy", "startEvent", b"startEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + _WhichOneofReturnType__policy: _TypeAlias = _typing.Literal["policy"] # noqa: Y015 + _WhichOneofArgType__policy: _TypeAlias = _typing.Literal["_policy", b"_policy"] # noqa: Y015 + def WhichOneof(self, oneof_group: _WhichOneofArgType__policy) -> _WhichOneofReturnType__policy | None: ... Global___CreateWorkflowInstanceRequest: _TypeAlias = CreateWorkflowInstanceRequest # noqa: Y015 @_typing.final -class WorkflowMetadata(_message.Message): +class OrchestrationMetadata(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -310,133 +337,34 @@ class WorkflowMetadata(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completedAt", b"completedAt", "createdAt", b"createdAt", "customStatus", b"customStatus", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedAt", b"lastUpdatedAt", "name", b"name", "output", b"output", "parentInstanceId", b"parentInstanceId", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___WorkflowMetadata: _TypeAlias = WorkflowMetadata # noqa: Y015 +Global___OrchestrationMetadata: _TypeAlias = OrchestrationMetadata # noqa: Y015 @_typing.final -class BackendWorkflowStateMetadata(_message.Message): +class WorkflowStateMetadata(_message.Message): DESCRIPTOR: _descriptor.Descriptor INBOXLENGTH_FIELD_NUMBER: _builtins.int HISTORYLENGTH_FIELD_NUMBER: _builtins.int GENERATION_FIELD_NUMBER: _builtins.int - SIGNATURELENGTH_FIELD_NUMBER: _builtins.int - SIGNINGCERTIFICATELENGTH_FIELD_NUMBER: _builtins.int inboxLength: _builtins.int historyLength: _builtins.int generation: _builtins.int - signatureLength: _builtins.int - """Number of HistorySignature entries stored (signature-NNNNNN keys).""" - signingCertificateLength: _builtins.int - """Number of SigningCertificate entries stored (sigcert-NNNNNN keys).""" def __init__( self, *, inboxLength: _builtins.int = ..., historyLength: _builtins.int = ..., generation: _builtins.int = ..., - signatureLength: _builtins.int = ..., - signingCertificateLength: _builtins.int = ..., - ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "historyLength", b"historyLength", "inboxLength", b"inboxLength", "signatureLength", b"signatureLength", "signingCertificateLength", b"signingCertificateLength"] # noqa: Y015 - def ClearField(self, field_name: _ClearFieldArgType) -> None: ... - -Global___BackendWorkflowStateMetadata: _TypeAlias = BackendWorkflowStateMetadata # noqa: Y015 - -@_typing.final -class SigningCertificate(_message.Message): - """A signing identity's X.509 certificate, stored once and referenced by index - from HistorySignature entries. This avoids duplicating the certificate - across every signature from the same identity. Stored as individual actor - state keys: sigcert-000000, sigcert-000001, etc. 
- """ - - DESCRIPTOR: _descriptor.Descriptor - - CERTIFICATE_FIELD_NUMBER: _builtins.int - certificate: _builtins.bytes - """X.509 certificate chain of the signing identity. Certificates are - DER-encoded and concatenated directly in order: leaf first, followed by - intermediates. Each certificate is a self-delimiting ASN.1 SEQUENCE, - so the chain can be parsed by reading consecutive DER structures. - """ - def __init__( - self, - *, - certificate: _builtins.bytes = ..., - ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["certificate", b"certificate"] # noqa: Y015 - def ClearField(self, field_name: _ClearFieldArgType) -> None: ... - -Global___SigningCertificate: _TypeAlias = SigningCertificate # noqa: Y015 - -@_typing.final -class HistorySignature(_message.Message): - """Signing metadata for a contiguous range of history events. - This is metadata-only — it does NOT contain the events themselves. - Events are stored once in history-NNNNNN keys; this message references them - by index range and stores only the signing artifacts. - Stored as individual actor state keys: signature-000000, signature-000001, - etc. - """ - - DESCRIPTOR: _descriptor.Descriptor - - STARTEVENTINDEX_FIELD_NUMBER: _builtins.int - EVENTCOUNT_FIELD_NUMBER: _builtins.int - PREVIOUSSIGNATUREDIGEST_FIELD_NUMBER: _builtins.int - EVENTSDIGEST_FIELD_NUMBER: _builtins.int - CERTIFICATEINDEX_FIELD_NUMBER: _builtins.int - SIGNATURE_FIELD_NUMBER: _builtins.int - startEventIndex: _builtins.int - """Index of the first event covered by this signature (inclusive).""" - eventCount: _builtins.int - """Number of events covered by this signature.""" - previousSignatureDigest: _builtins.bytes - """SHA-256 digest of the previous HistorySignature message (the entire - deterministically serialized protobuf message). Absent for the first - signature in the chain (no predecessor). When computing the signature - input for the root case, this value is treated as empty (zero-length). 
- """ - eventsDigest: _builtins.bytes - """SHA-256 digest over the concatenation of the raw serialized bytes of each - history event in this range, in order. The bytes are the exact values - persisted to the state store (one per history-NNNNNN key). - """ - certificateIndex: _builtins.int - """Index into the SigningCertificate table (sigcert-NNNNNN keys). - Multiple signatures from the same identity share the same index. - A new entry is appended only when the certificate rotates. - """ - signature: _builtins.bytes - """Cryptographic signature over - SHA-256(previousSignatureDigest || eventsDigest) - using the private key corresponding to the referenced certificate. - The algorithm is determined by the certificate's key type: - Ed25519: raw Ed25519 signature over the input bytes - ECDSA: fixed-size r||s over SHA-256(input), each component - zero-padded to the curve byte length - RSA: PKCS#1 v1.5 with SHA-256 - """ - def __init__( - self, - *, - startEventIndex: _builtins.int = ..., - eventCount: _builtins.int = ..., - previousSignatureDigest: _builtins.bytes | None = ..., - eventsDigest: _builtins.bytes = ..., - certificateIndex: _builtins.int = ..., - signature: _builtins.bytes = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_previousSignatureDigest", b"_previousSignatureDigest", "previousSignatureDigest", b"previousSignatureDigest"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_previousSignatureDigest", b"_previousSignatureDigest", "certificateIndex", b"certificateIndex", "eventCount", b"eventCount", "eventsDigest", b"eventsDigest", "previousSignatureDigest", b"previousSignatureDigest", "signature", b"signature", "startEventIndex", b"startEventIndex"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "historyLength", b"historyLength", "inboxLength", b"inboxLength"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... - _WhichOneofReturnType__previousSignatureDigest: _TypeAlias = _typing.Literal["previousSignatureDigest"] # noqa: Y015 - _WhichOneofArgType__previousSignatureDigest: _TypeAlias = _typing.Literal["_previousSignatureDigest", b"_previousSignatureDigest"] # noqa: Y015 - def WhichOneof(self, oneof_group: _WhichOneofArgType__previousSignatureDigest) -> _WhichOneofReturnType__previousSignatureDigest | None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___HistorySignature: _TypeAlias = HistorySignature # noqa: Y015 +Global___WorkflowStateMetadata: _TypeAlias = WorkflowStateMetadata # noqa: Y015 @_typing.final class DurableTimer(_message.Message): @@ -457,5 +385,6 @@ class DurableTimer(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["generation", b"generation", "timerEvent", b"timerEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___DurableTimer: _TypeAlias = DurableTimer # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py index 4cb95f3af..10a6c6cf9 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/backend_service_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py index feb7e313a..945dc0118 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.py @@ -27,7 +27,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xd6\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x34\n\x0eworkflowSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 
.ExecutionStartedEvent.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa2\x01\n\x17\x45xecutionCompletedEvent\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\x9e\x02\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"t\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"p\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"\xa8\x02\n!ChildWorkflowInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"l\n#ChildWorkflowInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"h\n ChildWorkflowInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\"\x18\n\x16TimerOriginCreateTimer\"(\n\x18TimerOriginExternalEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\"3\n\x18TimerOriginActivityRetry\x12\x17\n\x0ftaskExecutionId\x18\x01 \x01(\t\"3\n\x1dTimerOriginChildWorkflowRetry\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"\x97\x03\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x02\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x05 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x06 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x07 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"J\n\x14WorkflowStartedEvent\x12&\n\x07version\x18\x01 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x18\n\x16WorkflowCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xa8\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 \x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12J\n\x1c\x63hildWorkflowInstanceCreated\x18\t \x01(\x0b\x32\".ChildWorkflowInstanceCreatedEventH\x00\x12N\n\x1e\x63hildWorkflowInstanceCompleted\x18\n \x01(\x0b\x32$.ChildWorkflowInstanceCompletedEventH\x00\x12H\n\x1b\x63hildWorkflowInstanceFailed\x18\x0b \x01(\x0b\x32!.ChildWorkflowInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r \x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x30\n\x0fworkflowStarted\x18\x0e \x01(\x0b\x32\x15.WorkflowStartedEventH\x00\x12\x34\n\x11workflowCompleted\x18\x0f \x01(\x0b\x32\x17.WorkflowCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 \x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12 \n\x06router\x18\x1e 
\x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14history_events.proto\x1a\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xe5\x03\n\x15\x45xecutionStartedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x15orchestrationInstance\x18\x04 \x01(\x0b\x32\x16.OrchestrationInstance\x12+\n\x0eparentInstance\x18\x05 \x01(\x0b\x32\x13.ParentInstanceInfo\x12;\n\x17scheduledStartTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12)\n\x12parentTraceContext\x18\x07 \x01(\x0b\x32\r.TraceContext\x12\x39\n\x13orchestrationSpanID\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\t \x03(\x0b\x32 .ExecutionStartedEvent.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xa7\x01\n\x17\x45xecutionCompletedEvent\x12\x31\n\x13orchestrationStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x03 \x01(\x0b\x32\x13.TaskFailureDetails\"X\n\x18\x45xecutionTerminatedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x02 \x01(\x08\"\x9e\x02\n\x12TaskScheduledEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x04 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\t\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"t\n\x12TaskCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"p\n\x0fTaskFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0ftaskExecutionId\x18\x03 \x01(\t\"\xab\x02\n$SubOrchestrationInstanceCreatedEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x12parentTraceContext\x18\x05 \x01(\x0b\x32\r.TraceContext\x12>\n\x17rerunParentInstanceInfo\x18\x06 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x00\x88\x01\x01\x42\x1a\n\x18_rerunParentInstanceInfo\"o\n&SubOrchestrationInstanceCompletedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"k\n#SubOrchestrationInstanceFailedEvent\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12+\n\x0e\x66\x61ilureDetails\x18\x02 \x01(\x0b\x32\x13.TaskFailureDetails\"\xb7\x01\n\x11TimerCreatedEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x12>\n\x17rerunParentInstanceInfo\x18\x03 \x01(\x0b\x32\x18.RerunParentInstanceInfoH\x01\x88\x01\x01\x42\x07\n\x05_nameB\x1a\n\x18_rerunParentInstanceInfo\"N\n\x0fTimerFiredEvent\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07timerId\x18\x02 \x01(\x05\"S\n\x18OrchestratorStartedEvent\x12+\n\x07version\x18\x01 
\x01(\x0b\x32\x15.OrchestrationVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\x1c\n\x1aOrchestratorCompletedEvent\"_\n\x0e\x45ventSentEvent\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"M\n\x10\x45ventRaisedEvent\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x05input\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"A\n\x12\x43ontinueAsNewEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"F\n\x17\x45xecutionSuspendedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"D\n\x15\x45xecutionResumedEvent\x12+\n\x05input\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"a\n\x15\x45xecutionStalledEvent\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xca\t\n\x0cHistoryEvent\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\x05\x12-\n\ttimestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x10\x65xecutionStarted\x18\x03 \x01(\x0b\x32\x16.ExecutionStartedEventH\x00\x12\x36\n\x12\x65xecutionCompleted\x18\x04 \x01(\x0b\x32\x18.ExecutionCompletedEventH\x00\x12\x38\n\x13\x65xecutionTerminated\x18\x05 \x01(\x0b\x32\x19.ExecutionTerminatedEventH\x00\x12,\n\rtaskScheduled\x18\x06 \x01(\x0b\x32\x13.TaskScheduledEventH\x00\x12,\n\rtaskCompleted\x18\x07 \x01(\x0b\x32\x13.TaskCompletedEventH\x00\x12&\n\ntaskFailed\x18\x08 \x01(\x0b\x32\x10.TaskFailedEventH\x00\x12P\n\x1fsubOrchestrationInstanceCreated\x18\t \x01(\x0b\x32%.SubOrchestrationInstanceCreatedEventH\x00\x12T\n!subOrchestrationInstanceCompleted\x18\n \x01(\x0b\x32\'.SubOrchestrationInstanceCompletedEventH\x00\x12N\n\x1esubOrchestrationInstanceFailed\x18\x0b \x01(\x0b\x32$.SubOrchestrationInstanceFailedEventH\x00\x12*\n\x0ctimerCreated\x18\x0c \x01(\x0b\x32\x12.TimerCreatedEventH\x00\x12&\n\ntimerFired\x18\r 
\x01(\x0b\x32\x10.TimerFiredEventH\x00\x12\x38\n\x13orchestratorStarted\x18\x0e \x01(\x0b\x32\x19.OrchestratorStartedEventH\x00\x12<\n\x15orchestratorCompleted\x18\x0f \x01(\x0b\x32\x1b.OrchestratorCompletedEventH\x00\x12$\n\teventSent\x18\x10 \x01(\x0b\x32\x0f.EventSentEventH\x00\x12(\n\x0b\x65ventRaised\x18\x11 \x01(\x0b\x32\x11.EventRaisedEventH\x00\x12,\n\rcontinueAsNew\x18\x14 \x01(\x0b\x32\x13.ContinueAsNewEventH\x00\x12\x36\n\x12\x65xecutionSuspended\x18\x15 \x01(\x0b\x32\x18.ExecutionSuspendedEventH\x00\x12\x32\n\x10\x65xecutionResumed\x18\x16 \x01(\x0b\x32\x16.ExecutionResumedEventH\x00\x12\x32\n\x10\x65xecutionStalled\x18\x1f \x01(\x0b\x32\x16.ExecutionStalledEventH\x00\x12 \n\x06router\x18\x1e \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x0b\n\teventTypeB\t\n\x07_routerJ\x04\x08\x12\x10\x13J\x04\x08\x13\x10\x14J\x04\x08\x17\x10\x18J\x04\x08\x18\x10\x19J\x04\x08\x19\x10\x1aJ\x04\x08\x1a\x10\x1bJ\x04\x08\x1b\x10\x1cJ\x04\x08\x1c\x10\x1dJ\x04\x08\x1d\x10\x1e\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,53 +38,45 @@ _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._loaded_options = None _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_options = b'8\001' _globals['_EXECUTIONSTARTEDEVENT']._serialized_start=111 - _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=581 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=538 - _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=581 - _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=584 - _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=746 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=748 - _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=836 - _globals['_TASKSCHEDULEDEVENT']._serialized_start=839 - _globals['_TASKSCHEDULEDEVENT']._serialized_end=1125 - 
_globals['_TASKCOMPLETEDEVENT']._serialized_start=1127 - _globals['_TASKCOMPLETEDEVENT']._serialized_end=1243 - _globals['_TASKFAILEDEVENT']._serialized_start=1245 - _globals['_TASKFAILEDEVENT']._serialized_end=1357 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_start=1360 - _globals['_CHILDWORKFLOWINSTANCECREATEDEVENT']._serialized_end=1656 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_start=1658 - _globals['_CHILDWORKFLOWINSTANCECOMPLETEDEVENT']._serialized_end=1766 - _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_start=1768 - _globals['_CHILDWORKFLOWINSTANCEFAILEDEVENT']._serialized_end=1872 - _globals['_TIMERORIGINCREATETIMER']._serialized_start=1874 - _globals['_TIMERORIGINCREATETIMER']._serialized_end=1898 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_start=1900 - _globals['_TIMERORIGINEXTERNALEVENT']._serialized_end=1940 - _globals['_TIMERORIGINACTIVITYRETRY']._serialized_start=1942 - _globals['_TIMERORIGINACTIVITYRETRY']._serialized_end=1993 - _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_start=1995 - _globals['_TIMERORIGINCHILDWORKFLOWRETRY']._serialized_end=2046 - _globals['_TIMERCREATEDEVENT']._serialized_start=2049 - _globals['_TIMERCREATEDEVENT']._serialized_end=2456 - _globals['_TIMERFIREDEVENT']._serialized_start=2458 - _globals['_TIMERFIREDEVENT']._serialized_end=2536 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_start=2538 - _globals['_WORKFLOWSTARTEDEVENT']._serialized_end=2612 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_start=2614 - _globals['_WORKFLOWCOMPLETEDEVENT']._serialized_end=2638 - _globals['_EVENTSENTEVENT']._serialized_start=2640 - _globals['_EVENTSENTEVENT']._serialized_end=2735 - _globals['_EVENTRAISEDEVENT']._serialized_start=2737 - _globals['_EVENTRAISEDEVENT']._serialized_end=2814 - _globals['_CONTINUEASNEWEVENT']._serialized_start=2816 - _globals['_CONTINUEASNEWEVENT']._serialized_end=2881 - _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=2883 - 
_globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=2953 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=2955 - _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=3023 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=3025 - _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=3122 - _globals['_HISTORYEVENT']._serialized_start=3125 - _globals['_HISTORYEVENT']._serialized_end=4317 + _globals['_EXECUTIONSTARTEDEVENT']._serialized_end=596 + _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_start=553 + _globals['_EXECUTIONSTARTEDEVENT_TAGSENTRY']._serialized_end=596 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_start=599 + _globals['_EXECUTIONCOMPLETEDEVENT']._serialized_end=766 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_start=768 + _globals['_EXECUTIONTERMINATEDEVENT']._serialized_end=856 + _globals['_TASKSCHEDULEDEVENT']._serialized_start=859 + _globals['_TASKSCHEDULEDEVENT']._serialized_end=1145 + _globals['_TASKCOMPLETEDEVENT']._serialized_start=1147 + _globals['_TASKCOMPLETEDEVENT']._serialized_end=1263 + _globals['_TASKFAILEDEVENT']._serialized_start=1265 + _globals['_TASKFAILEDEVENT']._serialized_end=1377 + _globals['_SUBORCHESTRATIONINSTANCECREATEDEVENT']._serialized_start=1380 + _globals['_SUBORCHESTRATIONINSTANCECREATEDEVENT']._serialized_end=1679 + _globals['_SUBORCHESTRATIONINSTANCECOMPLETEDEVENT']._serialized_start=1681 + _globals['_SUBORCHESTRATIONINSTANCECOMPLETEDEVENT']._serialized_end=1792 + _globals['_SUBORCHESTRATIONINSTANCEFAILEDEVENT']._serialized_start=1794 + _globals['_SUBORCHESTRATIONINSTANCEFAILEDEVENT']._serialized_end=1901 + _globals['_TIMERCREATEDEVENT']._serialized_start=1904 + _globals['_TIMERCREATEDEVENT']._serialized_end=2087 + _globals['_TIMERFIREDEVENT']._serialized_start=2089 + _globals['_TIMERFIREDEVENT']._serialized_end=2167 + _globals['_ORCHESTRATORSTARTEDEVENT']._serialized_start=2169 + _globals['_ORCHESTRATORSTARTEDEVENT']._serialized_end=2252 + 
_globals['_ORCHESTRATORCOMPLETEDEVENT']._serialized_start=2254 + _globals['_ORCHESTRATORCOMPLETEDEVENT']._serialized_end=2282 + _globals['_EVENTSENTEVENT']._serialized_start=2284 + _globals['_EVENTSENTEVENT']._serialized_end=2379 + _globals['_EVENTRAISEDEVENT']._serialized_start=2381 + _globals['_EVENTRAISEDEVENT']._serialized_end=2458 + _globals['_CONTINUEASNEWEVENT']._serialized_start=2460 + _globals['_CONTINUEASNEWEVENT']._serialized_end=2525 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_start=2527 + _globals['_EXECUTIONSUSPENDEDEVENT']._serialized_end=2597 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_start=2599 + _globals['_EXECUTIONRESUMEDEVENT']._serialized_end=2667 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_start=2669 + _globals['_EXECUTIONSTALLEDEVENT']._serialized_end=2766 + _globals['_HISTORYEVENT']._serialized_start=2769 + _globals['_HISTORYEVENT']._serialized_end=3995 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi index 66ba6ba67..e456bb1ba 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2.pyi @@ -16,10 +16,10 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -41,17 +41,20 @@ class ExecutionStartedEvent(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... 
+ _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int INPUT_FIELD_NUMBER: _builtins.int - WORKFLOWINSTANCE_FIELD_NUMBER: _builtins.int + ORCHESTRATIONINSTANCE_FIELD_NUMBER: _builtins.int PARENTINSTANCE_FIELD_NUMBER: _builtins.int SCHEDULEDSTARTTIMESTAMP_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int - WORKFLOWSPANID_FIELD_NUMBER: _builtins.int + ORCHESTRATIONSPANID_FIELD_NUMBER: _builtins.int TAGS_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property @@ -59,7 +62,7 @@ class ExecutionStartedEvent(_message.Message): @_builtins.property def input(self) -> _wrappers_pb2.StringValue: ... @_builtins.property - def workflowInstance(self) -> _orchestration_pb2.WorkflowInstance: ... + def orchestrationInstance(self) -> _orchestration_pb2.OrchestrationInstance: ... @_builtins.property def parentInstance(self) -> _orchestration_pb2.ParentInstanceInfo: ... @_builtins.property @@ -67,7 +70,7 @@ class ExecutionStartedEvent(_message.Message): @_builtins.property def parentTraceContext(self) -> _orchestration_pb2.TraceContext: ... @_builtins.property - def workflowSpanID(self) -> _wrappers_pb2.StringValue: ... + def orchestrationSpanID(self) -> _wrappers_pb2.StringValue: ... @_builtins.property def tags(self) -> _containers.ScalarMap[_builtins.str, _builtins.str]: ... 
def __init__( @@ -76,17 +79,18 @@ class ExecutionStartedEvent(_message.Message): name: _builtins.str = ..., version: _wrappers_pb2.StringValue | None = ..., input: _wrappers_pb2.StringValue | None = ..., - workflowInstance: _orchestration_pb2.WorkflowInstance | None = ..., + orchestrationInstance: _orchestration_pb2.OrchestrationInstance | None = ..., parentInstance: _orchestration_pb2.ParentInstanceInfo | None = ..., scheduledStartTimestamp: _timestamp_pb2.Timestamp | None = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., - workflowSpanID: _wrappers_pb2.StringValue | None = ..., + orchestrationSpanID: _wrappers_pb2.StringValue | None = ..., tags: _abc.Mapping[_builtins.str, _builtins.str] | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version", "workflowInstance", b"workflowInstance", "workflowSpanID", b"workflowSpanID"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "orchestrationInstance", b"orchestrationInstance", "orchestrationSpanID", b"orchestrationSpanID", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version", "workflowInstance", b"workflowInstance", "workflowSpanID", b"workflowSpanID"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "orchestrationInstance", b"orchestrationInstance", "orchestrationSpanID", b"orchestrationSpanID", "parentInstance", b"parentInstance", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionStartedEvent: _TypeAlias = ExecutionStartedEvent # noqa: Y015 @@ -94,10 +98,10 @@ Global___ExecutionStartedEvent: _TypeAlias = ExecutionStartedEvent # noqa: Y015 class ExecutionCompletedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor - WORKFLOWSTATUS_FIELD_NUMBER: _builtins.int + ORCHESTRATIONSTATUS_FIELD_NUMBER: _builtins.int RESULT_FIELD_NUMBER: _builtins.int FAILUREDETAILS_FIELD_NUMBER: _builtins.int - workflowStatus: _orchestration_pb2.OrchestrationStatus.ValueType + orchestrationStatus: _orchestration_pb2.OrchestrationStatus.ValueType @_builtins.property def result(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -105,14 +109,15 @@ class ExecutionCompletedEvent(_message.Message): def __init__( self, *, - workflowStatus: _orchestration_pb2.OrchestrationStatus.ValueType = ..., + orchestrationStatus: _orchestration_pb2.OrchestrationStatus.ValueType = ..., result: _wrappers_pb2.StringValue | None = ..., failureDetails: _orchestration_pb2.TaskFailureDetails | None = ..., ) -> None: ... 
_HasFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "result", b"result"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "result", b"result", "workflowStatus", b"workflowStatus"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "orchestrationStatus", b"orchestrationStatus", "result", b"result"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionCompletedEvent: _TypeAlias = ExecutionCompletedEvent # noqa: Y015 @@ -135,6 +140,7 @@ class ExecutionTerminatedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "recurse", b"recurse"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionTerminatedEvent: _TypeAlias = ExecutionTerminatedEvent # noqa: Y015 @@ -204,6 +210,7 @@ class TaskCompletedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TaskCompletedEvent: _TypeAlias = TaskCompletedEvent # noqa: Y015 @@ -229,11 +236,12 @@ class TaskFailedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskExecutionId", b"taskExecutionId", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TaskFailedEvent: _TypeAlias = TaskFailedEvent # noqa: Y015 @_typing.final -class ChildWorkflowInstanceCreatedEvent(_message.Message): +class SubOrchestrationInstanceCreatedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -274,10 +282,10 @@ class ChildWorkflowInstanceCreatedEvent(_message.Message): _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... -Global___ChildWorkflowInstanceCreatedEvent: _TypeAlias = ChildWorkflowInstanceCreatedEvent # noqa: Y015 +Global___SubOrchestrationInstanceCreatedEvent: _TypeAlias = SubOrchestrationInstanceCreatedEvent # noqa: Y015 @_typing.final -class ChildWorkflowInstanceCompletedEvent(_message.Message): +class SubOrchestrationInstanceCompletedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int @@ -295,11 +303,12 @@ class ChildWorkflowInstanceCompletedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["result", b"result", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___ChildWorkflowInstanceCompletedEvent: _TypeAlias = ChildWorkflowInstanceCompletedEvent # noqa: Y015 +Global___SubOrchestrationInstanceCompletedEvent: _TypeAlias = SubOrchestrationInstanceCompletedEvent # noqa: Y015 @_typing.final -class ChildWorkflowInstanceFailedEvent(_message.Message): +class SubOrchestrationInstanceFailedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int @@ -317,77 +326,9 @@ class ChildWorkflowInstanceFailedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["failureDetails", b"failureDetails", "taskScheduledId", b"taskScheduledId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___ChildWorkflowInstanceFailedEvent: _TypeAlias = ChildWorkflowInstanceFailedEvent # noqa: Y015 - -@_typing.final -class TimerOriginCreateTimer(_message.Message): - """Indicates the timer was created by a createTimer call with no special origin.""" - - DESCRIPTOR: _descriptor.Descriptor - - def __init__( - self, - ) -> None: ... - -Global___TimerOriginCreateTimer: _TypeAlias = TimerOriginCreateTimer # noqa: Y015 - -@_typing.final -class TimerOriginExternalEvent(_message.Message): - """Indicates the timer was created as a timeout for a waitForExternalEvent call.""" - - DESCRIPTOR: _descriptor.Descriptor - - NAME_FIELD_NUMBER: _builtins.int - name: _builtins.str - """The name of the external event being waited on, matching EventRaisedEvent.name.""" - def __init__( - self, - *, - name: _builtins.str = ..., - ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["name", b"name"] # noqa: Y015 - def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
- -Global___TimerOriginExternalEvent: _TypeAlias = TimerOriginExternalEvent # noqa: Y015 - -@_typing.final -class TimerOriginActivityRetry(_message.Message): - """Indicates the timer was created as a retry delay for an activity execution.""" - - DESCRIPTOR: _descriptor.Descriptor - - TASKEXECUTIONID_FIELD_NUMBER: _builtins.int - taskExecutionId: _builtins.str - """The task execution ID of the activity being retried.""" - def __init__( - self, - *, - taskExecutionId: _builtins.str = ..., - ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["taskExecutionId", b"taskExecutionId"] # noqa: Y015 - def ClearField(self, field_name: _ClearFieldArgType) -> None: ... - -Global___TimerOriginActivityRetry: _TypeAlias = TimerOriginActivityRetry # noqa: Y015 - -@_typing.final -class TimerOriginChildWorkflowRetry(_message.Message): - """Indicates the timer was created as a retry delay for a child workflow execution.""" - - DESCRIPTOR: _descriptor.Descriptor - - INSTANCEID_FIELD_NUMBER: _builtins.int - instanceId: _builtins.str - """The instance ID of the workflow being retried.""" - def __init__( - self, - *, - instanceId: _builtins.str = ..., - ) -> None: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 - def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
- -Global___TimerOriginChildWorkflowRetry: _TypeAlias = TimerOriginChildWorkflowRetry # noqa: Y015 +Global___SubOrchestrationInstanceFailedEvent: _TypeAlias = SubOrchestrationInstanceFailedEvent # noqa: Y015 @_typing.final class TimerCreatedEvent(_message.Message): @@ -396,10 +337,6 @@ class TimerCreatedEvent(_message.Message): FIREAT_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int RERUNPARENTINSTANCEINFO_FIELD_NUMBER: _builtins.int - CREATETIMER_FIELD_NUMBER: _builtins.int - EXTERNALEVENT_FIELD_NUMBER: _builtins.int - ACTIVITYRETRY_FIELD_NUMBER: _builtins.int - CHILDWORKFLOWRETRY_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property def fireAt(self) -> _timestamp_pb2.Timestamp: ... @@ -409,41 +346,25 @@ class TimerCreatedEvent(_message.Message): workflow execution as the result of a rerun operation. """ - @_builtins.property - def createTimer(self) -> Global___TimerOriginCreateTimer: ... - @_builtins.property - def externalEvent(self) -> Global___TimerOriginExternalEvent: ... - @_builtins.property - def activityRetry(self) -> Global___TimerOriginActivityRetry: ... - @_builtins.property - def childWorkflowRetry(self) -> Global___TimerOriginChildWorkflowRetry: ... def __init__( self, *, fireAt: _timestamp_pb2.Timestamp | None = ..., name: _builtins.str | None = ..., rerunParentInstanceInfo: _orchestration_pb2.RerunParentInstanceInfo | None = ..., - createTimer: Global___TimerOriginCreateTimer | None = ..., - externalEvent: Global___TimerOriginExternalEvent | None = ..., - activityRetry: Global___TimerOriginActivityRetry | None = ..., - childWorkflowRetry: Global___TimerOriginChildWorkflowRetry | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "fireAt", b"fireAt", "name", b"name", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "_rerunParentInstanceInfo", b"_rerunParentInstanceInfo", "fireAt", b"fireAt", "name", b"name", "rerunParentInstanceInfo", b"rerunParentInstanceInfo"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__name: _TypeAlias = _typing.Literal["name"] # noqa: Y015 _WhichOneofArgType__name: _TypeAlias = _typing.Literal["_name", b"_name"] # noqa: Y015 _WhichOneofReturnType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["rerunParentInstanceInfo"] # noqa: Y015 _WhichOneofArgType__rerunParentInstanceInfo: _TypeAlias = _typing.Literal["_rerunParentInstanceInfo", b"_rerunParentInstanceInfo"] # noqa: Y015 - _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent", "activityRetry", "childWorkflowRetry"] # noqa: Y015 - _WhichOneofArgType_origin: _TypeAlias = _typing.Literal["origin", b"origin"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__name) -> _WhichOneofReturnType__name | None: ... @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__rerunParentInstanceInfo) -> _WhichOneofReturnType__rerunParentInstanceInfo | None: ... - @_typing.overload - def WhichOneof(self, oneof_group: _WhichOneofArgType_origin) -> _WhichOneofReturnType_origin | None: ... Global___TimerCreatedEvent: _TypeAlias = TimerCreatedEvent # noqa: Y015 @@ -466,20 +387,21 @@ class TimerFiredEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["fireAt", b"fireAt", "timerId", b"timerId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TimerFiredEvent: _TypeAlias = TimerFiredEvent # noqa: Y015 @_typing.final -class WorkflowStartedEvent(_message.Message): +class OrchestratorStartedEvent(_message.Message): DESCRIPTOR: _descriptor.Descriptor VERSION_FIELD_NUMBER: _builtins.int @_builtins.property - def version(self) -> _orchestration_pb2.WorkflowVersion: ... + def version(self) -> _orchestration_pb2.OrchestrationVersion: ... 
def __init__( self, *, - version: _orchestration_pb2.WorkflowVersion | None = ..., + version: _orchestration_pb2.OrchestrationVersion | None = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["_version", b"_version", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... @@ -489,10 +411,10 @@ class WorkflowStartedEvent(_message.Message): _WhichOneofArgType__version: _TypeAlias = _typing.Literal["_version", b"_version"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__version) -> _WhichOneofReturnType__version | None: ... -Global___WorkflowStartedEvent: _TypeAlias = WorkflowStartedEvent # noqa: Y015 +Global___OrchestratorStartedEvent: _TypeAlias = OrchestratorStartedEvent # noqa: Y015 @_typing.final -class WorkflowCompletedEvent(_message.Message): +class OrchestratorCompletedEvent(_message.Message): """No payload data""" DESCRIPTOR: _descriptor.Descriptor @@ -500,8 +422,13 @@ class WorkflowCompletedEvent(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___WorkflowCompletedEvent: _TypeAlias = WorkflowCompletedEvent # noqa: Y015 +Global___OrchestratorCompletedEvent: _TypeAlias = OrchestratorCompletedEvent # noqa: Y015 @_typing.final class EventSentEvent(_message.Message): @@ -525,6 +452,7 @@ class EventSentEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "instanceId", b"instanceId", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___EventSentEvent: _TypeAlias = EventSentEvent # noqa: Y015 @@ -547,6 +475,7 @@ class EventRaisedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___EventRaisedEvent: _TypeAlias = EventRaisedEvent # noqa: Y015 @@ -566,6 +495,7 @@ class ContinueAsNewEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ContinueAsNewEvent: _TypeAlias = ContinueAsNewEvent # noqa: Y015 @@ -585,6 +515,7 @@ class ExecutionSuspendedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ExecutionSuspendedEvent: _TypeAlias = ExecutionSuspendedEvent # noqa: Y015 @@ -604,6 +535,7 @@ class ExecutionResumedEvent(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___ExecutionResumedEvent: _TypeAlias = ExecutionResumedEvent # noqa: Y015 @@ -643,13 +575,13 @@ class HistoryEvent(_message.Message): TASKSCHEDULED_FIELD_NUMBER: _builtins.int TASKCOMPLETED_FIELD_NUMBER: _builtins.int TASKFAILED_FIELD_NUMBER: _builtins.int - CHILDWORKFLOWINSTANCECREATED_FIELD_NUMBER: _builtins.int - CHILDWORKFLOWINSTANCECOMPLETED_FIELD_NUMBER: _builtins.int - CHILDWORKFLOWINSTANCEFAILED_FIELD_NUMBER: _builtins.int + SUBORCHESTRATIONINSTANCECREATED_FIELD_NUMBER: _builtins.int + SUBORCHESTRATIONINSTANCECOMPLETED_FIELD_NUMBER: _builtins.int + SUBORCHESTRATIONINSTANCEFAILED_FIELD_NUMBER: _builtins.int TIMERCREATED_FIELD_NUMBER: _builtins.int TIMERFIRED_FIELD_NUMBER: _builtins.int - WORKFLOWSTARTED_FIELD_NUMBER: _builtins.int - WORKFLOWCOMPLETED_FIELD_NUMBER: _builtins.int + ORCHESTRATORSTARTED_FIELD_NUMBER: _builtins.int + ORCHESTRATORCOMPLETED_FIELD_NUMBER: _builtins.int EVENTSENT_FIELD_NUMBER: _builtins.int EVENTRAISED_FIELD_NUMBER: _builtins.int CONTINUEASNEW_FIELD_NUMBER: _builtins.int @@ -673,19 +605,19 @@ class HistoryEvent(_message.Message): @_builtins.property def taskFailed(self) -> Global___TaskFailedEvent: ... @_builtins.property - def childWorkflowInstanceCreated(self) -> Global___ChildWorkflowInstanceCreatedEvent: ... + def subOrchestrationInstanceCreated(self) -> Global___SubOrchestrationInstanceCreatedEvent: ... @_builtins.property - def childWorkflowInstanceCompleted(self) -> Global___ChildWorkflowInstanceCompletedEvent: ... + def subOrchestrationInstanceCompleted(self) -> Global___SubOrchestrationInstanceCompletedEvent: ... @_builtins.property - def childWorkflowInstanceFailed(self) -> Global___ChildWorkflowInstanceFailedEvent: ... + def subOrchestrationInstanceFailed(self) -> Global___SubOrchestrationInstanceFailedEvent: ... @_builtins.property def timerCreated(self) -> Global___TimerCreatedEvent: ... @_builtins.property def timerFired(self) -> Global___TimerFiredEvent: ... 
@_builtins.property - def workflowStarted(self) -> Global___WorkflowStartedEvent: ... + def orchestratorStarted(self) -> Global___OrchestratorStartedEvent: ... @_builtins.property - def workflowCompleted(self) -> Global___WorkflowCompletedEvent: ... + def orchestratorCompleted(self) -> Global___OrchestratorCompletedEvent: ... @_builtins.property def eventSent(self) -> Global___EventSentEvent: ... @_builtins.property @@ -711,13 +643,13 @@ class HistoryEvent(_message.Message): taskScheduled: Global___TaskScheduledEvent | None = ..., taskCompleted: Global___TaskCompletedEvent | None = ..., taskFailed: Global___TaskFailedEvent | None = ..., - childWorkflowInstanceCreated: Global___ChildWorkflowInstanceCreatedEvent | None = ..., - childWorkflowInstanceCompleted: Global___ChildWorkflowInstanceCompletedEvent | None = ..., - childWorkflowInstanceFailed: Global___ChildWorkflowInstanceFailedEvent | None = ..., + subOrchestrationInstanceCreated: Global___SubOrchestrationInstanceCreatedEvent | None = ..., + subOrchestrationInstanceCompleted: Global___SubOrchestrationInstanceCompletedEvent | None = ..., + subOrchestrationInstanceFailed: Global___SubOrchestrationInstanceFailedEvent | None = ..., timerCreated: Global___TimerCreatedEvent | None = ..., timerFired: Global___TimerFiredEvent | None = ..., - workflowStarted: Global___WorkflowStartedEvent | None = ..., - workflowCompleted: Global___WorkflowCompletedEvent | None = ..., + orchestratorStarted: Global___OrchestratorStartedEvent | None = ..., + orchestratorCompleted: Global___OrchestratorCompletedEvent | None = ..., eventSent: Global___EventSentEvent | None = ..., eventRaised: Global___EventRaisedEvent | None = ..., continueAsNew: Global___ContinueAsNewEvent | None = ..., @@ -726,13 +658,13 @@ class HistoryEvent(_message.Message): executionStalled: Global___ExecutionStalledEvent | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "continueAsNew", b"continueAsNew", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "orchestratorCompleted", b"orchestratorCompleted", "orchestratorStarted", b"orchestratorStarted", "router", b"router", "subOrchestrationInstanceCompleted", b"subOrchestrationInstanceCompleted", "subOrchestrationInstanceCreated", b"subOrchestrationInstanceCreated", "subOrchestrationInstanceFailed", b"subOrchestrationInstanceFailed", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> 
_builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "childWorkflowInstanceCompleted", b"childWorkflowInstanceCompleted", "childWorkflowInstanceCreated", b"childWorkflowInstanceCreated", "childWorkflowInstanceFailed", b"childWorkflowInstanceFailed", "continueAsNew", b"continueAsNew", "eventId", b"eventId", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "router", b"router", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp", "workflowCompleted", b"workflowCompleted", "workflowStarted", b"workflowStarted"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "continueAsNew", b"continueAsNew", "eventId", b"eventId", "eventRaised", b"eventRaised", "eventSent", b"eventSent", "eventType", b"eventType", "executionCompleted", b"executionCompleted", "executionResumed", b"executionResumed", "executionStalled", b"executionStalled", "executionStarted", b"executionStarted", "executionSuspended", b"executionSuspended", "executionTerminated", b"executionTerminated", "orchestratorCompleted", b"orchestratorCompleted", "orchestratorStarted", b"orchestratorStarted", "router", b"router", "subOrchestrationInstanceCompleted", b"subOrchestrationInstanceCompleted", "subOrchestrationInstanceCreated", b"subOrchestrationInstanceCreated", "subOrchestrationInstanceFailed", b"subOrchestrationInstanceFailed", "taskCompleted", b"taskCompleted", "taskFailed", b"taskFailed", "taskScheduled", b"taskScheduled", "timerCreated", b"timerCreated", "timerFired", b"timerFired", "timestamp", b"timestamp"] # 
noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 - _WhichOneofReturnType_eventType: _TypeAlias = _typing.Literal["executionStarted", "executionCompleted", "executionTerminated", "taskScheduled", "taskCompleted", "taskFailed", "childWorkflowInstanceCreated", "childWorkflowInstanceCompleted", "childWorkflowInstanceFailed", "timerCreated", "timerFired", "workflowStarted", "workflowCompleted", "eventSent", "eventRaised", "continueAsNew", "executionSuspended", "executionResumed", "executionStalled"] # noqa: Y015 + _WhichOneofReturnType_eventType: _TypeAlias = _typing.Literal["executionStarted", "executionCompleted", "executionTerminated", "taskScheduled", "taskCompleted", "taskFailed", "subOrchestrationInstanceCreated", "subOrchestrationInstanceCompleted", "subOrchestrationInstanceFailed", "timerCreated", "timerFired", "orchestratorStarted", "orchestratorCompleted", "eventSent", "eventRaised", "continueAsNew", "executionSuspended", "executionResumed", "executionStalled"] # noqa: Y015 _WhichOneofArgType_eventType: _TypeAlias = _typing.Literal["eventType", b"eventType"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... 
diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py index 8a738f5a2..201aad188 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/history_events_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py index f9727296e..095da79f2 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.py @@ -26,7 +26,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"K\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_targetAppID\">\n\x0fWorkflowVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"Y\n\x10WorkflowInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 
\x01(\x08\"\xd3\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x12\n\x05\x61ppID\x18\x05 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_appID\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 \x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xef\x05\n\rWorkflowState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x0eworkflowStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x04tags\x18\x0f \x03(\x0b\x32\x18.WorkflowState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01*>\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01*\xd7\x02\n\x13OrchestrationStatus\x12 
\n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 \n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13orchestration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"K\n\nTaskRouter\x12\x13\n\x0bsourceAppID\x18\x01 \x01(\t\x12\x18\n\x0btargetAppID\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_targetAppID\"C\n\x14OrchestrationVersion\x12\x0f\n\x07patches\x18\x01 \x03(\t\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"^\n\x15OrchestrationInstance\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb2\x01\n\x12TaskFailureDetails\x12\x11\n\terrorType\x18\x01 \x01(\t\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x30\n\nstackTrace\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12)\n\x0cinnerFailure\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x16\n\x0eisNonRetriable\x18\x05 \x01(\x08\"\xdd\x01\n\x12ParentInstanceInfo\x12\x17\n\x0ftaskScheduledId\x18\x01 \x01(\x05\x12*\n\x04name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x15orchestrationInstance\x18\x04 \x01(\x0b\x32\x16.OrchestrationInstance\x12\x12\n\x05\x61ppID\x18\x05 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_appID\"-\n\x17RerunParentInstanceInfo\x12\x12\n\ninstanceID\x18\x01 \x01(\t\"i\n\x0cTraceContext\x12\x13\n\x0btraceParent\x18\x01 \x01(\t\x12\x12\n\x06spanID\x18\x02 
\x01(\tB\x02\x18\x01\x12\x30\n\ntraceState\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"w\n\x1aOrchestrationIdReusePolicy\x12-\n\x0foperationStatus\x18\x01 \x03(\x0e\x32\x14.OrchestrationStatus\x12*\n\x06\x61\x63tion\x18\x02 \x01(\x0e\x32\x1a.CreateOrchestrationAction\"\xfe\x05\n\x12OrchestrationState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x31\n\x13orchestrationStatus\x18\x04 \x01(\x0e\x32\x14.OrchestrationStatus\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x63reatedTimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14lastUpdatedTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x05input\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12,\n\x06output\x18\t \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0c\x63ustomStatus\x18\n \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x0b \x01(\x0b\x32\x13.TaskFailureDetails\x12\x31\n\x0b\x65xecutionId\x18\x0c \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x12\x63ompletedTimestamp\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x10parentInstanceId\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x04tags\x18\x0f \x03(\x0b\x32\x1d.OrchestrationState.TagsEntry\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01*>\n\rStalledReason\x12\x12\n\x0ePATCH_MISMATCH\x10\x00\x12\x19\n\x15VERSION_NOT_AVAILABLE\x10\x01*\xd7\x02\n\x13OrchestrationStatus\x12 \n\x1cORCHESTRATION_STATUS_RUNNING\x10\x00\x12\"\n\x1eORCHESTRATION_STATUS_COMPLETED\x10\x01\x12)\n%ORCHESTRATION_STATUS_CONTINUED_AS_NEW\x10\x02\x12\x1f\n\x1bORCHESTRATION_STATUS_FAILED\x10\x03\x12!\n\x1dORCHESTRATION_STATUS_CANCELED\x10\x04\x12#\n\x1fORCHESTRATION_STATUS_TERMINATED\x10\x05\x12 
\n\x1cORCHESTRATION_STATUS_PENDING\x10\x06\x12\"\n\x1eORCHESTRATION_STATUS_SUSPENDED\x10\x07\x12 \n\x1cORCHESTRATION_STATUS_STALLED\x10\x08*A\n\x19\x43reateOrchestrationAction\x12\t\n\x05\x45RROR\x10\x00\x12\n\n\x06IGNORE\x10\x01\x12\r\n\tTERMINATE\x10\x02\x42V\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -36,28 +36,32 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_TRACECONTEXT'].fields_by_name['spanID']._loaded_options = None _globals['_TRACECONTEXT'].fields_by_name['spanID']._serialized_options = b'\030\001' - _globals['_WORKFLOWSTATE_TAGSENTRY']._loaded_options = None - _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_options = b'8\001' - _globals['_STALLEDREASON']._serialized_start=1623 - _globals['_STALLEDREASON']._serialized_end=1685 - _globals['_ORCHESTRATIONSTATUS']._serialized_start=1688 - _globals['_ORCHESTRATIONSTATUS']._serialized_end=2031 + _globals['_ORCHESTRATIONSTATE_TAGSENTRY']._loaded_options = None + _globals['_ORCHESTRATIONSTATE_TAGSENTRY']._serialized_options = b'8\001' + _globals['_STALLEDREASON']._serialized_start=1779 + _globals['_STALLEDREASON']._serialized_end=1841 + _globals['_ORCHESTRATIONSTATUS']._serialized_start=1844 + _globals['_ORCHESTRATIONSTATUS']._serialized_end=2187 + _globals['_CREATEORCHESTRATIONACTION']._serialized_start=2189 + _globals['_CREATEORCHESTRATIONACTION']._serialized_end=2254 _globals['_TASKROUTER']._serialized_start=88 _globals['_TASKROUTER']._serialized_end=163 - _globals['_WORKFLOWVERSION']._serialized_start=165 - _globals['_WORKFLOWVERSION']._serialized_end=227 - _globals['_WORKFLOWINSTANCE']._serialized_start=229 - _globals['_WORKFLOWINSTANCE']._serialized_end=318 - _globals['_TASKFAILUREDETAILS']._serialized_start=321 - 
_globals['_TASKFAILUREDETAILS']._serialized_end=499 - _globals['_PARENTINSTANCEINFO']._serialized_start=502 - _globals['_PARENTINSTANCEINFO']._serialized_end=713 - _globals['_RERUNPARENTINSTANCEINFO']._serialized_start=715 - _globals['_RERUNPARENTINSTANCEINFO']._serialized_end=760 - _globals['_TRACECONTEXT']._serialized_start=762 - _globals['_TRACECONTEXT']._serialized_end=867 - _globals['_WORKFLOWSTATE']._serialized_start=870 - _globals['_WORKFLOWSTATE']._serialized_end=1621 - _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_start=1578 - _globals['_WORKFLOWSTATE_TAGSENTRY']._serialized_end=1621 + _globals['_ORCHESTRATIONVERSION']._serialized_start=165 + _globals['_ORCHESTRATIONVERSION']._serialized_end=232 + _globals['_ORCHESTRATIONINSTANCE']._serialized_start=234 + _globals['_ORCHESTRATIONINSTANCE']._serialized_end=328 + _globals['_TASKFAILUREDETAILS']._serialized_start=331 + _globals['_TASKFAILUREDETAILS']._serialized_end=509 + _globals['_PARENTINSTANCEINFO']._serialized_start=512 + _globals['_PARENTINSTANCEINFO']._serialized_end=733 + _globals['_RERUNPARENTINSTANCEINFO']._serialized_start=735 + _globals['_RERUNPARENTINSTANCEINFO']._serialized_end=780 + _globals['_TRACECONTEXT']._serialized_start=782 + _globals['_TRACECONTEXT']._serialized_end=887 + _globals['_ORCHESTRATIONIDREUSEPOLICY']._serialized_start=889 + _globals['_ORCHESTRATIONIDREUSEPOLICY']._serialized_end=1008 + _globals['_ORCHESTRATIONSTATE']._serialized_start=1011 + _globals['_ORCHESTRATIONSTATE']._serialized_end=1777 + _globals['_ORCHESTRATIONSTATE_TAGSENTRY']._serialized_start=1734 + _globals['_ORCHESTRATIONSTATE_TAGSENTRY']._serialized_end=1777 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi index ce5f416c1..b17754182 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2.pyi @@ -16,10 +16,10 @@ import builtins as _builtins import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never if sys.version_info >= (3, 13): from warnings import deprecated as _deprecated @@ -72,6 +72,23 @@ ORCHESTRATION_STATUS_SUSPENDED: OrchestrationStatus.ValueType # 7 ORCHESTRATION_STATUS_STALLED: OrchestrationStatus.ValueType # 8 Global___OrchestrationStatus: _TypeAlias = OrchestrationStatus # noqa: Y015 +class _CreateOrchestrationAction: + ValueType = _typing.NewType("ValueType", _builtins.int) + V: _TypeAlias = ValueType # noqa: Y015 + +class _CreateOrchestrationActionEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_CreateOrchestrationAction.ValueType], _builtins.type): + DESCRIPTOR: _descriptor.EnumDescriptor + ERROR: _CreateOrchestrationAction.ValueType # 0 + IGNORE: _CreateOrchestrationAction.ValueType # 1 + TERMINATE: _CreateOrchestrationAction.ValueType # 2 + +class CreateOrchestrationAction(_CreateOrchestrationAction, metaclass=_CreateOrchestrationActionEnumTypeWrapper): ... 
+ +ERROR: CreateOrchestrationAction.ValueType # 0 +IGNORE: CreateOrchestrationAction.ValueType # 1 +TERMINATE: CreateOrchestrationAction.ValueType # 2 +Global___CreateOrchestrationAction: _TypeAlias = CreateOrchestrationAction # noqa: Y015 + @_typing.final class TaskRouter(_message.Message): DESCRIPTOR: _descriptor.Descriptor @@ -97,7 +114,7 @@ class TaskRouter(_message.Message): Global___TaskRouter: _TypeAlias = TaskRouter # noqa: Y015 @_typing.final -class WorkflowVersion(_message.Message): +class OrchestrationVersion(_message.Message): DESCRIPTOR: _descriptor.Descriptor PATCHES_FIELD_NUMBER: _builtins.int @@ -120,10 +137,10 @@ class WorkflowVersion(_message.Message): _WhichOneofArgType__name: _TypeAlias = _typing.Literal["_name", b"_name"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__name) -> _WhichOneofReturnType__name | None: ... -Global___WorkflowVersion: _TypeAlias = WorkflowVersion # noqa: Y015 +Global___OrchestrationVersion: _TypeAlias = OrchestrationVersion # noqa: Y015 @_typing.final -class WorkflowInstance(_message.Message): +class OrchestrationInstance(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -141,8 +158,9 @@ class WorkflowInstance(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___WorkflowInstance: _TypeAlias = WorkflowInstance # noqa: Y015 +Global___OrchestrationInstance: _TypeAlias = OrchestrationInstance # noqa: Y015 @_typing.final class TaskFailureDetails(_message.Message): @@ -173,6 +191,7 @@ class TaskFailureDetails(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["errorMessage", b"errorMessage", "errorType", b"errorType", "innerFailure", b"innerFailure", "isNonRetriable", b"isNonRetriable", "stackTrace", b"stackTrace"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TaskFailureDetails: _TypeAlias = TaskFailureDetails # noqa: Y015 @@ -183,7 +202,7 @@ class ParentInstanceInfo(_message.Message): TASKSCHEDULEDID_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int - WORKFLOWINSTANCE_FIELD_NUMBER: _builtins.int + ORCHESTRATIONINSTANCE_FIELD_NUMBER: _builtins.int APPID_FIELD_NUMBER: _builtins.int taskScheduledId: _builtins.int appID: _builtins.str @@ -192,19 +211,19 @@ class ParentInstanceInfo(_message.Message): @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property - def workflowInstance(self) -> Global___WorkflowInstance: ... + def orchestrationInstance(self) -> Global___OrchestrationInstance: ... def __init__( self, *, taskScheduledId: _builtins.int = ..., name: _wrappers_pb2.StringValue | None = ..., version: _wrappers_pb2.StringValue | None = ..., - workflowInstance: Global___WorkflowInstance | None = ..., + orchestrationInstance: Global___OrchestrationInstance | None = ..., appID: _builtins.str | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "orchestrationInstance", b"orchestrationInstance", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "taskScheduledId", b"taskScheduledId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_appID", b"_appID", "appID", b"appID", "name", b"name", "orchestrationInstance", b"orchestrationInstance", "taskScheduledId", b"taskScheduledId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... _WhichOneofReturnType__appID: _TypeAlias = _typing.Literal["appID"] # noqa: Y015 _WhichOneofArgType__appID: _TypeAlias = _typing.Literal["_appID", b"_appID"] # noqa: Y015 @@ -214,16 +233,16 @@ Global___ParentInstanceInfo: _TypeAlias = ParentInstanceInfo # noqa: Y015 @_typing.final class RerunParentInstanceInfo(_message.Message): - """RerunParentInstanceInfo is used to indicate that this workflow was + """RerunParentInstanceInfo is used to indicate that this orchestration was started as part of a rerun operation. Contains information about the parent - workflow instance which was rerun. + orchestration instance which was rerun. """ DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int instanceID: _builtins.str - """instanceID is the workflow instance ID this workflow has been + """instanceID is the orchestration instance ID this orchestration has been rerun from. """ def __init__( @@ -231,8 +250,11 @@ class RerunParentInstanceInfo(_message.Message): *, instanceID: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceID", b"instanceID"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___RerunParentInstanceInfo: _TypeAlias = RerunParentInstanceInfo # noqa: Y015 @@ -263,11 +285,35 @@ class TraceContext(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["spanID", b"spanID", "traceParent", b"traceParent", "traceState", b"traceState"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TraceContext: _TypeAlias = TraceContext # noqa: Y015 @_typing.final -class WorkflowState(_message.Message): +class OrchestrationIdReusePolicy(_message.Message): + DESCRIPTOR: _descriptor.Descriptor + + OPERATIONSTATUS_FIELD_NUMBER: _builtins.int + ACTION_FIELD_NUMBER: _builtins.int + action: Global___CreateOrchestrationAction.ValueType + @_builtins.property + def operationStatus(self) -> _containers.RepeatedScalarFieldContainer[Global___OrchestrationStatus.ValueType]: ... + def __init__( + self, + *, + operationStatus: _abc.Iterable[Global___OrchestrationStatus.ValueType] | None = ..., + action: Global___CreateOrchestrationAction.ValueType = ..., + ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _typing.Literal["action", b"action", "operationStatus", b"operationStatus"] # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... + +Global___OrchestrationIdReusePolicy: _TypeAlias = OrchestrationIdReusePolicy # noqa: Y015 + +@_typing.final +class OrchestrationState(_message.Message): DESCRIPTOR: _descriptor.Descriptor @_typing.final @@ -284,13 +330,16 @@ class WorkflowState(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... 
+ _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... INSTANCEID_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int - WORKFLOWSTATUS_FIELD_NUMBER: _builtins.int + ORCHESTRATIONSTATUS_FIELD_NUMBER: _builtins.int SCHEDULEDSTARTTIMESTAMP_FIELD_NUMBER: _builtins.int CREATEDTIMESTAMP_FIELD_NUMBER: _builtins.int LASTUPDATEDTIMESTAMP_FIELD_NUMBER: _builtins.int @@ -304,7 +353,7 @@ class WorkflowState(_message.Message): TAGS_FIELD_NUMBER: _builtins.int instanceId: _builtins.str name: _builtins.str - workflowStatus: Global___OrchestrationStatus.ValueType + orchestrationStatus: Global___OrchestrationStatus.ValueType @_builtins.property def version(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -335,7 +384,7 @@ class WorkflowState(_message.Message): instanceId: _builtins.str = ..., name: _builtins.str = ..., version: _wrappers_pb2.StringValue | None = ..., - workflowStatus: Global___OrchestrationStatus.ValueType = ..., + orchestrationStatus: Global___OrchestrationStatus.ValueType = ..., scheduledStartTimestamp: _timestamp_pb2.Timestamp | None = ..., createdTimestamp: _timestamp_pb2.Timestamp | None = ..., lastUpdatedTimestamp: _timestamp_pb2.Timestamp | None = ..., @@ -350,7 +399,8 @@ class WorkflowState(_message.Message): ) -> None: ... 
_HasFieldArgType: _TypeAlias = _typing.Literal["completedTimestamp", b"completedTimestamp", "createdTimestamp", b"createdTimestamp", "customStatus", b"customStatus", "executionId", b"executionId", "failureDetails", b"failureDetails", "input", b"input", "lastUpdatedTimestamp", b"lastUpdatedTimestamp", "output", b"output", "parentInstanceId", b"parentInstanceId", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["completedTimestamp", b"completedTimestamp", "createdTimestamp", b"createdTimestamp", "customStatus", b"customStatus", "executionId", b"executionId", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedTimestamp", b"lastUpdatedTimestamp", "name", b"name", "output", b"output", "parentInstanceId", b"parentInstanceId", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version", "workflowStatus", b"workflowStatus"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["completedTimestamp", b"completedTimestamp", "createdTimestamp", b"createdTimestamp", "customStatus", b"customStatus", "executionId", b"executionId", "failureDetails", b"failureDetails", "input", b"input", "instanceId", b"instanceId", "lastUpdatedTimestamp", b"lastUpdatedTimestamp", "name", b"name", "orchestrationStatus", b"orchestrationStatus", "output", b"output", "parentInstanceId", b"parentInstanceId", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___WorkflowState: _TypeAlias = WorkflowState # noqa: Y015 +Global___OrchestrationState: _TypeAlias = OrchestrationState # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py index dca81c14f..8afda749e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestration_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py index 718bb76cc..57bc3704e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x01\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\tB\t\n\x07_router\"\xc6\x01\n\x19\x43reateChildWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_router\"\xbb\x02\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12.\n\x0b\x63reateTimer\x18\x03 \x01(\x0b\x32\x17.TimerOriginCreateTimerH\x00\x12\x32\n\rexternalEvent\x18\x04 \x01(\x0b\x32\x19.TimerOriginExternalEventH\x00\x12\x32\n\ractivityRetry\x18\x05 \x01(\x0b\x32\x19.TimerOriginActivityRetryH\x00\x12<\n\x12\x63hildWorkflowRetry\x18\x06 \x01(\x0b\x32\x1e.TimerOriginChildWorkflowRetryH\x00\x42\x08\n\x06originB\x07\n\x05_name\"p\n\x0fSendEventAction\x12#\n\x08instance\x18\x01 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xaa\x02\n\x16\x43ompleteWorkflowAction\x12,\n\x0eworkflowStatus\x18\x01 \x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"l\n\x17TerminateWorkflowAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"#\n!WorkflowVersionNotAvailableAction\"\xd6\x03\n\x0eWorkflowAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12\x39\n\x13\x63reateChildWorkflow\x18\x03 \x01(\x0b\x32\x1a.CreateChildWorkflowActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 
\x01(\x0b\x32\x10.SendEventActionH\x00\x12\x33\n\x10\x63ompleteWorkflow\x18\x06 \x01(\x0b\x32\x17.CompleteWorkflowActionH\x00\x12\x35\n\x11terminateWorkflow\x18\x07 \x01(\x0b\x32\x18.TerminateWorkflowActionH\x00\x12I\n\x1bworkflowVersionNotAvailable\x18\n \x01(\x0b\x32\".WorkflowVersionNotAvailableActionH\x00\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x14\n\x12workflowActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_actions.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x01\n\x12ScheduleTaskAction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x04 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x12\x17\n\x0ftaskExecutionId\x18\x05 \x01(\tB\t\n\x07_router\"\xc9\x01\n\x1c\x43reateSubOrchestrationAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12 \n\x06router\x18\x05 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_router\"[\n\x11\x43reateTimerAction\x12*\n\x06\x66ireAt\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x11\n\x04name\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x07\n\x05_name\"u\n\x0fSendEventAction\x12(\n\x08instance\x18\x01 \x01(\x0b\x32\x16.OrchestrationInstance\x12\x0c\n\x04name\x18\x02 \x01(\t\x12*\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\xb4\x02\n\x1b\x43ompleteOrchestrationAction\x12\x31\n\x13orchestrationStatus\x18\x01 
\x01(\x0e\x32\x14.OrchestrationStatus\x12,\n\x06result\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\nnewVersion\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12&\n\x0f\x63\x61rryoverEvents\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12+\n\x0e\x66\x61ilureDetails\x18\x06 \x01(\x0b\x32\x13.TaskFailureDetails\"q\n\x1cTerminateOrchestrationAction\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x0f\n\x07recurse\x18\x03 \x01(\x08\"\'\n%OrchestratorVersionNotAvailableAction\"\x80\x04\n\x12OrchestratorAction\x12\n\n\x02id\x18\x01 \x01(\x05\x12+\n\x0cscheduleTask\x18\x02 \x01(\x0b\x32\x13.ScheduleTaskActionH\x00\x12?\n\x16\x63reateSubOrchestration\x18\x03 \x01(\x0b\x32\x1d.CreateSubOrchestrationActionH\x00\x12)\n\x0b\x63reateTimer\x18\x04 \x01(\x0b\x32\x12.CreateTimerActionH\x00\x12%\n\tsendEvent\x18\x05 \x01(\x0b\x32\x10.SendEventActionH\x00\x12=\n\x15\x63ompleteOrchestration\x18\x06 \x01(\x0b\x32\x1c.CompleteOrchestrationActionH\x00\x12?\n\x16terminateOrchestration\x18\x07 \x01(\x0b\x32\x1d.TerminateOrchestrationActionH\x00\x12Q\n\x1forchestratorVersionNotAvailable\x18\n \x01(\x0b\x32&.OrchestratorVersionNotAvailableActionH\x00\x12 \n\x06router\x18\t \x01(\x0b\x32\x0b.TaskRouterH\x01\x88\x01\x01\x42\x18\n\x16orchestratorActionTypeB\t\n\x07_routerJ\x04\x08\x08\x10\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,18 +38,18 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_SCHEDULETASKACTION']._serialized_start=139 _globals['_SCHEDULETASKACTION']._serialized_end=335 - _globals['_CREATECHILDWORKFLOWACTION']._serialized_start=338 - 
_globals['_CREATECHILDWORKFLOWACTION']._serialized_end=536 - _globals['_CREATETIMERACTION']._serialized_start=539 - _globals['_CREATETIMERACTION']._serialized_end=854 - _globals['_SENDEVENTACTION']._serialized_start=856 - _globals['_SENDEVENTACTION']._serialized_end=968 - _globals['_COMPLETEWORKFLOWACTION']._serialized_start=971 - _globals['_COMPLETEWORKFLOWACTION']._serialized_end=1269 - _globals['_TERMINATEWORKFLOWACTION']._serialized_start=1271 - _globals['_TERMINATEWORKFLOWACTION']._serialized_end=1379 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_start=1381 - _globals['_WORKFLOWVERSIONNOTAVAILABLEACTION']._serialized_end=1416 - _globals['_WORKFLOWACTION']._serialized_start=1419 - _globals['_WORKFLOWACTION']._serialized_end=1889 + _globals['_CREATESUBORCHESTRATIONACTION']._serialized_start=338 + _globals['_CREATESUBORCHESTRATIONACTION']._serialized_end=539 + _globals['_CREATETIMERACTION']._serialized_start=541 + _globals['_CREATETIMERACTION']._serialized_end=632 + _globals['_SENDEVENTACTION']._serialized_start=634 + _globals['_SENDEVENTACTION']._serialized_end=751 + _globals['_COMPLETEORCHESTRATIONACTION']._serialized_start=754 + _globals['_COMPLETEORCHESTRATIONACTION']._serialized_end=1062 + _globals['_TERMINATEORCHESTRATIONACTION']._serialized_start=1064 + _globals['_TERMINATEORCHESTRATIONACTION']._serialized_end=1177 + _globals['_ORCHESTRATORVERSIONNOTAVAILABLEACTION']._serialized_start=1179 + _globals['_ORCHESTRATORVERSIONNOTAVAILABLEACTION']._serialized_end=1218 + _globals['_ORCHESTRATORACTION']._serialized_start=1221 + _globals['_ORCHESTRATORACTION']._serialized_end=1733 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi index 8df2644c1..efe33e2d5 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi +++ 
b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2.pyi @@ -17,10 +17,10 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -61,7 +61,7 @@ class ScheduleTaskAction(_message.Message): Global___ScheduleTaskAction: _TypeAlias = ScheduleTaskAction # noqa: Y015 @_typing.final -class CreateChildWorkflowAction(_message.Message): +class CreateSubOrchestrationAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -94,7 +94,7 @@ class CreateChildWorkflowAction(_message.Message): _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... -Global___CreateChildWorkflowAction: _TypeAlias = CreateChildWorkflowAction # noqa: Y015 +Global___CreateSubOrchestrationAction: _TypeAlias = CreateSubOrchestrationAction # noqa: Y015 @_typing.final class CreateTimerAction(_message.Message): @@ -102,43 +102,22 @@ class CreateTimerAction(_message.Message): FIREAT_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int - CREATETIMER_FIELD_NUMBER: _builtins.int - EXTERNALEVENT_FIELD_NUMBER: _builtins.int - ACTIVITYRETRY_FIELD_NUMBER: _builtins.int - CHILDWORKFLOWRETRY_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property def fireAt(self) -> _timestamp_pb2.Timestamp: ... - @_builtins.property - def createTimer(self) -> _history_events_pb2.TimerOriginCreateTimer: ... 
- @_builtins.property - def externalEvent(self) -> _history_events_pb2.TimerOriginExternalEvent: ... - @_builtins.property - def activityRetry(self) -> _history_events_pb2.TimerOriginActivityRetry: ... - @_builtins.property - def childWorkflowRetry(self) -> _history_events_pb2.TimerOriginChildWorkflowRetry: ... def __init__( self, *, fireAt: _timestamp_pb2.Timestamp | None = ..., name: _builtins.str | None = ..., - createTimer: _history_events_pb2.TimerOriginCreateTimer | None = ..., - externalEvent: _history_events_pb2.TimerOriginExternalEvent | None = ..., - activityRetry: _history_events_pb2.TimerOriginActivityRetry | None = ..., - childWorkflowRetry: _history_events_pb2.TimerOriginChildWorkflowRetry | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "fireAt", b"fireAt", "name", b"name"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "activityRetry", b"activityRetry", "childWorkflowRetry", b"childWorkflowRetry", "createTimer", b"createTimer", "externalEvent", b"externalEvent", "fireAt", b"fireAt", "name", b"name", "origin", b"origin"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_name", b"_name", "fireAt", b"fireAt", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__name: _TypeAlias = _typing.Literal["name"] # noqa: Y015 _WhichOneofArgType__name: _TypeAlias = _typing.Literal["_name", b"_name"] # noqa: Y015 - _WhichOneofReturnType_origin: _TypeAlias = _typing.Literal["createTimer", "externalEvent", "activityRetry", "childWorkflowRetry"] # noqa: Y015 - _WhichOneofArgType_origin: _TypeAlias = _typing.Literal["origin", b"origin"] # noqa: Y015 - @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__name) -> _WhichOneofReturnType__name | None: ... - @_typing.overload - def WhichOneof(self, oneof_group: _WhichOneofArgType_origin) -> _WhichOneofReturnType_origin | None: ... Global___CreateTimerAction: _TypeAlias = CreateTimerAction # noqa: Y015 @@ -151,13 +130,13 @@ class SendEventAction(_message.Message): DATA_FIELD_NUMBER: _builtins.int name: _builtins.str @_builtins.property - def instance(self) -> _orchestration_pb2.WorkflowInstance: ... + def instance(self) -> _orchestration_pb2.OrchestrationInstance: ... @_builtins.property def data(self) -> _wrappers_pb2.StringValue: ... def __init__( self, *, - instance: _orchestration_pb2.WorkflowInstance | None = ..., + instance: _orchestration_pb2.OrchestrationInstance | None = ..., name: _builtins.str = ..., data: _wrappers_pb2.StringValue | None = ..., ) -> None: ... @@ -165,20 +144,21 @@ class SendEventAction(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["data", b"data", "instance", b"instance", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___SendEventAction: _TypeAlias = SendEventAction # noqa: Y015 @_typing.final -class CompleteWorkflowAction(_message.Message): +class CompleteOrchestrationAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor - WORKFLOWSTATUS_FIELD_NUMBER: _builtins.int + ORCHESTRATIONSTATUS_FIELD_NUMBER: _builtins.int RESULT_FIELD_NUMBER: _builtins.int DETAILS_FIELD_NUMBER: _builtins.int NEWVERSION_FIELD_NUMBER: _builtins.int CARRYOVEREVENTS_FIELD_NUMBER: _builtins.int FAILUREDETAILS_FIELD_NUMBER: _builtins.int - workflowStatus: _orchestration_pb2.OrchestrationStatus.ValueType + orchestrationStatus: _orchestration_pb2.OrchestrationStatus.ValueType @_builtins.property def result(self) -> _wrappers_pb2.StringValue: ... @_builtins.property @@ -192,7 +172,7 @@ class CompleteWorkflowAction(_message.Message): def __init__( self, *, - workflowStatus: _orchestration_pb2.OrchestrationStatus.ValueType = ..., + orchestrationStatus: _orchestration_pb2.OrchestrationStatus.ValueType = ..., result: _wrappers_pb2.StringValue | None = ..., details: _wrappers_pb2.StringValue | None = ..., newVersion: _wrappers_pb2.StringValue | None = ..., @@ -201,13 +181,14 @@ class CompleteWorkflowAction(_message.Message): ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["details", b"details", "failureDetails", b"failureDetails", "newVersion", b"newVersion", "result", b"result"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["carryoverEvents", b"carryoverEvents", "details", b"details", "failureDetails", b"failureDetails", "newVersion", b"newVersion", "result", b"result", "workflowStatus", b"workflowStatus"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["carryoverEvents", b"carryoverEvents", "details", b"details", "failureDetails", b"failureDetails", "newVersion", b"newVersion", "orchestrationStatus", b"orchestrationStatus", "result", b"result"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___CompleteWorkflowAction: _TypeAlias = CompleteWorkflowAction # noqa: Y015 +Global___CompleteOrchestrationAction: _TypeAlias = CompleteOrchestrationAction # noqa: Y015 @_typing.final -class TerminateWorkflowAction(_message.Message): +class TerminateOrchestrationAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -228,47 +209,53 @@ class TerminateWorkflowAction(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason", "recurse", b"recurse"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___TerminateWorkflowAction: _TypeAlias = TerminateWorkflowAction # noqa: Y015 +Global___TerminateOrchestrationAction: _TypeAlias = TerminateOrchestrationAction # noqa: Y015 @_typing.final -class WorkflowVersionNotAvailableAction(_message.Message): +class OrchestratorVersionNotAvailableAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
+ _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... -Global___WorkflowVersionNotAvailableAction: _TypeAlias = WorkflowVersionNotAvailableAction # noqa: Y015 +Global___OrchestratorVersionNotAvailableAction: _TypeAlias = OrchestratorVersionNotAvailableAction # noqa: Y015 @_typing.final -class WorkflowAction(_message.Message): +class OrchestratorAction(_message.Message): DESCRIPTOR: _descriptor.Descriptor ID_FIELD_NUMBER: _builtins.int SCHEDULETASK_FIELD_NUMBER: _builtins.int - CREATECHILDWORKFLOW_FIELD_NUMBER: _builtins.int + CREATESUBORCHESTRATION_FIELD_NUMBER: _builtins.int CREATETIMER_FIELD_NUMBER: _builtins.int SENDEVENT_FIELD_NUMBER: _builtins.int - COMPLETEWORKFLOW_FIELD_NUMBER: _builtins.int - TERMINATEWORKFLOW_FIELD_NUMBER: _builtins.int - WORKFLOWVERSIONNOTAVAILABLE_FIELD_NUMBER: _builtins.int + COMPLETEORCHESTRATION_FIELD_NUMBER: _builtins.int + TERMINATEORCHESTRATION_FIELD_NUMBER: _builtins.int + ORCHESTRATORVERSIONNOTAVAILABLE_FIELD_NUMBER: _builtins.int ROUTER_FIELD_NUMBER: _builtins.int id: _builtins.int @_builtins.property def scheduleTask(self) -> Global___ScheduleTaskAction: ... @_builtins.property - def createChildWorkflow(self) -> Global___CreateChildWorkflowAction: ... + def createSubOrchestration(self) -> Global___CreateSubOrchestrationAction: ... @_builtins.property def createTimer(self) -> Global___CreateTimerAction: ... @_builtins.property def sendEvent(self) -> Global___SendEventAction: ... @_builtins.property - def completeWorkflow(self) -> Global___CompleteWorkflowAction: ... + def completeOrchestration(self) -> Global___CompleteOrchestrationAction: ... @_builtins.property - def terminateWorkflow(self) -> Global___TerminateWorkflowAction: ... + def terminateOrchestration(self) -> Global___TerminateOrchestrationAction: ... 
@_builtins.property - def workflowVersionNotAvailable(self) -> Global___WorkflowVersionNotAvailableAction: ... + def orchestratorVersionNotAvailable(self) -> Global___OrchestratorVersionNotAvailableAction: ... @_builtins.property def router(self) -> _orchestration_pb2.TaskRouter: ... def __init__( @@ -276,25 +263,25 @@ class WorkflowAction(_message.Message): *, id: _builtins.int = ..., scheduleTask: Global___ScheduleTaskAction | None = ..., - createChildWorkflow: Global___CreateChildWorkflowAction | None = ..., + createSubOrchestration: Global___CreateSubOrchestrationAction | None = ..., createTimer: Global___CreateTimerAction | None = ..., sendEvent: Global___SendEventAction | None = ..., - completeWorkflow: Global___CompleteWorkflowAction | None = ..., - terminateWorkflow: Global___TerminateWorkflowAction | None = ..., - workflowVersionNotAvailable: Global___WorkflowVersionNotAvailableAction | None = ..., + completeOrchestration: Global___CompleteOrchestrationAction | None = ..., + terminateOrchestration: Global___TerminateOrchestrationAction | None = ..., + orchestratorVersionNotAvailable: Global___OrchestratorVersionNotAvailableAction | None = ..., router: _orchestration_pb2.TaskRouter | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createTimer", b"createTimer", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeOrchestration", b"completeOrchestration", "createSubOrchestration", b"createSubOrchestration", "createTimer", b"createTimer", "orchestratorActionType", b"orchestratorActionType", "orchestratorVersionNotAvailable", b"orchestratorVersionNotAvailable", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateOrchestration", b"terminateOrchestration"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeWorkflow", b"completeWorkflow", "createChildWorkflow", b"createChildWorkflow", "createTimer", b"createTimer", "id", b"id", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateWorkflow", b"terminateWorkflow", "workflowActionType", b"workflowActionType", "workflowVersionNotAvailable", b"workflowVersionNotAvailable"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["_router", b"_router", "completeOrchestration", b"completeOrchestration", "createSubOrchestration", b"createSubOrchestration", "createTimer", b"createTimer", "id", b"id", "orchestratorActionType", b"orchestratorActionType", "orchestratorVersionNotAvailable", b"orchestratorVersionNotAvailable", "router", b"router", "scheduleTask", b"scheduleTask", "sendEvent", b"sendEvent", "terminateOrchestration", b"terminateOrchestration"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... 
_WhichOneofReturnType__router: _TypeAlias = _typing.Literal["router"] # noqa: Y015 _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 - _WhichOneofReturnType_workflowActionType: _TypeAlias = _typing.Literal["scheduleTask", "createChildWorkflow", "createTimer", "sendEvent", "completeWorkflow", "terminateWorkflow", "workflowVersionNotAvailable"] # noqa: Y015 - _WhichOneofArgType_workflowActionType: _TypeAlias = _typing.Literal["workflowActionType", b"workflowActionType"] # noqa: Y015 + _WhichOneofReturnType_orchestratorActionType: _TypeAlias = _typing.Literal["scheduleTask", "createSubOrchestration", "createTimer", "sendEvent", "completeOrchestration", "terminateOrchestration", "orchestratorVersionNotAvailable"] # noqa: Y015 + _WhichOneofArgType_orchestratorActionType: _TypeAlias = _typing.Literal["orchestratorActionType", b"orchestratorActionType"] # noqa: Y015 @_typing.overload def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... @_typing.overload - def WhichOneof(self, oneof_group: _WhichOneofArgType_workflowActionType) -> _WhichOneofReturnType_workflowActionType | None: ... + def WhichOneof(self, oneof_group: _WhichOneofArgType_orchestratorActionType) -> _WhichOneofReturnType_orchestratorActionType | None: ... 
-Global___WorkflowAction: _TypeAlias = WorkflowAction # noqa: Y015 +Global___OrchestratorAction: _TypeAlias = OrchestratorAction # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py index 2445897f9..042994a2a 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_actions_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py index a3255f819..2295bdaba 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.py @@ -30,7 +30,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xfc\x01\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x10workflowInstance\x18\x04 \x01(\x0b\x32\x11.WorkflowInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 
\x01(\t\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xf2\x01\n\x0fWorkflowRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_routerJ\x04\x08\x05\x10\x06\"\x82\x02\n\x10WorkflowResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x0f.WorkflowAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12&\n\x07version\x18\x06 \x01(\x0b\x32\x10.WorkflowVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xaf\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x06\x10\x07R\x1aorchestrationIdReusePolicy\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 
\x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"L\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12%\n\rworkflowState\x18\x02 \x01(\x0b\x32\x0e.WorkflowState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 \x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 \x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\x9a\x01\n\x08WorkItem\x12+\n\x0fworkflowRequest\x18\x01 \x01(\x0b\x32\x10.WorkflowRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 
\x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 
\x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xe8\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12I\n\x18\x43ompleteOrchestratorTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\"\x03\x88\x02\x01\x12@\n\x14\x43ompleteWorkflowTask\x12\x11.WorkflowResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1aorchestrator_service.proto\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1aorchestrator_actions.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1bgoogle/protobuf/empty.proto\"\x86\x02\n\x0f\x41\x63tivityRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x07version\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x35\n\x15orchestrationInstance\x18\x04 \x01(\x0b\x32\x16.OrchestrationInstance\x12\x0e\n\x06taskId\x18\x05 \x01(\x05\x12)\n\x12parentTraceContext\x18\x06 \x01(\x0b\x32\r.TraceContext\x12\x17\n\x0ftaskExecutionId\x18\x07 \x01(\t\"\xaa\x01\n\x10\x41\x63tivityResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0e\n\x06taskId\x18\x02 \x01(\x05\x12,\n\x06result\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x0e\x66\x61ilureDetails\x18\x04 \x01(\x0b\x32\x13.TaskFailureDetails\x12\x17\n\x0f\x63ompletionToken\x18\x05 \x01(\t\"\xf6\x01\n\x13OrchestratorRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x31\n\x0b\x65xecutionId\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12!\n\npastEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12 \n\tnewEvents\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12 \n\x18requiresHistoryStreaming\x18\x06 \x01(\x08\x12 \n\x06router\x18\x07 \x01(\x0b\x32\x0b.TaskRouterH\x00\x88\x01\x01\x42\t\n\x07_routerJ\x04\x08\x05\x10\x06\"\x8f\x02\n\x14OrchestratorResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12$\n\x07\x61\x63tions\x18\x02 \x03(\x0b\x32\x13.OrchestratorAction\x12\x32\n\x0c\x63ustomStatus\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x17\n\x0f\x63ompletionToken\x18\x04 \x01(\t\x12\x37\n\x12numEventsProcessed\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12+\n\x07version\x18\x06 \x01(\x0b\x32\x15.OrchestrationVersionH\x00\x88\x01\x01\x42\n\n\x08_version\"\xce\x03\n\x15\x43reateInstanceRequest\x12\x12\n\ninstanceId\x18\x01 
\x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x07version\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12;\n\x17scheduledStartTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12?\n\x1aorchestrationIdReusePolicy\x18\x06 \x01(\x0b\x32\x1b.OrchestrationIdReusePolicy\x12\x31\n\x0b\x65xecutionId\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x04tags\x18\x08 \x03(\x0b\x32 .CreateInstanceRequest.TagsEntry\x12)\n\x12parentTraceContext\x18\t \x01(\x0b\x32\r.TraceContext\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\",\n\x16\x43reateInstanceResponse\x12\x12\n\ninstanceId\x18\x01 \x01(\t\"E\n\x12GetInstanceRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x1b\n\x13getInputsAndOutputs\x18\x02 \x01(\x08\"V\n\x13GetInstanceResponse\x12\x0e\n\x06\x65xists\x18\x01 \x01(\x08\x12/\n\x12orchestrationState\x18\x02 \x01(\x0b\x32\x13.OrchestrationState\"b\n\x11RaiseEventRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12+\n\x05input\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x14\n\x12RaiseEventResponse\"g\n\x10TerminateRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06output\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x11\n\trecursive\x18\x03 \x01(\x08\"\x13\n\x11TerminateResponse\"R\n\x0eSuspendRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x11\n\x0fSuspendResponse\"Q\n\rResumeRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12,\n\x06reason\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\"\x10\n\x0eResumeResponse\"\x9e\x01\n\x15PurgeInstancesRequest\x12\x14\n\ninstanceId\x18\x01 \x01(\tH\x00\x12\x33\n\x13purgeInstanceFilter\x18\x02 \x01(\x0b\x32\x14.PurgeInstanceFilterH\x00\x12\x11\n\trecursive\x18\x03 \x01(\x08\x12\x12\n\x05\x66orce\x18\x04 
\x01(\x08H\x01\x88\x01\x01\x42\t\n\x07requestB\x08\n\x06_force\"\xaa\x01\n\x13PurgeInstanceFilter\x12\x33\n\x0f\x63reatedTimeFrom\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcreatedTimeTo\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\rruntimeStatus\x18\x03 \x03(\x0e\x32\x14.OrchestrationStatus\"f\n\x16PurgeInstancesResponse\x12\x1c\n\x14\x64\x65letedInstanceCount\x18\x01 \x01(\x05\x12.\n\nisComplete\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\"-\n\x13GetWorkItemsRequestJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\n\x10\x0b\"\xa2\x01\n\x08WorkItem\x12\x33\n\x13orchestratorRequest\x18\x01 \x01(\x0b\x32\x14.OrchestratorRequestH\x00\x12+\n\x0f\x61\x63tivityRequest\x18\x02 \x01(\x0b\x32\x10.ActivityRequestH\x00\x12\x17\n\x0f\x63ompletionToken\x18\n \x01(\tB\t\n\x07requestJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"\x16\n\x14\x43ompleteTaskResponse\"\x85\x02\n\x1dRerunWorkflowFromEventRequest\x12\x18\n\x10sourceInstanceID\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventID\x18\x02 \x01(\r\x12\x1a\n\rnewInstanceID\x18\x03 \x01(\tH\x00\x88\x01\x01\x12+\n\x05input\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x16\n\x0eoverwriteInput\x18\x05 \x01(\x08\x12\'\n\x1anewChildWorkflowInstanceID\x18\x06 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_newInstanceIDB\x1d\n\x1b_newChildWorkflowInstanceID\"7\n\x1eRerunWorkflowFromEventResponse\x12\x15\n\rnewInstanceID\x18\x01 \x01(\t\"r\n\x16ListInstanceIDsRequest\x12\x1e\n\x11\x63ontinuationToken\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08pageSize\x18\x02 \x01(\rH\x01\x88\x01\x01\x42\x14\n\x12_continuationTokenB\x0b\n\t_pageSize\"d\n\x17ListInstanceIDsResponse\x12\x13\n\x0binstanceIds\x18\x01 \x03(\t\x12\x1e\n\x11\x63ontinuationToken\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x14\n\x12_continuationToken\"/\n\x19GetInstanceHistoryRequest\x12\x12\n\ninstanceId\x18\x01 \x01(\t\";\n\x1aGetInstanceHistoryResponse\x12\x1d\n\x06\x65vents\x18\x01 
\x03(\x0b\x32\r.HistoryEvent*^\n\x10WorkerCapability\x12!\n\x1dWORKER_CAPABILITY_UNSPECIFIED\x10\x00\x12\'\n#WORKER_CAPABILITY_HISTORY_STREAMING\x10\x01\x32\xa5\x08\n\x15TaskHubSidecarService\x12\x37\n\x05Hello\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.Empty\x12@\n\rStartInstance\x12\x16.CreateInstanceRequest\x1a\x17.CreateInstanceResponse\x12\x38\n\x0bGetInstance\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x41\n\x14WaitForInstanceStart\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x46\n\x19WaitForInstanceCompletion\x12\x13.GetInstanceRequest\x1a\x14.GetInstanceResponse\x12\x35\n\nRaiseEvent\x12\x12.RaiseEventRequest\x1a\x13.RaiseEventResponse\x12:\n\x11TerminateInstance\x12\x11.TerminateRequest\x1a\x12.TerminateResponse\x12\x34\n\x0fSuspendInstance\x12\x0f.SuspendRequest\x1a\x10.SuspendResponse\x12\x31\n\x0eResumeInstance\x12\x0e.ResumeRequest\x1a\x0f.ResumeResponse\x12\x41\n\x0ePurgeInstances\x12\x16.PurgeInstancesRequest\x1a\x17.PurgeInstancesResponse\x12\x31\n\x0cGetWorkItems\x12\x14.GetWorkItemsRequest\x1a\t.WorkItem0\x01\x12@\n\x14\x43ompleteActivityTask\x12\x11.ActivityResponse\x1a\x15.CompleteTaskResponse\x12H\n\x18\x43ompleteOrchestratorTask\x12\x15.OrchestratorResponse\x1a\x15.CompleteTaskResponse\x12Y\n\x16RerunWorkflowFromEvent\x12\x1e.RerunWorkflowFromEventRequest\x1a\x1f.RerunWorkflowFromEventResponse\x12\x44\n\x0fListInstanceIDs\x12\x17.ListInstanceIDsRequest\x1a\x18.ListInstanceIDsResponse\x12M\n\x12GetInstanceHistory\x12\x1a.GetInstanceHistoryRequest\x1a\x1b.GetInstanceHistoryResponseBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -40,68 +40,66 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._loaded_options 
= None _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_options = b'8\001' - _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._loaded_options = None - _globals['_TASKHUBSIDECARSERVICE'].methods_by_name['CompleteOrchestratorTask']._serialized_options = b'\210\002\001' - _globals['_WORKERCAPABILITY']._serialized_start=3525 - _globals['_WORKERCAPABILITY']._serialized_end=3619 + _globals['_WORKERCAPABILITY']._serialized_start=3601 + _globals['_WORKERCAPABILITY']._serialized_end=3695 _globals['_ACTIVITYREQUEST']._serialized_start=196 - _globals['_ACTIVITYREQUEST']._serialized_end=448 - _globals['_ACTIVITYRESPONSE']._serialized_start=451 - _globals['_ACTIVITYRESPONSE']._serialized_end=621 - _globals['_WORKFLOWREQUEST']._serialized_start=624 - _globals['_WORKFLOWREQUEST']._serialized_end=866 - _globals['_WORKFLOWRESPONSE']._serialized_start=869 - _globals['_WORKFLOWRESPONSE']._serialized_end=1127 - _globals['_CREATEINSTANCEREQUEST']._serialized_start=1130 - _globals['_CREATEINSTANCEREQUEST']._serialized_end=1561 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1484 - _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1527 - _globals['_CREATEINSTANCERESPONSE']._serialized_start=1563 - _globals['_CREATEINSTANCERESPONSE']._serialized_end=1607 - _globals['_GETINSTANCEREQUEST']._serialized_start=1609 - _globals['_GETINSTANCEREQUEST']._serialized_end=1678 - _globals['_GETINSTANCERESPONSE']._serialized_start=1680 - _globals['_GETINSTANCERESPONSE']._serialized_end=1756 - _globals['_RAISEEVENTREQUEST']._serialized_start=1758 - _globals['_RAISEEVENTREQUEST']._serialized_end=1856 - _globals['_RAISEEVENTRESPONSE']._serialized_start=1858 - _globals['_RAISEEVENTRESPONSE']._serialized_end=1878 - _globals['_TERMINATEREQUEST']._serialized_start=1880 - _globals['_TERMINATEREQUEST']._serialized_end=1983 - _globals['_TERMINATERESPONSE']._serialized_start=1985 - _globals['_TERMINATERESPONSE']._serialized_end=2004 - 
_globals['_SUSPENDREQUEST']._serialized_start=2006 - _globals['_SUSPENDREQUEST']._serialized_end=2088 - _globals['_SUSPENDRESPONSE']._serialized_start=2090 - _globals['_SUSPENDRESPONSE']._serialized_end=2107 - _globals['_RESUMEREQUEST']._serialized_start=2109 - _globals['_RESUMEREQUEST']._serialized_end=2190 - _globals['_RESUMERESPONSE']._serialized_start=2192 - _globals['_RESUMERESPONSE']._serialized_end=2208 - _globals['_PURGEINSTANCESREQUEST']._serialized_start=2211 - _globals['_PURGEINSTANCESREQUEST']._serialized_end=2369 - _globals['_PURGEINSTANCEFILTER']._serialized_start=2372 - _globals['_PURGEINSTANCEFILTER']._serialized_end=2542 - _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2544 - _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2646 - _globals['_GETWORKITEMSREQUEST']._serialized_start=2648 - _globals['_GETWORKITEMSREQUEST']._serialized_end=2693 - _globals['_WORKITEM']._serialized_start=2696 - _globals['_WORKITEM']._serialized_end=2850 - _globals['_COMPLETETASKRESPONSE']._serialized_start=2852 - _globals['_COMPLETETASKRESPONSE']._serialized_end=2874 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=2877 - _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3138 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3140 - _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3195 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3197 - _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3311 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3313 - _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3413 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3415 - _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3462 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3464 - _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3523 - _globals['_TASKHUBSIDECARSERVICE']._serialized_start=3622 - _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4750 + 
_globals['_ACTIVITYREQUEST']._serialized_end=458 + _globals['_ACTIVITYRESPONSE']._serialized_start=461 + _globals['_ACTIVITYRESPONSE']._serialized_end=631 + _globals['_ORCHESTRATORREQUEST']._serialized_start=634 + _globals['_ORCHESTRATORREQUEST']._serialized_end=880 + _globals['_ORCHESTRATORRESPONSE']._serialized_start=883 + _globals['_ORCHESTRATORRESPONSE']._serialized_end=1154 + _globals['_CREATEINSTANCEREQUEST']._serialized_start=1157 + _globals['_CREATEINSTANCEREQUEST']._serialized_end=1619 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_start=1576 + _globals['_CREATEINSTANCEREQUEST_TAGSENTRY']._serialized_end=1619 + _globals['_CREATEINSTANCERESPONSE']._serialized_start=1621 + _globals['_CREATEINSTANCERESPONSE']._serialized_end=1665 + _globals['_GETINSTANCEREQUEST']._serialized_start=1667 + _globals['_GETINSTANCEREQUEST']._serialized_end=1736 + _globals['_GETINSTANCERESPONSE']._serialized_start=1738 + _globals['_GETINSTANCERESPONSE']._serialized_end=1824 + _globals['_RAISEEVENTREQUEST']._serialized_start=1826 + _globals['_RAISEEVENTREQUEST']._serialized_end=1924 + _globals['_RAISEEVENTRESPONSE']._serialized_start=1926 + _globals['_RAISEEVENTRESPONSE']._serialized_end=1946 + _globals['_TERMINATEREQUEST']._serialized_start=1948 + _globals['_TERMINATEREQUEST']._serialized_end=2051 + _globals['_TERMINATERESPONSE']._serialized_start=2053 + _globals['_TERMINATERESPONSE']._serialized_end=2072 + _globals['_SUSPENDREQUEST']._serialized_start=2074 + _globals['_SUSPENDREQUEST']._serialized_end=2156 + _globals['_SUSPENDRESPONSE']._serialized_start=2158 + _globals['_SUSPENDRESPONSE']._serialized_end=2175 + _globals['_RESUMEREQUEST']._serialized_start=2177 + _globals['_RESUMEREQUEST']._serialized_end=2258 + _globals['_RESUMERESPONSE']._serialized_start=2260 + _globals['_RESUMERESPONSE']._serialized_end=2276 + _globals['_PURGEINSTANCESREQUEST']._serialized_start=2279 + _globals['_PURGEINSTANCESREQUEST']._serialized_end=2437 + 
_globals['_PURGEINSTANCEFILTER']._serialized_start=2440 + _globals['_PURGEINSTANCEFILTER']._serialized_end=2610 + _globals['_PURGEINSTANCESRESPONSE']._serialized_start=2612 + _globals['_PURGEINSTANCESRESPONSE']._serialized_end=2714 + _globals['_GETWORKITEMSREQUEST']._serialized_start=2716 + _globals['_GETWORKITEMSREQUEST']._serialized_end=2761 + _globals['_WORKITEM']._serialized_start=2764 + _globals['_WORKITEM']._serialized_end=2926 + _globals['_COMPLETETASKRESPONSE']._serialized_start=2928 + _globals['_COMPLETETASKRESPONSE']._serialized_end=2950 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_start=2953 + _globals['_RERUNWORKFLOWFROMEVENTREQUEST']._serialized_end=3214 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_start=3216 + _globals['_RERUNWORKFLOWFROMEVENTRESPONSE']._serialized_end=3271 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_start=3273 + _globals['_LISTINSTANCEIDSREQUEST']._serialized_end=3387 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_start=3389 + _globals['_LISTINSTANCEIDSRESPONSE']._serialized_end=3489 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_start=3491 + _globals['_GETINSTANCEHISTORYREQUEST']._serialized_end=3538 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_start=3540 + _globals['_GETINSTANCEHISTORYRESPONSE']._serialized_end=3599 + _globals['_TASKHUBSIDECARSERVICE']._serialized_start=3698 + _globals['_TASKHUBSIDECARSERVICE']._serialized_end=4759 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi index 8786348eb..eda749959 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2.pyi @@ -19,10 +19,10 @@ from dapr.ext.workflow._durabletask.internal import 
orchestrator_actions_pb2 as import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -35,7 +35,7 @@ class _WorkerCapabilityEnumTypeWrapper(_enum_type_wrapper._EnumTypeWrapper[_Work WORKER_CAPABILITY_UNSPECIFIED: _WorkerCapability.ValueType # 0 WORKER_CAPABILITY_HISTORY_STREAMING: _WorkerCapability.ValueType # 1 """Indicates that the worker is capable of streaming instance history as a more optimized - alternative to receiving the full history embedded in the workflow work-item. + alternative to receiving the full history embedded in the orchestrator work-item. When set, the service may return work items without any history events as an optimization. It is strongly recommended that all SDKs support this capability. """ @@ -45,7 +45,7 @@ class WorkerCapability(_WorkerCapability, metaclass=_WorkerCapabilityEnumTypeWra WORKER_CAPABILITY_UNSPECIFIED: WorkerCapability.ValueType # 0 WORKER_CAPABILITY_HISTORY_STREAMING: WorkerCapability.ValueType # 1 """Indicates that the worker is capable of streaming instance history as a more optimized -alternative to receiving the full history embedded in the workflow work-item. +alternative to receiving the full history embedded in the orchestrator work-item. When set, the service may return work items without any history events as an optimization. It is strongly recommended that all SDKs support this capability. 
""" @@ -58,7 +58,7 @@ class ActivityRequest(_message.Message): NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int INPUT_FIELD_NUMBER: _builtins.int - WORKFLOWINSTANCE_FIELD_NUMBER: _builtins.int + ORCHESTRATIONINSTANCE_FIELD_NUMBER: _builtins.int TASKID_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int TASKEXECUTIONID_FIELD_NUMBER: _builtins.int @@ -70,7 +70,7 @@ class ActivityRequest(_message.Message): @_builtins.property def input(self) -> _wrappers_pb2.StringValue: ... @_builtins.property - def workflowInstance(self) -> _orchestration_pb2.WorkflowInstance: ... + def orchestrationInstance(self) -> _orchestration_pb2.OrchestrationInstance: ... @_builtins.property def parentTraceContext(self) -> _orchestration_pb2.TraceContext: ... def __init__( @@ -79,15 +79,16 @@ class ActivityRequest(_message.Message): name: _builtins.str = ..., version: _wrappers_pb2.StringValue | None = ..., input: _wrappers_pb2.StringValue | None = ..., - workflowInstance: _orchestration_pb2.WorkflowInstance | None = ..., + orchestrationInstance: _orchestration_pb2.OrchestrationInstance | None = ..., taskId: _builtins.int = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., taskExecutionId: _builtins.str = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "parentTraceContext", b"parentTraceContext", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "orchestrationInstance", b"orchestrationInstance", "parentTraceContext", b"parentTraceContext", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "parentTraceContext", b"parentTraceContext", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version", "workflowInstance", b"workflowInstance"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "name", b"name", "orchestrationInstance", b"orchestrationInstance", "parentTraceContext", b"parentTraceContext", "taskExecutionId", b"taskExecutionId", "taskId", b"taskId", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ActivityRequest: _TypeAlias = ActivityRequest # noqa: Y015 @@ -120,11 +121,12 @@ class ActivityResponse(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["completionToken", b"completionToken", "failureDetails", b"failureDetails", "instanceId", b"instanceId", "result", b"result", "taskId", b"taskId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ActivityResponse: _TypeAlias = ActivityResponse # noqa: Y015 @_typing.final -class WorkflowRequest(_message.Message): +class OrchestratorRequest(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -161,10 +163,10 @@ class WorkflowRequest(_message.Message): _WhichOneofArgType__router: _TypeAlias = _typing.Literal["_router", b"_router"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__router) -> _WhichOneofReturnType__router | None: ... 
-Global___WorkflowRequest: _TypeAlias = WorkflowRequest # noqa: Y015 +Global___OrchestratorRequest: _TypeAlias = OrchestratorRequest # noqa: Y015 @_typing.final -class WorkflowResponse(_message.Message): +class OrchestratorResponse(_message.Message): DESCRIPTOR: _descriptor.Descriptor INSTANCEID_FIELD_NUMBER: _builtins.int @@ -176,26 +178,26 @@ class WorkflowResponse(_message.Message): instanceId: _builtins.str completionToken: _builtins.str @_builtins.property - def actions(self) -> _containers.RepeatedCompositeFieldContainer[_orchestrator_actions_pb2.WorkflowAction]: ... + def actions(self) -> _containers.RepeatedCompositeFieldContainer[_orchestrator_actions_pb2.OrchestratorAction]: ... @_builtins.property def customStatus(self) -> _wrappers_pb2.StringValue: ... @_builtins.property def numEventsProcessed(self) -> _wrappers_pb2.Int32Value: - """The number of work item events that were processed by the workflow. - This field is optional. If not set, the service should assume that the workflow processed all events. + """The number of work item events that were processed by the orchestrator. + This field is optional. If not set, the service should assume that the orchestrator processed all events. """ @_builtins.property - def version(self) -> _orchestration_pb2.WorkflowVersion: ... + def version(self) -> _orchestration_pb2.OrchestrationVersion: ... def __init__( self, *, instanceId: _builtins.str = ..., - actions: _abc.Iterable[_orchestrator_actions_pb2.WorkflowAction] | None = ..., + actions: _abc.Iterable[_orchestrator_actions_pb2.OrchestratorAction] | None = ..., customStatus: _wrappers_pb2.StringValue | None = ..., completionToken: _builtins.str = ..., numEventsProcessed: _wrappers_pb2.Int32Value | None = ..., - version: _orchestration_pb2.WorkflowVersion | None = ..., + version: _orchestration_pb2.OrchestrationVersion | None = ..., ) -> None: ... 
_HasFieldArgType: _TypeAlias = _typing.Literal["_version", b"_version", "customStatus", b"customStatus", "numEventsProcessed", b"numEventsProcessed", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... @@ -205,7 +207,7 @@ class WorkflowResponse(_message.Message): _WhichOneofArgType__version: _TypeAlias = _typing.Literal["_version", b"_version"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__version) -> _WhichOneofReturnType__version | None: ... -Global___WorkflowResponse: _TypeAlias = WorkflowResponse # noqa: Y015 +Global___OrchestratorResponse: _TypeAlias = OrchestratorResponse # noqa: Y015 @_typing.final class CreateInstanceRequest(_message.Message): @@ -225,14 +227,18 @@ class CreateInstanceRequest(_message.Message): key: _builtins.str = ..., value: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["key", b"key", "value", b"value"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... INSTANCEID_FIELD_NUMBER: _builtins.int NAME_FIELD_NUMBER: _builtins.int VERSION_FIELD_NUMBER: _builtins.int INPUT_FIELD_NUMBER: _builtins.int SCHEDULEDSTARTTIMESTAMP_FIELD_NUMBER: _builtins.int + ORCHESTRATIONIDREUSEPOLICY_FIELD_NUMBER: _builtins.int EXECUTIONID_FIELD_NUMBER: _builtins.int TAGS_FIELD_NUMBER: _builtins.int PARENTTRACECONTEXT_FIELD_NUMBER: _builtins.int @@ -245,6 +251,8 @@ class CreateInstanceRequest(_message.Message): @_builtins.property def scheduledStartTimestamp(self) -> _timestamp_pb2.Timestamp: ... @_builtins.property + def orchestrationIdReusePolicy(self) -> _orchestration_pb2.OrchestrationIdReusePolicy: ... + @_builtins.property def executionId(self) -> _wrappers_pb2.StringValue: ... 
@_builtins.property def tags(self) -> _containers.ScalarMap[_builtins.str, _builtins.str]: ... @@ -258,14 +266,16 @@ class CreateInstanceRequest(_message.Message): version: _wrappers_pb2.StringValue | None = ..., input: _wrappers_pb2.StringValue | None = ..., scheduledStartTimestamp: _timestamp_pb2.Timestamp | None = ..., + orchestrationIdReusePolicy: _orchestration_pb2.OrchestrationIdReusePolicy | None = ..., executionId: _wrappers_pb2.StringValue | None = ..., tags: _abc.Mapping[_builtins.str, _builtins.str] | None = ..., parentTraceContext: _orchestration_pb2.TraceContext | None = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "input", b"input", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "input", b"input", "orchestrationIdReusePolicy", b"orchestrationIdReusePolicy", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "version", b"version"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "input", b"input", "instanceId", b"instanceId", "name", b"name", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["executionId", b"executionId", "input", b"input", "instanceId", b"instanceId", "name", b"name", "orchestrationIdReusePolicy", b"orchestrationIdReusePolicy", "parentTraceContext", b"parentTraceContext", "scheduledStartTimestamp", b"scheduledStartTimestamp", "tags", b"tags", "version", b"version"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___CreateInstanceRequest: _TypeAlias = CreateInstanceRequest # noqa: Y015 @@ -280,8 +290,11 @@ class CreateInstanceResponse(_message.Message): *, instanceId: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CreateInstanceResponse: _TypeAlias = CreateInstanceResponse # noqa: Y015 @@ -299,8 +312,11 @@ class GetInstanceRequest(_message.Message): instanceId: _builtins.str = ..., getInputsAndOutputs: _builtins.bool = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["getInputsAndOutputs", b"getInputsAndOutputs", "instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceRequest: _TypeAlias = GetInstanceRequest # noqa: Y015 @@ -309,20 +325,21 @@ class GetInstanceResponse(_message.Message): DESCRIPTOR: _descriptor.Descriptor EXISTS_FIELD_NUMBER: _builtins.int - WORKFLOWSTATE_FIELD_NUMBER: _builtins.int + ORCHESTRATIONSTATE_FIELD_NUMBER: _builtins.int exists: _builtins.bool @_builtins.property - def workflowState(self) -> _orchestration_pb2.WorkflowState: ... + def orchestrationState(self) -> _orchestration_pb2.OrchestrationState: ... def __init__( self, *, exists: _builtins.bool = ..., - workflowState: _orchestration_pb2.WorkflowState | None = ..., + orchestrationState: _orchestration_pb2.OrchestrationState | None = ..., ) -> None: ... 
- _HasFieldArgType: _TypeAlias = _typing.Literal["workflowState", b"workflowState"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["orchestrationState", b"orchestrationState"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["exists", b"exists", "workflowState", b"workflowState"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["exists", b"exists", "orchestrationState", b"orchestrationState"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceResponse: _TypeAlias = GetInstanceResponse # noqa: Y015 @@ -348,6 +365,7 @@ class RaiseEventRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["input", b"input", "instanceId", b"instanceId", "name", b"name"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RaiseEventRequest: _TypeAlias = RaiseEventRequest # noqa: Y015 @@ -360,6 +378,11 @@ class RaiseEventResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RaiseEventResponse: _TypeAlias = RaiseEventResponse # noqa: Y015 @@ -385,6 +408,7 @@ class TerminateRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "output", b"output", "recursive", b"recursive"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TerminateRequest: _TypeAlias = TerminateRequest # noqa: Y015 @@ -397,6 +421,11 @@ class TerminateResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___TerminateResponse: _TypeAlias = TerminateResponse # noqa: Y015 @@ -419,6 +448,7 @@ class SuspendRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SuspendRequest: _TypeAlias = SuspendRequest # noqa: Y015 @@ -431,6 +461,11 @@ class SuspendResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___SuspendResponse: _TypeAlias = SuspendResponse # noqa: Y015 @@ -453,6 +488,7 @@ class ResumeRequest(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
_ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId", "reason", b"reason"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ResumeRequest: _TypeAlias = ResumeRequest # noqa: Y015 @@ -465,6 +501,11 @@ class ResumeResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___ResumeResponse: _TypeAlias = ResumeResponse # noqa: Y015 @@ -538,6 +579,7 @@ class PurgeInstanceFilter(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["createdTimeFrom", b"createdTimeFrom", "createdTimeTo", b"createdTimeTo", "runtimeStatus", b"runtimeStatus"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___PurgeInstanceFilter: _TypeAlias = PurgeInstanceFilter # noqa: Y015 @@ -560,6 +602,7 @@ class PurgeInstancesResponse(_message.Message): def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["deletedInstanceCount", b"deletedInstanceCount", "isComplete", b"isComplete"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___PurgeInstancesResponse: _TypeAlias = PurgeInstancesResponse # noqa: Y015 @@ -570,6 +613,11 @@ class GetWorkItemsRequest(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
+ _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetWorkItemsRequest: _TypeAlias = GetWorkItemsRequest # noqa: Y015 @@ -577,26 +625,26 @@ Global___GetWorkItemsRequest: _TypeAlias = GetWorkItemsRequest # noqa: Y015 class WorkItem(_message.Message): DESCRIPTOR: _descriptor.Descriptor - WORKFLOWREQUEST_FIELD_NUMBER: _builtins.int + ORCHESTRATORREQUEST_FIELD_NUMBER: _builtins.int ACTIVITYREQUEST_FIELD_NUMBER: _builtins.int COMPLETIONTOKEN_FIELD_NUMBER: _builtins.int completionToken: _builtins.str @_builtins.property - def workflowRequest(self) -> Global___WorkflowRequest: ... + def orchestratorRequest(self) -> Global___OrchestratorRequest: ... @_builtins.property def activityRequest(self) -> Global___ActivityRequest: ... def __init__( self, *, - workflowRequest: Global___WorkflowRequest | None = ..., + orchestratorRequest: Global___OrchestratorRequest | None = ..., activityRequest: Global___ActivityRequest | None = ..., completionToken: _builtins.str = ..., ) -> None: ... - _HasFieldArgType: _TypeAlias = _typing.Literal["activityRequest", b"activityRequest", "request", b"request", "workflowRequest", b"workflowRequest"] # noqa: Y015 + _HasFieldArgType: _TypeAlias = _typing.Literal["activityRequest", b"activityRequest", "orchestratorRequest", b"orchestratorRequest", "request", b"request"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... 
- _ClearFieldArgType: _TypeAlias = _typing.Literal["activityRequest", b"activityRequest", "completionToken", b"completionToken", "request", b"request", "workflowRequest", b"workflowRequest"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["activityRequest", b"activityRequest", "completionToken", b"completionToken", "orchestratorRequest", b"orchestratorRequest", "request", b"request"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... - _WhichOneofReturnType_request: _TypeAlias = _typing.Literal["workflowRequest", "activityRequest"] # noqa: Y015 + _WhichOneofReturnType_request: _TypeAlias = _typing.Literal["orchestratorRequest", "activityRequest"] # noqa: Y015 _WhichOneofArgType_request: _TypeAlias = _typing.Literal["request", b"request"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType_request) -> _WhichOneofReturnType_request | None: ... @@ -611,6 +659,11 @@ class CompleteTaskResponse(_message.Message): def __init__( self, ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... + _ClearFieldArgType: _TypeAlias = _Never # noqa: Y015 + def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___CompleteTaskResponse: _TypeAlias = CompleteTaskResponse # noqa: Y015 @@ -629,8 +682,8 @@ class RerunWorkflowFromEventRequest(_message.Message): OVERWRITEINPUT_FIELD_NUMBER: _builtins.int NEWCHILDWORKFLOWINSTANCEID_FIELD_NUMBER: _builtins.int sourceInstanceID: _builtins.str - """sourceInstanceID is the workflow instance ID to rerun. Can be a top - level instance, or child workflow instance. + """sourceInstanceID is the orchestration instance ID to rerun. Can be a top + level instance, or sub-orchestration instance. 
""" eventID: _builtins.int """the event id to start the new workflow instance from.""" @@ -695,14 +748,17 @@ class RerunWorkflowFromEventResponse(_message.Message): *, newInstanceID: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["newInstanceID", b"newInstanceID"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___RerunWorkflowFromEventResponse: _TypeAlias = RerunWorkflowFromEventResponse # noqa: Y015 @_typing.final class ListInstanceIDsRequest(_message.Message): - """ListInstanceIDsRequest is used to list all workflow instances.""" + """ListInstanceIDsRequest is used to list all orchestration instances.""" DESCRIPTOR: _descriptor.Descriptor @@ -772,8 +828,8 @@ Global___ListInstanceIDsResponse: _TypeAlias = ListInstanceIDsResponse # noqa: @_typing.final class GetInstanceHistoryRequest(_message.Message): - """GetInstanceHistoryRequest is used to get the full history of a - workflow instance. + """GetInstanceHistoryRequest is used to get the full history of an + orchestration instance. """ DESCRIPTOR: _descriptor.Descriptor @@ -785,8 +841,11 @@ class GetInstanceHistoryRequest(_message.Message): *, instanceId: _builtins.str = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["instanceId", b"instanceId"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
Global___GetInstanceHistoryRequest: _TypeAlias = GetInstanceHistoryRequest # noqa: Y015 @@ -804,7 +863,10 @@ class GetInstanceHistoryResponse(_message.Message): *, events: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., ) -> None: ... + _HasFieldArgType: _TypeAlias = _Never # noqa: Y015 + def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... _ClearFieldArgType: _TypeAlias = _typing.Literal["events", b"events"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... Global___GetInstanceHistoryResponse: _TypeAlias = GetInstanceHistoryResponse # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py index 85712b33c..917d74523 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/orchestrator_service_pb2_grpc.py @@ -6,7 +6,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from dapr.ext.workflow._durabletask.internal import orchestrator_service_pb2 as orchestrator__service__pb2 -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False @@ -97,12 +97,7 @@ def __init__(self, channel): _registered_method=True) self.CompleteOrchestratorTask = channel.unary_unary( '/TaskHubSidecarService/CompleteOrchestratorTask', - request_serializer=orchestrator__service__pb2.WorkflowResponse.SerializeToString, - response_deserializer=orchestrator__service__pb2.CompleteTaskResponse.FromString, - _registered_method=True) - self.CompleteWorkflowTask = channel.unary_unary( - '/TaskHubSidecarService/CompleteWorkflowTask', - 
request_serializer=orchestrator__service__pb2.WorkflowResponse.SerializeToString, + request_serializer=orchestrator__service__pb2.OrchestratorResponse.SerializeToString, response_deserializer=orchestrator__service__pb2.CompleteTaskResponse.FromString, _registered_method=True) self.RerunWorkflowFromEvent = channel.unary_unary( @@ -133,56 +128,56 @@ def Hello(self, request, context): raise NotImplementedError('Method not implemented!') def StartInstance(self, request, context): - """Starts a new workflow instance. + """Starts a new orchestration instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetInstance(self, request, context): - """Gets the status of an existing workflow instance. + """Gets the status of an existing orchestration instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def WaitForInstanceStart(self, request, context): - """Waits for a workflow instance to reach a running or completion state. + """Waits for an orchestration instance to reach a running or completion state. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def WaitForInstanceCompletion(self, request, context): - """Waits for a workflow instance to reach a completion state (completed, failed, terminated, etc.). + """Waits for an orchestration instance to reach a completion state (completed, failed, terminated, etc.). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RaiseEvent(self, request, context): - """Raises an event to a running workflow instance. + """Raises an event to a running orchestration instance. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def TerminateInstance(self, request, context): - """Terminates a running workflow instance. + """Terminates a running orchestration instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SuspendInstance(self, request, context): - """Suspends a running workflow instance. + """Suspends a running orchestration instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ResumeInstance(self, request, context): - """Resumes a suspended workflow instance. + """Resumes a suspended orchestration instance. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -207,15 +202,7 @@ def CompleteActivityTask(self, request, context): raise NotImplementedError('Method not implemented!') def CompleteOrchestratorTask(self, request, context): - """Deprecated: Use CompleteWorkflowTask instead. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CompleteWorkflowTask(self, request, context): - """Completes a workflow work item. 
- """ + """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') @@ -304,12 +291,7 @@ def add_TaskHubSidecarServiceServicer_to_server(servicer, server): ), 'CompleteOrchestratorTask': grpc.unary_unary_rpc_method_handler( servicer.CompleteOrchestratorTask, - request_deserializer=orchestrator__service__pb2.WorkflowResponse.FromString, - response_serializer=orchestrator__service__pb2.CompleteTaskResponse.SerializeToString, - ), - 'CompleteWorkflowTask': grpc.unary_unary_rpc_method_handler( - servicer.CompleteWorkflowTask, - request_deserializer=orchestrator__service__pb2.WorkflowResponse.FromString, + request_deserializer=orchestrator__service__pb2.OrchestratorResponse.FromString, response_serializer=orchestrator__service__pb2.CompleteTaskResponse.SerializeToString, ), 'RerunWorkflowFromEvent': grpc.unary_unary_rpc_method_handler( @@ -677,34 +659,7 @@ def CompleteOrchestratorTask(request, request, target, '/TaskHubSidecarService/CompleteOrchestratorTask', - orchestrator__service__pb2.WorkflowResponse.SerializeToString, - orchestrator__service__pb2.CompleteTaskResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True) - - @staticmethod - def CompleteWorkflowTask(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary( - request, - target, - '/TaskHubSidecarService/CompleteWorkflowTask', - orchestrator__service__pb2.WorkflowResponse.SerializeToString, + orchestrator__service__pb2.OrchestratorResponse.SerializeToString, orchestrator__service__pb2.CompleteTaskResponse.FromString, options, channel_credentials, diff --git 
a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py index 4e1561f4e..e9d0e0e5b 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.py @@ -28,7 +28,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xbc\x05\n\x14WorkflowRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12S\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32:.durabletask.protos.backend.v1.WorkflowRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 \x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f 
\x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"\\\n\x1bWorkflowRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 \x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10targetInstanceId\x18\x02 \x01(\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13runtime_state.proto\x12\x1d\x64urabletask.protos.backend.v1\x1a\x13orchestration.proto\x1a\x14history_events.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"_\n\x13RuntimeStateStalled\x12\x1e\n\x06reason\x18\x01 \x01(\x0e\x32\x0e.StalledReason\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x0e\n\x0c_description\"\xc6\x05\n\x19OrchestrationRuntimeState\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12 \n\tnewEvents\x18\x02 \x03(\x0b\x32\r.HistoryEvent\x12 \n\toldEvents\x18\x03 \x03(\x0b\x32\r.HistoryEvent\x12#\n\x0cpendingTasks\x18\x04 \x03(\x0b\x32\r.HistoryEvent\x12$\n\rpendingTimers\x18\x05 \x03(\x0b\x32\r.HistoryEvent\x12X\n\x0fpendingMessages\x18\x06 \x03(\x0b\x32?.durabletask.protos.backend.v1.OrchestrationRuntimeStateMessage\x12*\n\nstartEvent\x18\x07 \x01(\x0b\x32\x16.ExecutionStartedEvent\x12\x30\n\x0e\x63ompletedEvent\x18\x08 \x01(\x0b\x32\x18.ExecutionCompletedEvent\x12/\n\x0b\x63reatedTime\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flastUpdatedTime\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rcompletedTime\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0e\x63ontinuedAsNew\x18\x0c \x01(\x08\x12\x13\n\x0bisSuspended\x18\r \x01(\x08\x12\x32\n\x0c\x63ustomStatus\x18\x0e \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12H\n\x07stalled\x18\x0f \x01(\x0b\x32\x32.durabletask.protos.backend.v1.RuntimeStateStalledH\x00\x88\x01\x01\x42\n\n\x08_stalled\"a\n OrchestrationRuntimeStateMessage\x12#\n\x0chistoryEvent\x18\x01 
\x01(\x0b\x32\r.HistoryEvent\x12\x18\n\x10TargetInstanceID\x18\x02 \x01(\tBV\n+io.dapr.durabletask.implementation.protobufZ\x0b/api/protos\xaa\x02\x19\x44\x61pr.DurableTask.Protobufb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -38,8 +38,8 @@ _globals['DESCRIPTOR']._serialized_options = b'\n+io.dapr.durabletask.implementation.protobufZ\013/api/protos\252\002\031Dapr.DurableTask.Protobuf' _globals['_RUNTIMESTATESTALLED']._serialized_start=162 _globals['_RUNTIMESTATESTALLED']._serialized_end=257 - _globals['_WORKFLOWRUNTIMESTATE']._serialized_start=260 - _globals['_WORKFLOWRUNTIMESTATE']._serialized_end=960 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_start=962 - _globals['_WORKFLOWRUNTIMESTATEMESSAGE']._serialized_end=1054 + _globals['_ORCHESTRATIONRUNTIMESTATE']._serialized_start=260 + _globals['_ORCHESTRATIONRUNTIMESTATE']._serialized_end=970 + _globals['_ORCHESTRATIONRUNTIMESTATEMESSAGE']._serialized_start=972 + _globals['_ORCHESTRATIONRUNTIMESTATEMESSAGE']._serialized_end=1069 # @@protoc_insertion_point(module_scope) diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi index fd0b75f80..8ff145605 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2.pyi @@ -26,10 +26,10 @@ from dapr.ext.workflow._durabletask.internal import orchestration_pb2 as _orches import sys import typing as _typing -if sys.version_info >= (3, 10): - from typing import TypeAlias as _TypeAlias +if sys.version_info >= (3, 11): + from typing import TypeAlias as _TypeAlias, Never as _Never else: - from typing_extensions import TypeAlias as _TypeAlias + from typing_extensions import TypeAlias as _TypeAlias, Never as _Never DESCRIPTOR: _descriptor.FileDescriptor @@ -58,8 +58,8 @@ 
class RuntimeStateStalled(_message.Message): Global___RuntimeStateStalled: _TypeAlias = RuntimeStateStalled # noqa: Y015 @_typing.final -class WorkflowRuntimeState(_message.Message): - """WorkflowRuntimeState holds the current state for a workflow.""" +class OrchestrationRuntimeState(_message.Message): + """OrchestrationRuntimeState holds the current state for an orchestration.""" DESCRIPTOR: _descriptor.Descriptor @@ -90,7 +90,7 @@ class WorkflowRuntimeState(_message.Message): @_builtins.property def pendingTimers(self) -> _containers.RepeatedCompositeFieldContainer[_history_events_pb2.HistoryEvent]: ... @_builtins.property - def pendingMessages(self) -> _containers.RepeatedCompositeFieldContainer[Global___WorkflowRuntimeStateMessage]: ... + def pendingMessages(self) -> _containers.RepeatedCompositeFieldContainer[Global___OrchestrationRuntimeStateMessage]: ... @_builtins.property def startEvent(self) -> _history_events_pb2.ExecutionStartedEvent: ... @_builtins.property @@ -113,7 +113,7 @@ class WorkflowRuntimeState(_message.Message): oldEvents: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., pendingTasks: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., pendingTimers: _abc.Iterable[_history_events_pb2.HistoryEvent] | None = ..., - pendingMessages: _abc.Iterable[Global___WorkflowRuntimeStateMessage] | None = ..., + pendingMessages: _abc.Iterable[Global___OrchestrationRuntimeStateMessage] | None = ..., startEvent: _history_events_pb2.ExecutionStartedEvent | None = ..., completedEvent: _history_events_pb2.ExecutionCompletedEvent | None = ..., createdTime: _timestamp_pb2.Timestamp | None = ..., @@ -132,28 +132,29 @@ class WorkflowRuntimeState(_message.Message): _WhichOneofArgType__stalled: _TypeAlias = _typing.Literal["_stalled", b"_stalled"] # noqa: Y015 def WhichOneof(self, oneof_group: _WhichOneofArgType__stalled) -> _WhichOneofReturnType__stalled | None: ... 
-Global___WorkflowRuntimeState: _TypeAlias = WorkflowRuntimeState # noqa: Y015 +Global___OrchestrationRuntimeState: _TypeAlias = OrchestrationRuntimeState # noqa: Y015 @_typing.final -class WorkflowRuntimeStateMessage(_message.Message): - """WorkflowRuntimeStateMessage holds a HistoryEvent payload and the target instance ID.""" +class OrchestrationRuntimeStateMessage(_message.Message): + """OrchestrationRuntimeStateMessage holds an OrchestratorMessage and the target instance ID.""" DESCRIPTOR: _descriptor.Descriptor HISTORYEVENT_FIELD_NUMBER: _builtins.int TARGETINSTANCEID_FIELD_NUMBER: _builtins.int - targetInstanceId: _builtins.str + TargetInstanceID: _builtins.str @_builtins.property def historyEvent(self) -> _history_events_pb2.HistoryEvent: ... def __init__( self, *, historyEvent: _history_events_pb2.HistoryEvent | None = ..., - targetInstanceId: _builtins.str = ..., + TargetInstanceID: _builtins.str = ..., ) -> None: ... _HasFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent"] # noqa: Y015 def HasField(self, field_name: _HasFieldArgType) -> _builtins.bool: ... - _ClearFieldArgType: _TypeAlias = _typing.Literal["historyEvent", b"historyEvent", "targetInstanceId", b"targetInstanceId"] # noqa: Y015 + _ClearFieldArgType: _TypeAlias = _typing.Literal["TargetInstanceID", b"TargetInstanceID", "historyEvent", b"historyEvent"] # noqa: Y015 def ClearField(self, field_name: _ClearFieldArgType) -> None: ... + def WhichOneof(self, oneof_group: _Never) -> None: ... 
-Global___WorkflowRuntimeStateMessage: _TypeAlias = WorkflowRuntimeStateMessage # noqa: Y015 +Global___OrchestrationRuntimeStateMessage: _TypeAlias = OrchestrationRuntimeStateMessage # noqa: Y015 diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py index bb1e985eb..56526119e 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/_durabletask/internal/runtime_state_pb2_grpc.py @@ -4,7 +4,7 @@ import warnings -GRPC_GENERATED_VERSION = '1.76.0' +GRPC_GENERATED_VERSION = '1.80.0' GRPC_VERSION = grpc.__version__ _version_not_supported = False diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py index ceb8672be..3e522cad9 100644 --- a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/__init__.py @@ -14,7 +14,13 @@ """ from .dapr_workflow_client import DaprWorkflowClient +from .mcp import DaprMCPClient + +# Re-export MCPToolDef so async users don't need to import from the sync module. +from dapr.ext.workflow.mcp import MCPToolDef __all__ = [ 'DaprWorkflowClient', + 'DaprMCPClient', + 'MCPToolDef', ] diff --git a/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py new file mode 100644 index 000000000..5afa0ffa2 --- /dev/null +++ b/ext/dapr-ext-workflow/dapr/ext/workflow/aio/mcp.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +# Copyright 2026 The Dapr Authors +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Async variant of :class:`~dapr.ext.workflow.mcp.DaprMCPClient`."""

# NOTE: the module docstring must precede ``from __future__ import`` — a
# string literal that follows any other statement is not bound to
# ``__doc__``, so the previous ordering silently dropped the docstring.
from __future__ import annotations

import logging
import uuid
from typing import Optional, Set

from dapr.ext.workflow.aio.dapr_workflow_client import DaprWorkflowClient
from dapr.ext.workflow.mcp import (
    MCP_WORKFLOW_PREFIX,
    _DaprMCPClientBase,
    _MCP_METHOD_LIST_TOOLS,
)
from dapr.ext.workflow.workflow_state import WorkflowStatus

logger = logging.getLogger(__name__)


class DaprMCPClient(_DaprMCPClientBase):
    """Async framework-agnostic client for discovering MCP tools via Dapr workflows.

    This is the async counterpart of :class:`dapr.ext.workflow.mcp.DaprMCPClient`.
    All methods that interact with the Dapr sidecar are ``async``.

    Args:
        timeout_in_seconds: Maximum seconds to wait for each ``ListTools``
            workflow to complete.
        allowed_tools: Optional set of tool names to keep.
        wf_client: Optional pre-configured async :class:`DaprWorkflowClient`.

    Example::

        from dapr.ext.workflow.aio import DaprMCPClient

        client = DaprMCPClient()
        await client.connect("weather")
        tools = client.get_all_tools()
    """

    def __init__(
        self,
        *,
        timeout_in_seconds: int = 60,
        allowed_tools: Optional[Set[str]] = None,
        wf_client: Optional[DaprWorkflowClient] = None,
    ) -> None:
        super().__init__(
            timeout_in_seconds=timeout_in_seconds,
            allowed_tools=allowed_tools,
        )
        # A caller-supplied client wins; otherwise create one with defaults.
        self._wf_client = wf_client or DaprWorkflowClient()

    async def connect(self, mcpserver_name: str) -> None:
        """Discover tools from a Dapr MCPServer resource.

        Schedules ``dapr.internal.mcp.<server>.ListTools``, awaits workflow
        completion, and caches the resulting :class:`MCPToolDef` list.

        Args:
            mcpserver_name: Name of the ``MCPServer`` Dapr resource (must
                match the ``metadata.name`` in the MCPServer YAML).

        Raises:
            RuntimeError: If the workflow times out or ends with a non-COMPLETED
                status.
            ValueError: If *mcpserver_name* is empty.
        """
        if not mcpserver_name or not mcpserver_name.strip():
            raise ValueError("mcpserver_name must be a non-empty string")

        instance_id = str(uuid.uuid4())
        # TODO(@sicoyle): reminder to add a func like I have in durabletask-go to use for here instead of building like this!
        workflow_name = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_LIST_TOOLS}"

        logger.debug("Scheduling %s (instance=%s)", workflow_name, instance_id)

        await self._wf_client.schedule_new_workflow(
            workflow=workflow_name,
            input={"mcpServerName": mcpserver_name},
            instance_id=instance_id,
        )

        state = await self._wf_client.wait_for_workflow_completion(
            instance_id=instance_id,
            timeout_in_seconds=self._timeout,
            fetch_payloads=True,
        )

        if state is None:
            raise RuntimeError(
                f"ListTools workflow for MCPServer '{mcpserver_name}' "
                f"timed out after {self._timeout}s"
            )

        if state.runtime_status != WorkflowStatus.COMPLETED:
            raise RuntimeError(
                f"ListTools workflow for MCPServer '{mcpserver_name}' "
                f"ended with status {state.runtime_status.name!r}: "
                f"{state.serialized_output or ''}"
            )

        self._process_list_tools_result(mcpserver_name, state.serialized_output)
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
DaprMCPClient — framework-agnostic client for discovering and cataloguing
MCP tools exposed by Dapr MCPServer resources.

The client schedules Dapr's built-in workflow orchestrations
(``dapr.internal.mcp.<server>.ListTools`` / ``CallTool``) and returns
plain :class:`MCPToolDef` dataclasses that any agent framework can consume.

Usage::

    from dapr.ext.workflow import DaprMCPClient

    client = DaprMCPClient()
    client.connect("weather")
    for tool in client.get_all_tools():
        print(tool.name, tool.description)
"""

# NOTE: the module docstring must precede ``from __future__ import`` — a
# string literal that follows any other statement is not bound to
# ``__doc__``, so the previous ordering silently dropped the docstring.
from __future__ import annotations

import json
import logging
import uuid
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Set

from dapr.ext.workflow.dapr_workflow_client import DaprWorkflowClient
from dapr.ext.workflow.workflow_state import WorkflowStatus

logger = logging.getLogger(__name__)

# MCP workflow name constants — mirrors the proto enums in
# dapr/dapr/dapr/proto/workflows/v1/mcp.proto as plain strings.
MCP_WORKFLOW_PREFIX: str = "dapr.internal.mcp."
"""Prefix for all built-in MCP workflow orchestrations."""

_MCP_METHOD_LIST_TOOLS = ".ListTools"
_MCP_METHOD_CALL_TOOL = ".CallTool"


# TODO(@sicoyle): see if I can use the mcp pkg class instead for this?
@dataclass(frozen=True)
class MCPToolDef:
    """Framework-agnostic description of a single MCP tool.

    Returned by :meth:`DaprMCPClient.get_all_tools` and consumed by
    agent frameworks to build their own tool wrappers.

    Attributes:
        name: The MCP tool name as returned by the server (e.g. ``get_weather``).
        description: Human-readable description of what the tool does.
        input_schema: JSON Schema dict describing the tool's input parameters.
        server_name: Name of the Dapr ``MCPServer`` resource that hosts this tool.
        call_tool_workflow: Pre-computed workflow name for invoking this tool
            (e.g. ``dapr.internal.mcp.weather.CallTool.get_weather``).
    """

    name: str
    description: str
    input_schema: Dict[str, Any] = field(default_factory=dict)
    server_name: str = ""
    call_tool_workflow: str = ""


class _DaprMCPClientBase:
    """Shared state and getters for sync/async MCP clients."""

    def __init__(
        self,
        *,
        timeout_in_seconds: int = 60,
        allowed_tools: Optional[Set[str]] = None,
    ) -> None:
        if timeout_in_seconds <= 0:
            raise ValueError("timeout_in_seconds must be a positive integer")
        self._timeout = timeout_in_seconds
        self._allowed_tools = allowed_tools
        # server name -> tools discovered from that server (insertion order
        # preserved, so get_connected_servers() reflects connect() order).
        self._server_tools: Dict[str, List[MCPToolDef]] = {}

    def _process_list_tools_result(
        self, mcpserver_name: str, serialized_output: Optional[str]
    ) -> None:
        """Parse a ListTools workflow output and cache the MCPToolDef list."""
        try:
            result = json.loads(serialized_output) if serialized_output else {}
        except json.JSONDecodeError as exc:
            raise RuntimeError(
                f"ListTools workflow for MCPServer '{mcpserver_name}' returned "
                f"malformed JSON: {exc}"
            ) from exc

        tools: List[MCPToolDef] = []
        for tool_def in result.get("tools", []):
            name = tool_def.get("name", "")
            if self._allowed_tools is not None and name not in self._allowed_tools:
                logger.debug("Skipping tool '%s' (not in allowed_tools)", name)
                continue
            # Workflow name includes the tool name for per-tool observability:
            # dapr.internal.mcp.<server>.CallTool.<tool>
            call_tool_wf = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_CALL_TOOL}.{name}"
            tools.append(
                MCPToolDef(
                    name=name,
                    description=tool_def.get("description", ""),
                    input_schema=tool_def.get("inputSchema") or {},
                    server_name=mcpserver_name,
                    call_tool_workflow=call_tool_wf,
                )
            )

        self._server_tools[mcpserver_name] = tools
        logger.info(
            "Connected to MCPServer '%s': %d tool(s) loaded",
            mcpserver_name,
            len(tools),
        )

    def get_all_tools(self) -> List[MCPToolDef]:
        """Return all cached tools from every connected MCPServer."""
        return [t for tools in self._server_tools.values() for t in tools]

    def get_server_tools(self, server_name: str) -> List[MCPToolDef]:
        """Return cached tools for a specific MCPServer."""
        return list(self._server_tools.get(server_name, []))

    def get_connected_servers(self) -> List[str]:
        """Return the names of all MCPServers connected so far."""
        return list(self._server_tools.keys())


class DaprMCPClient(_DaprMCPClientBase):
    """Framework-agnostic client for discovering MCP tools via Dapr workflows.

    This client schedules Dapr's built-in workflow orchestrations
    (``ListTools`` / ``CallTool``) via :class:`DaprWorkflowClient`.
    It returns :class:`MCPToolDef` dataclasses — plain data objects
    with no framework dependencies — that any agent framework can convert
    to its own tool type.

    Args:
        timeout_in_seconds: Maximum seconds to wait for each ``ListTools``
            workflow to complete. Defaults to 60.
        allowed_tools: Optional set of tool names to keep. When provided,
            only tools whose name appears in this set are included in the
            catalogue. ``None`` (default) keeps all tools.
        wf_client: Optional pre-configured :class:`DaprWorkflowClient`.
            If omitted, a new client is created with default settings.

    Example::

        client = DaprMCPClient()
        client.connect("weather")
        tools = client.get_all_tools()  # List[MCPToolDef]

        # Each framework converts MCPToolDef to its own tool type:
        for t in tools:
            print(f"{t.name}: {t.call_tool_workflow}")
    """

    def __init__(
        self,
        *,
        timeout_in_seconds: int = 60,
        allowed_tools: Optional[Set[str]] = None,
        wf_client: Optional[DaprWorkflowClient] = None,
    ) -> None:
        super().__init__(
            timeout_in_seconds=timeout_in_seconds,
            allowed_tools=allowed_tools,
        )
        self._wf_client = wf_client or DaprWorkflowClient()

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------

    def connect(self, mcpserver_name: str) -> None:
        """Discover tools from a Dapr MCPServer resource.

        Schedules ``dapr.internal.mcp.<server>.ListTools``, blocks until the
        workflow completes, and caches the resulting :class:`MCPToolDef` list.

        Args:
            mcpserver_name: Name of the ``MCPServer`` Dapr resource (must
                match the ``metadata.name`` in the MCPServer YAML).

        Raises:
            RuntimeError: If the workflow times out or ends with a non-COMPLETED
                status.
            ValueError: If *mcpserver_name* is empty.
        """
        if not mcpserver_name or not mcpserver_name.strip():
            raise ValueError("mcpserver_name must be a non-empty string")

        instance_id = str(uuid.uuid4())
        # TODO(@sicoyle): reminder to add a func like I have in durabletask-go to use for here instead of building like this!
        workflow_name = f"{MCP_WORKFLOW_PREFIX}{mcpserver_name}{_MCP_METHOD_LIST_TOOLS}"

        logger.debug("Scheduling %s (instance=%s)", workflow_name, instance_id)

        self._wf_client.schedule_new_workflow(
            workflow=workflow_name,
            input={"mcpServerName": mcpserver_name},
            instance_id=instance_id,
        )

        state = self._wf_client.wait_for_workflow_completion(
            instance_id=instance_id,
            timeout_in_seconds=self._timeout,
            fetch_payloads=True,
        )

        if state is None:
            raise RuntimeError(
                f"ListTools workflow for MCPServer '{mcpserver_name}' "
                f"timed out after {self._timeout}s"
            )

        if state.runtime_status != WorkflowStatus.COMPLETED:
            raise RuntimeError(
                f"ListTools workflow for MCPServer '{mcpserver_name}' "
                f"ended with status {state.runtime_status.name!r}: "
                f"{state.serialized_output or ''}"
            )

        self._process_list_tools_result(mcpserver_name, state.serialized_output)
"""Utility for converting MCP JSON Schema definitions to Pydantic models."""

# NOTE: the module docstring must precede ``from __future__ import`` — a
# string literal that follows any other statement is not bound to
# ``__doc__``, so the previous ordering silently dropped the docstring.
from __future__ import annotations

import logging
from typing import Any, Dict, List, Optional, Type, get_args

from pydantic import BaseModel, Field, create_model

logger = logging.getLogger(__name__)

# Mapping from JSON Schema types to Python types.
TYPE_MAPPING = {
    "string": str,
    "number": float,
    "integer": int,
    "boolean": bool,
    "object": dict,
    "array": list,
    "null": type(None),
}

# TODO(@sicoyle): see if I can remove this and use something from official modelcontextprotocol python-sdk instead???
def create_pydantic_model_from_schema(
    schema: Dict[str, Any], model_name: str
) -> Type[BaseModel]:
    """Create a Pydantic model from a JSON Schema definition.

    This function converts a JSON Schema object (commonly used in MCP tool
    definitions) to a Pydantic model that can be used for argument validation.

    Args:
        schema: JSON Schema dictionary containing type information.
        model_name: Name for the generated model class.

    Returns:
        A dynamically created Pydantic model class.

    Raises:
        ValueError: If the schema is invalid or cannot be converted.
    """
    logger.debug("Creating Pydantic model '%s' from schema", model_name)

    try:
        properties = schema.get("properties", {})
        required = set(schema.get("required", []))

        # Handle schemas that wrap arguments in a 'kwargs' field.
        # Some MCP tools use this pattern — unwrap to accept flat arguments.
        if (
            len(properties) == 1
            and "kwargs" in properties
            and properties["kwargs"].get("type") == "object"
            and "properties" in properties["kwargs"]
        ):
            logger.debug(
                "Detected 'kwargs' wrapper in schema for '%s', unwrapping", model_name
            )
            kwargs_schema = properties["kwargs"]
            properties = kwargs_schema["properties"]
            required = set(kwargs_schema.get("required", []))

        fields: Dict[str, Any] = {}

        for field_name, field_props in properties.items():
            # Handle anyOf/oneOf for nullable/union fields.
            if "anyOf" in field_props or "oneOf" in field_props:
                variants = field_props.get("anyOf") or field_props.get("oneOf")
                types = [v.get("type", "string") for v in variants]
                has_null = "null" in types
                non_null_variants = [v for v in variants if v.get("type") != "null"]
                if non_null_variants:
                    # Use the first non-null variant as the primary type;
                    # arrays and objects get more specific handling.
                    primary_type = non_null_variants[0].get("type", "string")
                    field_type = TYPE_MAPPING.get(primary_type, str)
                    if primary_type == "array" and "items" in non_null_variants[0]:
                        item_type = non_null_variants[0]["items"].get("type", "string")
                        field_type = List[TYPE_MAPPING.get(item_type, str)]
                    elif primary_type == "object":
                        field_type = dict
                else:
                    # All variants were null — fall back to str so the field
                    # still validates (it will be wrapped in Optional below).
                    field_type = str
                if has_null:
                    field_type = Optional[field_type]
            else:
                json_type = field_props.get("type", "string")
                field_type = TYPE_MAPPING.get(json_type, str)
                if json_type == "array" and "items" in field_props:
                    item_type = field_props["items"].get("type", "string")
                    field_type = List[TYPE_MAPPING.get(item_type, str)]

            if field_name in required:
                default = ...
            else:
                default = None
                # Wrap optional fields in Optional unless NoneType is already
                # among the type's arguments (the anyOf/oneOf path above may
                # have produced an Optional).  The previous check compared
                # ``__origin__ is Optional``, which is never true because
                # Optional[X] normalizes to Union — so it always re-wrapped
                # (harmless only because Optional is idempotent).
                if type(None) not in get_args(field_type):
                    field_type = Optional[field_type]

            field_description = field_props.get("description", "")
            fields[field_name] = (
                field_type,
                Field(default, description=field_description),
            )

        return create_model(model_name, **fields)

    except Exception as e:
        logger.error("Failed to create model from schema: %s", e)
        raise ValueError(f"Invalid schema: {e}") from e
+""" + +import json +import unittest +from datetime import datetime +from unittest import mock +from unittest.mock import MagicMock + +from dapr.ext.workflow._durabletask import client +from dapr.ext.workflow.mcp import DaprMCPClient, MCPToolDef, MCP_WORKFLOW_PREFIX +from dapr.ext.workflow.workflow_state import WorkflowState, WorkflowStatus + + +def _make_completed_state(output_json: dict) -> WorkflowState: + """Create a WorkflowState that simulates a COMPLETED workflow.""" + inner = client.WorkflowState( + instance_id="test-id", + name="test-workflow", + runtime_status=client.OrchestrationStatus.COMPLETED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output=json.dumps(output_json), + serialized_custom_status=None, + failure_details=None, + ) + return WorkflowState(inner) + + +def _make_failed_state() -> WorkflowState: + """Create a WorkflowState that simulates a FAILED workflow.""" + inner = client.WorkflowState( + instance_id="test-id", + name="test-workflow", + runtime_status=client.OrchestrationStatus.FAILED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output="error details", + serialized_custom_status=None, + failure_details=None, + ) + return WorkflowState(inner) + + +SAMPLE_LIST_TOOLS_RESPONSE = { + "tools": [ + { + "name": "get_weather", + "description": "Get current weather for a location.", + "inputSchema": { + "type": "object", + "properties": { + "location": {"type": "string", "description": "City name"}, + }, + "required": ["location"], + }, + }, + { + "name": "get_forecast", + "description": "Get multi-day forecast.", + "inputSchema": { + "type": "object", + "properties": { + "location": {"type": "string"}, + "days": {"type": "integer"}, + }, + "required": ["location"], + }, + }, + ] +} + + +class TestMCPToolDef(unittest.TestCase): + """Tests for the MCPToolDef dataclass.""" + + def test_frozen(self): + tool = MCPToolDef( + name="test", + 
description="desc", + input_schema={"type": "object"}, + server_name="srv", + call_tool_workflow="dapr.internal.mcp.srv.CallTool", + ) + with self.assertRaises(AttributeError): + tool.name = "changed" + + def test_defaults(self): + tool = MCPToolDef(name="test", description="desc") + self.assertEqual(tool.input_schema, {}) + self.assertEqual(tool.server_name, "") + self.assertEqual(tool.call_tool_workflow, "") + + +class TestDaprMCPClientConnect(unittest.TestCase): + """Tests for DaprMCPClient.connect().""" + + def _make_client(self, wf_client: MagicMock) -> DaprMCPClient: + return DaprMCPClient(timeout_in_seconds=30, wf_client=wf_client) + + def test_connect_schedules_correct_workflow(self): + """connect() should schedule dapr.internal.mcp..ListTools.""" + mock_wf = MagicMock() + mock_wf.schedule_new_workflow.return_value = "inst-1" + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + mock_wf.schedule_new_workflow.assert_called_once() + call_kwargs = mock_wf.schedule_new_workflow.call_args + self.assertEqual( + call_kwargs.kwargs["workflow"], + "dapr.internal.mcp.weather.ListTools", + ) + self.assertEqual( + call_kwargs.kwargs["input"], + {"mcpServerName": "weather"}, + ) + + def test_connect_caches_tools(self): + """connect() should cache MCPToolDef objects.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 2) + self.assertIsInstance(tools[0], MCPToolDef) + self.assertEqual(tools[0].name, "get_weather") + self.assertEqual(tools[1].name, "get_forecast") + + def test_connect_sets_server_name_and_workflow(self): + """Each MCPToolDef should have server_name and call_tool_workflow set.""" + mock_wf = 
MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + tool = mcp_client.get_all_tools()[0] + self.assertEqual(tool.server_name, "weather") + self.assertEqual( + tool.call_tool_workflow, + "dapr.internal.mcp.weather.CallTool.get_weather", + ) + + def test_connect_preserves_description_and_schema(self): + """MCPToolDef should carry the original description and inputSchema.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("weather") + + tool = mcp_client.get_all_tools()[0] + self.assertEqual(tool.description, "Get current weather for a location.") + self.assertIn("properties", tool.input_schema) + + def test_connect_timeout_raises(self): + """connect() should raise RuntimeError on timeout (None state).""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = None + + mcp_client = self._make_client(mock_wf) + with self.assertRaises(RuntimeError) as ctx: + mcp_client.connect("weather") + self.assertIn("timed out", str(ctx.exception)) + + def test_connect_failed_status_raises(self): + """connect() should raise RuntimeError on FAILED workflow status.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_failed_state() + + mcp_client = self._make_client(mock_wf) + with self.assertRaises(RuntimeError) as ctx: + mcp_client.connect("weather") + self.assertIn("FAILED", str(ctx.exception)) + + def test_connect_empty_tools(self): + """connect() should handle empty tools list gracefully.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + {"tools": []} + ) + + mcp_client = self._make_client(mock_wf) + mcp_client.connect("empty-server") + + 
self.assertEqual(len(mcp_client.get_all_tools()), 0) + self.assertIn("empty-server", mcp_client.get_connected_servers()) + + +class TestDaprMCPClientFiltering(unittest.TestCase): + """Tests for allowed_tools filtering.""" + + def test_allowed_tools_filters(self): + """Only tools in allowed_tools should be kept.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = DaprMCPClient( + allowed_tools={"get_weather"}, + wf_client=mock_wf, + ) + mcp_client.connect("weather") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 1) + self.assertEqual(tools[0].name, "get_weather") + + def test_allowed_tools_none_keeps_all(self): + """allowed_tools=None should keep all tools.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = _make_completed_state( + SAMPLE_LIST_TOOLS_RESPONSE + ) + + mcp_client = DaprMCPClient(allowed_tools=None, wf_client=mock_wf) + mcp_client.connect("weather") + + self.assertEqual(len(mcp_client.get_all_tools()), 2) + + +class TestDaprMCPClientMultiServer(unittest.TestCase): + """Tests for connecting to multiple MCPServer resources.""" + + def test_multiple_servers_accumulate(self): + """Tools from multiple connect() calls should accumulate.""" + mock_wf = MagicMock() + + weather_response = _make_completed_state(SAMPLE_LIST_TOOLS_RESPONSE) + local_response = _make_completed_state({ + "tools": [ + {"name": "search_files", "description": "Search files."}, + ] + }) + mock_wf.wait_for_workflow_completion.side_effect = [ + weather_response, + local_response, + ] + + mcp_client = DaprMCPClient(wf_client=mock_wf) + mcp_client.connect("weather") + mcp_client.connect("local-tools") + + self.assertEqual(len(mcp_client.get_all_tools()), 3) + self.assertEqual(len(mcp_client.get_server_tools("weather")), 2) + self.assertEqual(len(mcp_client.get_server_tools("local-tools")), 1) + self.assertEqual( + 
mcp_client.get_connected_servers(), + ["weather", "local-tools"], + ) + + def test_get_server_tools_unknown_returns_empty(self): + """get_server_tools() for unknown server returns empty list.""" + mock_wf = MagicMock() + mcp_client = DaprMCPClient(wf_client=mock_wf) + self.assertEqual(mcp_client.get_server_tools("nonexistent"), []) + + +class TestDaprMCPClientValidation(unittest.TestCase): + """Tests for input validation.""" + + def test_init_zero_timeout_raises(self): + with self.assertRaises(ValueError): + DaprMCPClient(timeout_in_seconds=0, wf_client=MagicMock()) + + def test_init_negative_timeout_raises(self): + with self.assertRaises(ValueError): + DaprMCPClient(timeout_in_seconds=-1, wf_client=MagicMock()) + + def test_connect_empty_server_name_raises(self): + mcp_client = DaprMCPClient(wf_client=MagicMock()) + with self.assertRaises(ValueError): + mcp_client.connect("") + + def test_connect_whitespace_server_name_raises(self): + mcp_client = DaprMCPClient(wf_client=MagicMock()) + with self.assertRaises(ValueError): + mcp_client.connect(" ") + + def test_connect_malformed_json_raises(self): + """connect() should raise RuntimeError on malformed JSON output.""" + mock_wf = MagicMock() + inner = client.WorkflowState( + instance_id="test", + name="test", + runtime_status=client.OrchestrationStatus.COMPLETED, + created_at=datetime.now(), + last_updated_at=datetime.now(), + serialized_input=None, + serialized_output="not valid json{{{", + serialized_custom_status=None, + failure_details=None, + ) + mock_wf.wait_for_workflow_completion.return_value = WorkflowState(inner) + + mcp_client = DaprMCPClient(wf_client=mock_wf) + with self.assertRaises(RuntimeError) as ctx: + mcp_client.connect("weather") + self.assertIn("malformed JSON", str(ctx.exception)) + + def test_connect_missing_tool_name_uses_empty_string(self): + """Tools without a 'name' field should use empty string.""" + mock_wf = MagicMock() + mock_wf.wait_for_workflow_completion.return_value = 
_make_completed_state({ + "tools": [{"description": "No name tool"}] + }) + + mcp_client = DaprMCPClient(wf_client=mock_wf) + mcp_client.connect("server") + + tools = mcp_client.get_all_tools() + self.assertEqual(len(tools), 1) + self.assertEqual(tools[0].name, "") + + +class TestMCPWorkflowPrefix(unittest.TestCase): + """Tests for the workflow naming constant.""" + + def test_prefix_value(self): + self.assertEqual(MCP_WORKFLOW_PREFIX, "dapr.internal.mcp.") + + def test_list_tools_name(self): + name = f"{MCP_WORKFLOW_PREFIX}weather.ListTools" + self.assertEqual(name, "dapr.internal.mcp.weather.ListTools") + + def test_call_tool_name(self): + # CallTool workflows include the tool name as a suffix: + # dapr.internal.mcp.<server>.CallTool.<tool>. + name = f"{MCP_WORKFLOW_PREFIX}weather.CallTool.get_forecast" + self.assertEqual(name, "dapr.internal.mcp.weather.CallTool.get_forecast") + + +if __name__ == "__main__": + unittest.main() diff --git a/ext/dapr-ext-workflow/tests/test_mcp_schema.py b/ext/dapr-ext-workflow/tests/test_mcp_schema.py new file mode 100644 index 000000000..328f9b104 --- /dev/null +++ b/ext/dapr-ext-workflow/tests/test_mcp_schema.py @@ -0,0 +1,245 @@ +# -*- coding: utf-8 -*- + +""" +Copyright 2026 The Dapr Authors +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import unittest +from typing import Optional + +from pydantic import BaseModel + +from dapr.ext.workflow.mcp_schema import create_pydantic_model_from_schema + + +class TestBasicTypes(unittest.TestCase): + """Tests for basic JSON Schema type conversions.""" + + def test_string_field(self): + schema = { + "type": "object", + "properties": {"name": {"type": "string", "description": "A name"}}, + "required": ["name"], + } + Model = create_pydantic_model_from_schema(schema, "TestModel") + instance = Model(name="Alice") + self.assertEqual(instance.name, "Alice") + + def test_integer_field(self): + schema = { + "type": "object", + "properties": {"count": {"type": "integer"}}, + "required": ["count"], + } + Model = create_pydantic_model_from_schema(schema, "IntModel") + instance = Model(count=42) + self.assertEqual(instance.count, 42) + + def test_number_field(self): + schema = { + "type": "object", + "properties": {"price": {"type": "number"}}, + "required": ["price"], + } + Model = create_pydantic_model_from_schema(schema, "NumModel") + instance = Model(price=9.99) + self.assertAlmostEqual(instance.price, 9.99) + + def test_boolean_field(self): + schema = { + "type": "object", + "properties": {"active": {"type": "boolean"}}, + "required": ["active"], + } + Model = create_pydantic_model_from_schema(schema, "BoolModel") + instance = Model(active=True) + self.assertTrue(instance.active) + + def test_array_field(self): + schema = { + "type": "object", + "properties": { + "tags": {"type": "array", "items": {"type": "string"}} + }, + "required": ["tags"], + } + Model = create_pydantic_model_from_schema(schema, "ArrayModel") + instance = Model(tags=["a", "b"]) + self.assertEqual(instance.tags, ["a", "b"]) + + +class TestRequiredOptional(unittest.TestCase): + """Tests for required vs optional field handling.""" + + def test_required_field_has_no_default(self): + schema = { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": ["location"], + 
} + Model = create_pydantic_model_from_schema(schema, "ReqModel") + with self.assertRaises(Exception): + Model() # Missing required field + + def test_optional_field_defaults_to_none(self): + schema = { + "type": "object", + "properties": {"location": {"type": "string"}}, + "required": [], + } + Model = create_pydantic_model_from_schema(schema, "OptModel") + instance = Model() + self.assertIsNone(instance.location) + + def test_mixed_required_optional(self): + schema = { + "type": "object", + "properties": { + "location": {"type": "string"}, + "days": {"type": "integer"}, + }, + "required": ["location"], + } + Model = create_pydantic_model_from_schema(schema, "MixedModel") + instance = Model(location="Tokyo") + self.assertEqual(instance.location, "Tokyo") + self.assertIsNone(instance.days) + + +class TestAnyOfOneOf(unittest.TestCase): + """Tests for anyOf/oneOf nullable/union field handling.""" + + def test_anyof_nullable_string(self): + schema = { + "type": "object", + "properties": { + "label": { + "anyOf": [ + {"type": "string"}, + {"type": "null"}, + ] + } + }, + "required": ["label"], + } + Model = create_pydantic_model_from_schema(schema, "NullableModel") + instance = Model(label=None) + self.assertIsNone(instance.label) + instance2 = Model(label="hello") + self.assertEqual(instance2.label, "hello") + + def test_oneof_nullable_integer(self): + schema = { + "type": "object", + "properties": { + "count": { + "oneOf": [ + {"type": "integer"}, + {"type": "null"}, + ] + } + }, + "required": ["count"], + } + Model = create_pydantic_model_from_schema(schema, "OneOfModel") + instance = Model(count=5) + self.assertEqual(instance.count, 5) + + +class TestKwargsUnwrapping(unittest.TestCase): + """Tests for the kwargs wrapper unwrapping pattern.""" + + def test_kwargs_wrapper_is_unwrapped(self): + """Schemas wrapping args in a 'kwargs' field should be unwrapped.""" + schema = { + "type": "object", + "properties": { + "kwargs": { + "type": "object", + "properties": { + 
"city": {"type": "string"}, + "units": {"type": "string"}, + }, + "required": ["city"], + } + }, + } + Model = create_pydantic_model_from_schema(schema, "KwargsModel") + instance = Model(city="Seattle") + self.assertEqual(instance.city, "Seattle") + self.assertIsNone(instance.units) + + def test_non_kwargs_not_unwrapped(self): + """Schemas without the kwargs wrapper should not be affected.""" + schema = { + "type": "object", + "properties": { + "city": {"type": "string"}, + }, + "required": ["city"], + } + Model = create_pydantic_model_from_schema(schema, "FlatModel") + instance = Model(city="Tokyo") + self.assertEqual(instance.city, "Tokyo") + + +class TestEmptyAndEdgeCases(unittest.TestCase): + """Tests for edge cases.""" + + def test_empty_properties(self): + schema = {"type": "object", "properties": {}} + Model = create_pydantic_model_from_schema(schema, "EmptyModel") + instance = Model() + self.assertIsInstance(instance, BaseModel) + + def test_no_properties_key(self): + schema = {"type": "object"} + Model = create_pydantic_model_from_schema(schema, "NoPropsModel") + instance = Model() + self.assertIsInstance(instance, BaseModel) + + def test_description_preserved(self): + schema = { + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The city to query", + } + }, + "required": ["city"], + } + Model = create_pydantic_model_from_schema(schema, "DescModel") + field_info = Model.model_fields["city"] + self.assertEqual(field_info.description, "The city to query") + + def test_returns_pydantic_model_subclass(self): + schema = { + "type": "object", + "properties": {"x": {"type": "integer"}}, + "required": ["x"], + } + Model = create_pydantic_model_from_schema(schema, "SubclassCheck") + self.assertTrue(issubclass(Model, BaseModel)) + + def test_model_name_set(self): + schema = { + "type": "object", + "properties": {"x": {"type": "integer"}}, + "required": ["x"], + } + Model = create_pydantic_model_from_schema(schema, "MyToolArgs") + 
self.assertEqual(Model.__name__, "MyToolArgs") + + +if __name__ == "__main__": + unittest.main()