Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
dependencies = [
"uipath>=2.4.0, <2.5.0",
"uipath-runtime>=0.4.0, <0.5.0",
"uipath-runtime>=0.4.0,<0.5.0",
"langgraph>=1.0.0, <2.0.0",
"langchain-core>=1.2.5, <2.0.0",
"aiosqlite==0.21.0",
Expand Down Expand Up @@ -115,3 +115,7 @@ name = "testpypi"
url = "https://test.pypi.org/simple/"
publish-url = "https://test.pypi.org/legacy/"
explicit = true

[tool.uv.sources]
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

will remove

uipath = { path = "../uipath-python", editable = true }
uipath-runtime = { path = "../uipath-runtime-python", editable = true }
38 changes: 32 additions & 6 deletions src/uipath_langchain/agent/react/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@
from .router import (
create_route_agent,
)
from .router_conversational import (
create_route_agent_conversational,
create_user_message_wait_node,
)
from .terminate_node import (
create_terminate_node,
)
Expand Down Expand Up @@ -74,16 +78,18 @@ def create_agent(
os.environ["LANGCHAIN_RECURSION_LIMIT"] = str(config.recursion_limit)

agent_tools = list(tools)
flow_control_tools: list[BaseTool] = create_flow_control_tools(output_schema)
flow_control_tools: list[BaseTool] = (
[] if config.is_conversational else create_flow_control_tools(output_schema)
)
llm_tools: list[BaseTool] = [*agent_tools, *flow_control_tools]

init_node = create_init_node(messages, input_schema)
init_node = create_init_node(messages, input_schema, config.is_conversational)

tool_nodes = create_tool_node(agent_tools)
tool_nodes_with_guardrails = create_tools_guardrails_subgraph(
tool_nodes, guardrails
)
terminate_node = create_terminate_node(output_schema)
terminate_node = create_terminate_node(output_schema, config.is_conversational)

CompleteAgentGraphState = create_state_with_input(
input_schema if input_schema is not None else BaseModel
Expand All @@ -109,19 +115,39 @@ def create_agent(

builder.add_edge(START, AgentGraphNode.INIT)

llm_node = create_llm_node(model, llm_tools, config.thinking_messages_limit)
llm_node = create_llm_node(
model, llm_tools, config.thinking_messages_limit, config.is_conversational
)
llm_with_guardrails_subgraph = create_llm_guardrails_subgraph(
(AgentGraphNode.LLM, llm_node), guardrails
)
builder.add_node(AgentGraphNode.AGENT, llm_with_guardrails_subgraph)
builder.add_edge(AgentGraphNode.INIT, AgentGraphNode.AGENT)

tool_node_names = list(tool_nodes_with_guardrails.keys())
route_agent = create_route_agent(config.thinking_messages_limit)

if config.is_conversational:
route_agent = create_route_agent_conversational()
user_message_wait_node = create_user_message_wait_node()
builder.add_node(AgentGraphNode.USER_MESSAGE_WAIT, user_message_wait_node)
builder.add_edge(AgentGraphNode.USER_MESSAGE_WAIT, AgentGraphNode.AGENT)
target_node_names = [
AgentGraphNode.USER_MESSAGE_WAIT,
*tool_node_names,
AgentGraphNode.TERMINATE,
]
else:
route_agent = create_route_agent(config.thinking_messages_limit)
target_node_names = [
AgentGraphNode.AGENT,
*tool_node_names,
AgentGraphNode.TERMINATE,
]

builder.add_conditional_edges(
AgentGraphNode.AGENT,
route_agent,
[AgentGraphNode.AGENT, *tool_node_names, AgentGraphNode.TERMINATE],
target_node_names,
)

for tool_name in tool_node_names:
Expand Down
14 changes: 11 additions & 3 deletions src/uipath_langchain/agent/react/init_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from typing import Any, Callable, Sequence

from langchain_core.messages import HumanMessage, SystemMessage
from langgraph.types import Overwrite
from pydantic import BaseModel

from .job_attachments import (
Expand All @@ -14,12 +15,19 @@ def create_init_node(
messages: Sequence[SystemMessage | HumanMessage]
| Callable[[Any], Sequence[SystemMessage | HumanMessage]],
input_schema: type[BaseModel] | None,
is_conversational: bool = False,
):
def graph_state_init(state: Any) -> Any:
resolved_messages: Sequence[SystemMessage | HumanMessage] | Overwrite
if callable(messages):
resolved_messages = messages(state)
resolved_messages = list(messages(state))
else:
resolved_messages = messages
resolved_messages = list(messages)
if is_conversational:
# For conversational agents we need to reorder the messages so that the system message is first, followed by
# the initial user message. The initial user message is put in the state by UiPathLangGraphRuntime. The add
# reducer is used for the messages property in the state, so by default new messages are appended to the end.
resolved_messages = Overwrite([*resolved_messages, *state.messages])

schema = input_schema if input_schema is not None else BaseModel
job_attachments = get_job_attachments(schema, state)
Expand All @@ -28,7 +36,7 @@ def graph_state_init(state: Any) -> Any:
}

return {
"messages": list(resolved_messages),
"messages": resolved_messages,
"inner_state": {
"job_attachments": job_attachments_dict,
},
Expand Down
3 changes: 2 additions & 1 deletion src/uipath_langchain/agent/react/llm_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def create_llm_node(
model: BaseChatModel,
tools: Sequence[BaseTool] | None = None,
thinking_messages_limit: int = MAX_CONSECUTIVE_THINKING_MESSAGES,
is_conversational: bool = False
):
"""Create LLM node with dynamic tool_choice enforcement.

Expand All @@ -58,7 +59,7 @@ async def llm_node(state: AgentGraphState):

consecutive_thinking_messages = count_consecutive_thinking_messages(messages)

if bindable_tools and consecutive_thinking_messages >= thinking_messages_limit:
if not is_conversational and bindable_tools and consecutive_thinking_messages >= thinking_messages_limit:
llm = base_llm.bind(tool_choice=tool_choice_required_value)
else:
llm = base_llm
Expand Down
25 changes: 3 additions & 22 deletions src/uipath_langchain/agent/react/router.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@

from typing import Literal

from langchain_core.messages import AIMessage, AnyMessage, ToolCall
from langchain_core.messages import ToolCall
from uipath.agent.react import END_EXECUTION_TOOL, RAISE_ERROR_TOOL

from ..exceptions import AgentNodeRoutingException
from .router_utils import validate_last_message_is_AI
from .types import AgentGraphNode, AgentGraphState
from .utils import count_consecutive_thinking_messages

Expand All @@ -27,26 +28,6 @@ def __has_control_flow_tool(tool_calls: list[ToolCall]) -> bool:
return any(tc.get("name") in FLOW_CONTROL_TOOLS for tc in tool_calls)


def __validate_last_message_is_AI(messages: list[AnyMessage]) -> AIMessage:
"""Validate and return last message from state.

Raises:
AgentNodeRoutingException: If messages are empty or last message is not AIMessage
"""
if not messages:
raise AgentNodeRoutingException(
"No messages in state - cannot route after agent"
)

last_message = messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Last message is not AIMessage (type: {type(last_message).__name__}) - cannot route after agent"
)

return last_message


def create_route_agent(thinking_messages_limit: int = 0):
"""Create a routing function configured with thinking_messages_limit.

Expand Down Expand Up @@ -77,7 +58,7 @@ def route_agent(
AgentNodeRoutingException: When encountering unexpected state (empty messages, non-AIMessage, or excessive completions)
"""
messages = state.messages
last_message = __validate_last_message_is_AI(messages)
last_message = validate_last_message_is_AI(messages)

tool_calls = list(last_message.tool_calls) if last_message.tool_calls else []
tool_calls = __filter_control_flow_tool_calls(tool_calls)
Expand Down
60 changes: 60 additions & 0 deletions src/uipath_langchain/agent/react/router_conversational.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
"""Routing functions for conditional edges in the agent graph."""

import logging
from typing import Literal

from langgraph.types import interrupt
from uipath.platform.common.interrupt_models import UserMessageWait

from uipath_langchain.agent.react.router_utils import validate_last_message_is_AI

from .types import AgentGraphNode, AgentGraphState

logger = logging.getLogger(__name__)


def create_route_agent_conversational(
    thinking_messages_limit: int = 0, is_conversational: bool = False
):
    """Create a routing function for conversational agents.

    Routes between the agent and tool nodes until the agent response contains
    no tool calls, then routes to the USER_MESSAGE_WAIT node, which interrupts
    the graph to wait for the next user message.

    Args:
        thinking_messages_limit: Unused; kept for signature parity with
            ``create_route_agent`` — TODO confirm whether it should be honored.
        is_conversational: Unused; kept for backward compatibility.

    Returns:
        Routing function for LangGraph conditional edges
    """

    def route_agent_conversational(
        state: AgentGraphState,
    ) -> list[str] | Literal[AgentGraphNode.USER_MESSAGE_WAIT]:
        """Route after agent

        Routing logic:
        1. If the last AI message has tool calls, route to the specific tool
           nodes (return list of tool names for parallel execution)
        2. If no tool calls, route to user message wait node

        Returns:
            - list[str]: Tool node names for parallel execution
            - AgentGraphNode.USER_MESSAGE_WAIT: When there are no tool calls

        Raises:
            AgentNodeRoutingException: If messages are empty or the last
                message is not an AIMessage (raised by the validation helper)
        """
        last_message = validate_last_message_is_AI(state.messages)
        if last_message.tool_calls:
            # Fan out: one target per requested tool call.
            return [tc["name"] for tc in last_message.tool_calls]
        # No tool calls — the turn is over; wait for the user's next message.
        return AgentGraphNode.USER_MESSAGE_WAIT

    return route_agent_conversational


def create_user_message_wait_node():
    """Create the node that pauses the graph until the next user message.

    The node raises a LangGraph ``interrupt`` carrying a ``UserMessageWait``
    payload. When the graph is resumed, the resume value must be a mapping
    that contains a ``"messages"`` key with the new user message(s), which
    are appended to the state.
    """

    def user_message_wait(state: AgentGraphState):
        logger.info("Interrupting for user input...")
        resume_payload = interrupt(UserMessageWait())
        logger.info("User message wait interrupt completed.")
        if "messages" not in resume_payload:
            raise ValueError("UserMessageWait interrupt did not return messages.")
        return {"messages": resume_payload.get("messages")}

    return user_message_wait
26 changes: 26 additions & 0 deletions src/uipath_langchain/agent/react/router_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
"""Routing functions for conditional edges in the agent graph."""


from langchain_core.messages import AIMessage, AnyMessage

from ..exceptions import AgentNodeRoutingException


def validate_last_message_is_AI(messages: list[AnyMessage]) -> AIMessage:
"""Validate and return last message from state.

Raises:
AgentNodeRoutingException: If messages are empty or last message is not AIMessage
"""
if not messages:
raise AgentNodeRoutingException(
"No messages in state - cannot route after agent"
)

last_message = messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Last message is not AIMessage (type: {type(last_message).__name__}) - cannot route after agent"
)

return last_message
30 changes: 16 additions & 14 deletions src/uipath_langchain/agent/react/terminate_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ def _handle_agent_termination(termination: AgentTermination) -> NoReturn:

def create_terminate_node(
response_schema: type[BaseModel] | None = None,
is_conversational: bool = False
):
"""Handles Agent Graph termination for multiple sources and output or error propagation to Orchestrator.

Expand All @@ -60,23 +61,24 @@ def terminate_node(state: AgentGraphState):
if state.inner_state.termination:
_handle_agent_termination(state.inner_state.termination)

last_message = state.messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Expected last message to be AIMessage, got {type(last_message).__name__}"
)
if not is_conversational:
last_message = state.messages[-1]
if not isinstance(last_message, AIMessage):
raise AgentNodeRoutingException(
f"Expected last message to be AIMessage, got {type(last_message).__name__}"
)

for tool_call in last_message.tool_calls:
tool_name = tool_call["name"]
for tool_call in last_message.tool_calls:
tool_name = tool_call["name"]

if tool_name == END_EXECUTION_TOOL.name:
return _handle_end_execution(tool_call["args"], response_schema)
if tool_name == END_EXECUTION_TOOL.name:
return _handle_end_execution(tool_call["args"], response_schema)

if tool_name == RAISE_ERROR_TOOL.name:
_handle_raise_error(tool_call["args"])
if tool_name == RAISE_ERROR_TOOL.name:
_handle_raise_error(tool_call["args"])

raise AgentNodeRoutingException(
"No control flow tool call found in terminate node. Unexpected state."
)
raise AgentNodeRoutingException(
"No control flow tool call found in terminate node. Unexpected state."
)

return terminate_node
4 changes: 4 additions & 0 deletions src/uipath_langchain/agent/react/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ class AgentGraphNode(StrEnum):
TOOLS = "tools"
TERMINATE = "terminate"
GUARDED_TERMINATE = "guarded-terminate"
USER_MESSAGE_WAIT = "user-message-wait"


class AgentGraphConfig(BaseModel):
Expand All @@ -61,3 +62,6 @@ class AgentGraphConfig(BaseModel):
ge=0,
description="Max consecutive thinking messages before enforcing tool usage. 0 = force tools every time.",
)
is_conversational: bool = Field(
default=False, description="If set, creates a graph for conversational agents"
)
18 changes: 3 additions & 15 deletions src/uipath_langchain/chat/mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ def map_event(
Returns:
A UiPathConversationMessageEvent if the message should be emitted, None otherwise.
"""

# Format timestamp as ISO 8601 UTC with milliseconds: 2025-01-04T10:30:00.123Z
timestamp = (
datetime.now(timezone.utc)
Expand Down Expand Up @@ -307,21 +308,8 @@ def map_event(
)
]

# --- Fallback for other BaseMessage types ---
text_content = self._extract_text(message.content)
return [
UiPathConversationMessageEvent(
message_id=message.id,
start=UiPathConversationMessageStartEvent(
role="assistant", timestamp=timestamp
),
content_part=UiPathConversationContentPartEvent(
content_part_id=f"cp-{message.id}",
chunk=UiPathConversationContentPartChunkEvent(data=text_content),
),
end=UiPathConversationMessageEndEvent(),
)
]
# Don't send events for system or user messages. Agent messages are handled above.
return []


__all__ = ["UiPathChatMessagesMapper"]
Loading
Loading