Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 16 additions & 2 deletions runtime/node/agent/providers/openai_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import base64
import hashlib
import re

import binascii
import os
Expand Down Expand Up @@ -383,18 +384,31 @@ def _deserialize_chat_response(self, response: Any) -> Message:
type="function"
))

content = self._get_attr(msg, "content") or ""
content = self._strip_thinking_tokens(content)

return Message(
role=MessageRole.ASSISTANT,
content=self._get_attr(msg, "content") or "",
content=content,
tool_calls=tool_calls
)

# Matches one complete <think>...</think> block plus any whitespace right after it.
# DOTALL lets the reasoning span multiple lines; ".*?" keeps the match non-greedy
# so several blocks in one message are removed independently.
_THINK_PATTERN = re.compile(r"<think>.*?</think>\s*", re.DOTALL)

@classmethod
def _strip_thinking_tokens(cls, text: str) -> str:
    """Remove ``<think>...</think>`` reasoning blocks from model output.

    Some models (e.g. DeepSeek-R1, MiniMax-M2.7) interleave chain-of-thought
    inside ``<think>`` tags; callers only want the final answer text.
    Input without an opening ``<think>`` tag is returned completely untouched;
    otherwise every closed block is deleted and the remainder is
    whitespace-trimmed at both ends.
    """
    # Cheap substring probe first so the common no-thinking case skips the regex.
    has_thinking = "<think>" in text
    if not has_thinking:
        return text
    without_blocks = cls._THINK_PATTERN.sub("", text)
    return without_blocks.strip()

def _append_chat_response_output(self, timeline: List[Any], response: Any) -> None:
"""Add chat response to timeline, preserving tool_calls (Chat API compatible)."""
msg = response.choices[0].message
content = self._strip_thinking_tokens(msg.content or "")
assistant_msg = {
"role": "assistant",
"content": msg.content or ""
"content": content
}

if getattr(msg, "tool_calls", None):
Expand Down