From 01a77bb11e6cdc761fb7424eb4d6002512213966 Mon Sep 17 00:00:00 2001 From: PinkYuDeer Date: Wed, 15 Apr 2026 23:35:35 +0800 Subject: [PATCH 1/2] fix(provider): fix Anthropic custom headers and system prompt compatibility - Pass custom_headers via AsyncAnthropic's `default_headers` parameter instead of creating a separate httpx.AsyncClient. This avoids `isinstance` check failures when multiple httpx installations exist on sys.path (e.g. bundled Python + system Python). - Use list format for the `system` parameter (`[{"type": "text", ...}]`) instead of a plain string. The list format is supported by the official Anthropic API and is also compatible with third-party API proxies that reject the string format. Co-Authored-By: Claude Opus 4.6 --- astrbot/core/provider/sources/anthropic_source.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/astrbot/core/provider/sources/anthropic_source.py b/astrbot/core/provider/sources/anthropic_source.py index 83f2e16dba..184d5fa249 100644 --- a/astrbot/core/provider/sources/anthropic_source.py +++ b/astrbot/core/provider/sources/anthropic_source.py @@ -103,6 +103,7 @@ def _init_api_key(self, provider_config: dict) -> None: api_key=self.chosen_api_key, timeout=self.timeout, base_url=self.base_url, + default_headers=self.custom_headers, http_client=self._create_http_client(provider_config), ) @@ -111,9 +112,7 @@ def _create_http_client(self, provider_config: dict) -> httpx.AsyncClient | None proxy = provider_config.get("proxy", "") if proxy: logger.info(f"[Anthropic] 使用代理: {proxy}") - return httpx.AsyncClient(proxy=proxy, headers=self.custom_headers) - if self.custom_headers: - return httpx.AsyncClient(headers=self.custom_headers) + return httpx.AsyncClient(proxy=proxy) return None def _apply_thinking_config(self, payloads: dict) -> None: @@ -573,7 +572,7 @@ async def text_chat( # Anthropic has a different way of handling system prompts if system_prompt: - payloads["system"] = system_prompt
+ payloads["system"] = [{"type": "text", "text": system_prompt}] llm_response = None try: @@ -636,7 +635,7 @@ async def text_chat_stream( # Anthropic has a different way of handling system prompts if system_prompt: - payloads["system"] = system_prompt + payloads["system"] = [{"type": "text", "text": system_prompt}] async for llm_response in self._query_stream(payloads, func_tool): yield llm_response From 1f7ccd6975606bcb0b9225ab2308cf88f3f29870 Mon Sep 17 00:00:00 2001 From: PinkYuDeer Date: Wed, 15 Apr 2026 23:35:35 +0800 Subject: [PATCH 2/2] fix(provider): fix Anthropic custom headers and system prompt compatibility - Pass custom_headers via AsyncAnthropic's `default_headers` parameter instead of creating a separate httpx.AsyncClient. This avoids `isinstance` check failures when multiple httpx installations exist on sys.path (e.g. bundled Python + system Python). - Use list format for the `system` parameter (`[{"type": "text", ...}]`) instead of a plain string. The list format is supported by the official Anthropic API and is also compatible with third-party API proxies that reject the string format.
Co-Authored-By: Claude Opus 4.6 --- astrbot/core/provider/sources/anthropic_source.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/astrbot/core/provider/sources/anthropic_source.py b/astrbot/core/provider/sources/anthropic_source.py index 83f2e16dba..2a7dc716c3 100644 --- a/astrbot/core/provider/sources/anthropic_source.py +++ b/astrbot/core/provider/sources/anthropic_source.py @@ -103,6 +103,7 @@ def _init_api_key(self, provider_config: dict) -> None: api_key=self.chosen_api_key, timeout=self.timeout, base_url=self.base_url, + default_headers=self.custom_headers, http_client=self._create_http_client(provider_config), ) @@ -111,9 +112,7 @@ def _create_http_client(self, provider_config: dict) -> httpx.AsyncClient | None proxy = provider_config.get("proxy", "") if proxy: logger.info(f"[Anthropic] 使用代理: {proxy}") - return httpx.AsyncClient(proxy=proxy, headers=self.custom_headers) - if self.custom_headers: - return httpx.AsyncClient(headers=self.custom_headers) + return httpx.AsyncClient(proxy=proxy) return None def _apply_thinking_config(self, payloads: dict) -> None: @@ -573,7 +572,7 @@ async def text_chat( # Anthropic has a different way of handling system prompts if system_prompt: - payloads["system"] = system_prompt + payloads["system"] = [{"type": "text", "text": system_prompt}] if isinstance(system_prompt, str) else system_prompt llm_response = None try: @@ -636,7 +635,7 @@ async def text_chat_stream( # Anthropic has a different way of handling system prompts if system_prompt: - payloads["system"] = system_prompt + payloads["system"] = [{"type": "text", "text": system_prompt}] if isinstance(system_prompt, str) else system_prompt async for llm_response in self._query_stream(payloads, func_tool): yield llm_response