diff --git a/web/src/llm-api/__tests__/kimi-tool-compat.test.ts b/web/src/llm-api/__tests__/kimi-tool-compat.test.ts new file mode 100644 index 000000000..9e4fbdabb --- /dev/null +++ b/web/src/llm-api/__tests__/kimi-tool-compat.test.ts @@ -0,0 +1,112 @@ +import { describe, expect, it } from 'bun:test' + +import { addKimiToolCompatibilityFields, isKimiModel } from '../kimi-tool-compat' + +import type { ChatCompletionRequestBody } from '../types' + +describe('addKimiToolCompatibilityFields', () => { + it('adds declaration ids and tool-result names without mutating input', () => { + const body: ChatCompletionRequestBody = { + model: 'moonshotai/kimi-k2.6', + messages: [ + { + role: 'assistant', + content: '', + tool_calls: [ + { + id: 'call_123', + type: 'function', + function: { + name: 'read_files', + arguments: JSON.stringify({ paths: ['README.md'] }), + }, + }, + ], + }, + { + role: 'tool', + tool_call_id: 'call_123', + content: JSON.stringify({ message: 'ok' }), + }, + ], + tools: [ + { + type: 'function', + function: { + name: 'read_files', + description: 'Read files', + parameters: { type: 'object' }, + }, + }, + ], + } + + const result = addKimiToolCompatibilityFields(body) + + expect(result.tools?.[0]).toEqual({ + id: 'tool_1', + type: 'function', + function: { + name: 'read_files', + description: 'Read files', + parameters: { type: 'object' }, + }, + }) + expect(result.messages[1]).toEqual({ + role: 'tool', + tool_call_id: 'call_123', + name: 'read_files', + content: JSON.stringify({ message: 'ok' }), + }) + expect(body.tools?.[0]).not.toHaveProperty('id') + expect(body.messages[1]).not.toHaveProperty('name') + }) + + it('preserves existing ids and names', () => { + const body: ChatCompletionRequestBody = { + model: 'moonshotai/kimi-k2.6', + messages: [ + { + role: 'assistant', + content: '', + tool_calls: [ + { + id: 'call_456', + type: 'function', + function: { + name: 'write_todos', + arguments: JSON.stringify({ todos: [] }), + }, + }, + ], + }, + { + 
role: 'tool', + tool_call_id: 'call_456', + name: 'existing_name', + content: '{}', + }, + ], + tools: [ + { + id: 'existing_tool_id', + type: 'function', + function: { + name: 'write_todos', + parameters: { type: 'object' }, + }, + }, + ], + } + + expect(addKimiToolCompatibilityFields(body)).toEqual(body) + }) +}) + +describe('isKimiModel', () => { + it('matches only Moonshot model ids', () => { + expect(isKimiModel('moonshotai/kimi-k2.6')).toBe(true) + expect(isKimiModel('anthropic/claude-sonnet-4.5')).toBe(false) + expect(isKimiModel(undefined)).toBe(false) + }) +}) diff --git a/web/src/llm-api/canopywave.ts b/web/src/llm-api/canopywave.ts index 9a5b2ba12..341bc239c 100644 --- a/web/src/llm-api/canopywave.ts +++ b/web/src/llm-api/canopywave.ts @@ -9,6 +9,7 @@ import { extractRequestMetadata, insertMessageToBigQuery, } from './helpers' +import { addKimiToolCompatibilityFields, isKimiModel } from './kimi-tool-compat' import type { UsageData } from './helpers' import type { InsertMessageBigqueryFn } from '@codebuff/common/types/contracts/bigquery' @@ -88,8 +89,11 @@ function createCanopyWaveRequest(params: { fetch: typeof globalThis.fetch }) { const { body, originalModel, fetch } = params + const providerBody = isKimiModel(originalModel) + ? 
addKimiToolCompatibilityFields(body) + : body const canopywaveBody: Record<string, unknown> = { - ...body, + ...providerBody, model: getCanopyWaveModelId(originalModel), } diff --git a/web/src/llm-api/kimi-tool-compat.ts b/web/src/llm-api/kimi-tool-compat.ts new file mode 100644 index 000000000..334a41b91 --- /dev/null +++ b/web/src/llm-api/kimi-tool-compat.ts @@ -0,0 +1,67 @@ +import type { ChatCompletionRequestBody } from './types' + +export function isKimiModel(model: unknown): model is string { + return typeof model === 'string' && model.startsWith('moonshotai/') +} + +function getToolCallNamesById( + messages: ChatCompletionRequestBody['messages'], +): Map<string, string> { + const namesById = new Map<string, string>() + + for (const message of messages) { + if (message.role !== 'assistant') { + continue + } + for (const toolCall of message.tool_calls ?? []) { + if (toolCall.id && toolCall.function.name) { + namesById.set(toolCall.id, toolCall.function.name) + } + } + } + + return namesById +} + +/** + * Kimi-compatible providers require two OpenAI-compatible extensions that are + * not part of the strict Chat Completions schema: ids on tool declarations and + * names on tool-result messages. 
+ */ +export function addKimiToolCompatibilityFields( + body: ChatCompletionRequestBody, +): ChatCompletionRequestBody { + const namesByToolCallId = getToolCallNamesById(body.messages) + + return { + ...body, + tools: body.tools?.map((tool, index) => { + if (tool.type !== 'function' || tool.id) { + return tool + } + return { + ...tool, + id: `tool_${index + 1}`, + } + }), + messages: body.messages.map((message) => { + if ( + message.role !== 'tool' || + message.name || + typeof message.tool_call_id !== 'string' + ) { + return message + } + + const name = namesByToolCallId.get(message.tool_call_id) + if (!name) { + return message + } + + return { + ...message, + name, + } + }), + } +} diff --git a/web/src/llm-api/openrouter.ts b/web/src/llm-api/openrouter.ts index 2762a60d8..bf7231abd 100644 --- a/web/src/llm-api/openrouter.ts +++ b/web/src/llm-api/openrouter.ts @@ -9,6 +9,7 @@ import { extractRequestMetadata, insertMessageToBigQuery, } from './helpers' +import { addKimiToolCompatibilityFields, isKimiModel } from './kimi-tool-compat' import { OpenRouterErrorResponseSchema, OpenRouterStreamChatCompletionChunkSchema, @@ -61,6 +62,10 @@ function createOpenRouterRequest(params: { fetch: typeof globalThis.fetch }) { const { body, openrouterApiKey, fetch } = params + const providerBody = isKimiModel(body.model) + ? 
addKimiToolCompatibilityFields(body) + : body + return fetch('https://openrouter.ai/api/v1/chat/completions', { method: 'POST', headers: { @@ -69,7 +74,7 @@ function createOpenRouterRequest(params: { 'X-Title': 'Codebuff', 'Content-Type': 'application/json', }, - body: JSON.stringify(body), + body: JSON.stringify(providerBody), // Use custom agent with extended headers timeout for deep-thinking models // @ts-expect-error - dispatcher is a valid undici option not in fetch types dispatcher: openrouterAgent, diff --git a/web/src/llm-api/types.ts b/web/src/llm-api/types.ts index b3bb1eaf9..dd3b89a4d 100644 --- a/web/src/llm-api/types.ts +++ b/web/src/llm-api/types.ts @@ -28,9 +28,21 @@ export interface ChatMessage { tool_call_id?: string } +export interface ChatCompletionTool { + id?: string + type: string + function?: { + name: string + description?: string + parameters?: unknown + strict?: boolean + } +} + export interface ChatCompletionRequestBody { model: string messages: ChatMessage[] + tools?: ChatCompletionTool[] stream?: boolean temperature?: number max_tokens?: number