Skip to content

Commit 1ce2309

Browse files
committed
test: add Mistral/Devstral model detection tests for mergeToolResultText and rebase on main
1 parent f973900 commit 1ce2309

1 file changed

Lines changed: 118 additions & 0 deletions

File tree

src/api/providers/__tests__/openrouter.spec.ts

Lines changed: 118 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -80,10 +80,41 @@ vitest.mock("../fetchers/modelCache", () => ({
8080
excludedTools: ["existing_excluded"],
8181
includedTools: ["existing_included"],
8282
},
83+
"mistralai/mistral-large-latest": {
84+
maxTokens: 8192,
85+
contextWindow: 128000,
86+
supportsImages: false,
87+
supportsPromptCache: false,
88+
inputPrice: 2,
89+
outputPrice: 6,
90+
description: "Mistral Large",
91+
},
92+
"mistralai/devstral-2512": {
93+
maxTokens: 8192,
94+
contextWindow: 128000,
95+
supportsImages: false,
96+
supportsPromptCache: false,
97+
inputPrice: 1,
98+
outputPrice: 3,
99+
description: "Devstral",
100+
},
83101
})
84102
}),
85103
}))
86104

105+
const mockConvertToOpenAiMessages = vitest.fn().mockReturnValue([])
106+
107+
vitest.mock("../../transform/openai-format", async (importOriginal) => {
108+
const actual = (await importOriginal()) as Record<string, unknown>
109+
return {
110+
...actual,
111+
convertToOpenAiMessages: (...args: unknown[]) => {
112+
mockConvertToOpenAiMessages(...args)
113+
return (actual.convertToOpenAiMessages as (...a: unknown[]) => unknown)(...args)
114+
},
115+
}
116+
})
117+
87118
describe("OpenRouterHandler", () => {
88119
const mockOptions: ApiHandlerOptions = {
89120
openRouterApiKey: "test-key",
@@ -527,6 +558,93 @@ describe("OpenRouterHandler", () => {
527558
})
528559
})
529560

561+
describe("Mistral/Devstral model detection", () => {
562+
const createMockStream = () => ({
563+
async *[Symbol.asyncIterator]() {
564+
yield {
565+
id: "test-id",
566+
choices: [{ delta: { content: "response" } }],
567+
}
568+
yield {
569+
id: "test-id",
570+
choices: [{ delta: {} }],
571+
usage: { prompt_tokens: 10, completion_tokens: 5 },
572+
}
573+
},
574+
})
575+
576+
const setupMockCreate = () => {
577+
const mockCreate = vitest.fn().mockResolvedValue(createMockStream())
578+
;(OpenAI as any).prototype.chat = {
579+
completions: { create: mockCreate },
580+
} as any
581+
return mockCreate
582+
}
583+
584+
const messages: Anthropic.Messages.MessageParam[] = [{ role: "user" as const, content: "test" }]
585+
586+
beforeEach(() => {
587+
mockConvertToOpenAiMessages.mockClear()
588+
})
589+
590+
it("passes mergeToolResultText and normalizeToolCallId for Mistral models", async () => {
591+
const handler = new OpenRouterHandler({
592+
openRouterApiKey: "test-key",
593+
openRouterModelId: "mistralai/mistral-large-latest",
594+
})
595+
setupMockCreate()
596+
597+
const generator = handler.createMessage("system", messages)
598+
for await (const _chunk of generator) {
599+
// consume stream
600+
}
601+
602+
expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(
603+
messages,
604+
expect.objectContaining({
605+
normalizeToolCallId: expect.any(Function),
606+
mergeToolResultText: true,
607+
}),
608+
)
609+
})
610+
611+
it("passes mergeToolResultText and normalizeToolCallId for Devstral models", async () => {
612+
const handler = new OpenRouterHandler({
613+
openRouterApiKey: "test-key",
614+
openRouterModelId: "mistralai/devstral-2512",
615+
})
616+
setupMockCreate()
617+
618+
const generator = handler.createMessage("system", messages)
619+
for await (const _chunk of generator) {
620+
// consume stream
621+
}
622+
623+
expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(
624+
messages,
625+
expect.objectContaining({
626+
normalizeToolCallId: expect.any(Function),
627+
mergeToolResultText: true,
628+
}),
629+
)
630+
})
631+
632+
it("does not pass Mistral options for non-Mistral models", async () => {
633+
const handler = new OpenRouterHandler({
634+
openRouterApiKey: "test-key",
635+
openRouterModelId: "anthropic/claude-sonnet-4",
636+
})
637+
setupMockCreate()
638+
639+
const generator = handler.createMessage("system", messages)
640+
for await (const _chunk of generator) {
641+
// consume stream
642+
}
643+
644+
expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(messages, undefined)
645+
})
646+
})
647+
530648
describe("completePrompt", () => {
531649
it("returns correct response", async () => {
532650
const handler = new OpenRouterHandler(mockOptions)

0 commit comments

Comments (0)