Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/opencode/src/config/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -983,6 +983,7 @@ export namespace Config {
.record(
z.string(),
ModelsDev.Model.partial().extend({
prompt: z.string().optional().describe("Custom system prompt for this model"),
variants: z
.record(
z.string(),
Expand Down
2 changes: 2 additions & 0 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -723,6 +723,7 @@ export namespace Provider {
options: z.record(z.string(), z.any()),
headers: z.record(z.string(), z.string()),
release_date: z.string(),
prompt: z.string().optional(),
variants: z.record(z.string(), z.record(z.string(), z.any())).optional(),
})
.meta({
Expand Down Expand Up @@ -936,6 +937,7 @@ export namespace Provider {
headers: mergeDeep(existingModel?.headers ?? {}, model.headers ?? {}),
family: model.family ?? existingModel?.family ?? "",
release_date: model.release_date ?? existingModel?.release_date ?? "",
prompt: model.prompt ?? existingModel?.prompt,
variants: {},
}
const merged = mergeDeep(ProviderTransform.variants(parsedModel), model.variants ?? {})
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/src/session/system.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ export namespace SystemPrompt {
}

export function provider(model: Provider.Model) {
if (model.prompt) return [model.prompt]
if (model.api.id.includes("gpt-5")) return [PROMPT_CODEX]
if (model.api.id.includes("gpt-") || model.api.id.includes("o1") || model.api.id.includes("o3"))
return [PROMPT_BEAST]
Expand Down
106 changes: 106 additions & 0 deletions packages/opencode/test/provider/provider.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2247,6 +2247,112 @@ test("cloudflare-ai-gateway loads with env variables", async () => {
})
})

test("custom model prompt is stored on model", async () => {
  // Provider config that declares a per-model custom system prompt.
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      "custom-prompt-provider": {
        name: "Custom Prompt Provider",
        npm: "@ai-sdk/openai-compatible",
        env: [],
        models: {
          "custom-model": {
            name: "Custom Model",
            tool_call: true,
            limit: { context: 128000, output: 4096 },
            prompt: "You are a custom coding assistant.",
          },
        },
        options: { apiKey: "test-key" },
      },
    },
  }
  await using tmp = await tmpdir({
    init: async (dir) => {
      await Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config))
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      // The prompt from the config must survive onto the resolved model.
      const providers = await Provider.list()
      const model = providers[ProviderID.make("custom-prompt-provider")].models["custom-model"]
      expect(model.prompt).toBe("You are a custom coding assistant.")
    },
  })
})

test("model prompt is undefined when not specified", async () => {
  // Same shape of config as the custom-prompt test, but with no `prompt` key.
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      "no-prompt-provider": {
        name: "No Prompt Provider",
        npm: "@ai-sdk/openai-compatible",
        env: [],
        models: {
          "no-prompt-model": {
            name: "No Prompt Model",
            tool_call: true,
            limit: { context: 128000, output: 4096 },
          },
        },
        options: { apiKey: "test-key" },
      },
    },
  }
  await using tmp = await tmpdir({
    init: async (dir) => {
      await Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config))
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      // Absent in config means absent on the model — not "" or null.
      const providers = await Provider.list()
      const model = providers[ProviderID.make("no-prompt-provider")].models["no-prompt-model"]
      expect(model.prompt).toBeUndefined()
    },
  })
})

test("model prompt overrides existing model prompt", async () => {
  // Override a built-in (anthropic) model with a config-supplied prompt.
  const config = {
    $schema: "https://opencode.ai/config.json",
    provider: {
      anthropic: {
        models: {
          "claude-sonnet-4-20250514": {
            prompt: "You are a specialized Anthropic assistant.",
          },
        },
      },
    },
  }
  await using tmp = await tmpdir({
    init: async (dir) => {
      await Bun.write(path.join(dir, "opencode.json"), JSON.stringify(config))
    },
  })
  await Instance.provide({
    directory: tmp.path,
    init: async () => {
      // Credentials are required for the anthropic provider to load at all.
      Env.set("ANTHROPIC_API_KEY", "test-api-key")
    },
    fn: async () => {
      const providers = await Provider.list()
      const model = providers[ProviderID.anthropic].models["claude-sonnet-4-20250514"]
      expect(model.prompt).toBe("You are a specialized Anthropic assistant.")
    },
  })
})

test("cloudflare-ai-gateway forwards config metadata options", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
Expand Down
96 changes: 96 additions & 0 deletions packages/opencode/test/session/system.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
import { describe, expect, test } from "bun:test"
import { SystemPrompt } from "../../src/session/system"
import type { Provider } from "../../src/provider/provider"

// Build a fully-populated Provider.Model for tests. `api` is required;
// any other field supplied in `overrides` replaces the baseline below.
function makeModel(overrides: Partial<Provider.Model> & { api: Provider.Model["api"] }): Provider.Model {
  const { api, ...extra } = overrides
  // All media modalities disabled; only text in/out is enabled by default.
  const mediaOff = { audio: false, image: false, video: false, pdf: false }
  const base: Provider.Model = {
    // Branded ID types — plain strings are fine for test fixtures.
    id: "test-model" as any,
    providerID: "test-provider" as any,
    name: "Test Model",
    api,
    capabilities: {
      temperature: false,
      reasoning: false,
      attachment: false,
      toolcall: true,
      input: { text: true, ...mediaOff },
      output: { text: true, ...mediaOff },
      interleaved: false,
    },
    cost: { input: 0, output: 0, cache: { read: 0, write: 0 } },
    limit: { context: 128000, output: 4096 },
    status: "active",
    options: {},
    headers: {},
    release_date: "2025-01-01",
  }
  return { ...base, ...extra }
}

describe("SystemPrompt.provider", () => {
  test("returns custom prompt when model has prompt field", () => {
    const prompts = SystemPrompt.provider(
      makeModel({
        api: { id: "some-unknown-model", url: "", npm: "" },
        prompt: "You are a custom coding assistant.",
      }),
    )
    expect(prompts).toEqual(["You are a custom coding assistant."])
  })

  test("custom prompt takes priority over model ID matching", () => {
    // Even though the ID would match the claude branch, the explicit
    // prompt field must win.
    const prompts = SystemPrompt.provider(
      makeModel({
        api: { id: "claude-sonnet-4", url: "", npm: "" },
        prompt: "Custom prompt overrides claude matching.",
      }),
    )
    expect(prompts).toEqual(["Custom prompt overrides claude matching."])
  })

  test("falls back to claude prompt when no custom prompt and model ID contains claude", () => {
    const claudePrompts = SystemPrompt.provider(
      makeModel({ api: { id: "claude-sonnet-4", url: "", npm: "" } }),
    )
    expect(claudePrompts.length).toBe(1)
    expect(claudePrompts[0]).not.toBe("")
    // An unrecognized ID yields the generic fallback prompt; the claude
    // prompt must differ from it, proving the claude branch was taken.
    const genericPrompts = SystemPrompt.provider(
      makeModel({ api: { id: "some-unknown-model", url: "", npm: "" } }),
    )
    expect(claudePrompts[0]).not.toBe(genericPrompts[0])
  })

  test("falls back to gemini prompt for gemini models", () => {
    const prompts = SystemPrompt.provider(makeModel({ api: { id: "gemini-pro", url: "", npm: "" } }))
    expect(prompts.length).toBe(1)
    expect(prompts[0]).not.toBe("")
  })

  test("falls back to default prompt for unknown models without custom prompt", () => {
    const prompts = SystemPrompt.provider(
      makeModel({ api: { id: "totally-unknown-model", url: "", npm: "" } }),
    )
    expect(prompts.length).toBe(1)
    expect(prompts[0]).not.toBe("")
  })

  test("model without prompt field uses default matching", () => {
    const gptPrompts = SystemPrompt.provider(makeModel({ api: { id: "gpt-5-turbo", url: "", npm: "" } }))
    expect(gptPrompts.length).toBe(1)
    // gpt-5 should select the codex prompt, which differs from the
    // generic fallback returned for an unrecognized ID.
    const genericPrompts = SystemPrompt.provider(makeModel({ api: { id: "unknown", url: "", npm: "" } }))
    expect(gptPrompts[0]).not.toBe(genericPrompts[0])
  })
})
Loading