8 changes: 6 additions & 2 deletions packages/opencode/src/session/llm.ts
@@ -32,6 +32,7 @@ export namespace LLM {
     sessionID: string
     model: Provider.Model
     agent: Agent.Info
+    permission?: PermissionNext.Ruleset
     system: string[]
     abort: AbortSignal
     messages: ModelMessage[]
@@ -255,8 +256,11 @@
     })
   }
 
-  async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "user">) {
-    const disabled = PermissionNext.disabled(Object.keys(input.tools), input.agent.permission)
+  async function resolveTools(input: Pick<StreamInput, "tools" | "agent" | "permission" | "user">) {
+    const disabled = PermissionNext.disabled(
+      Object.keys(input.tools),
+      PermissionNext.merge(input.agent.permission, input.permission ?? []),
+    )
     for (const tool of Object.keys(input.tools)) {
       if (input.user.tools?.[tool] === false || disabled.has(tool)) {
        delete input.tools[tool]
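Note: the merged ruleset decides which tools are dropped before the request is sent. As a minimal sketch of the precedence the new test relies on, assume rulesets are arrays of rules and that a later matching rule overrides an earlier one, so a prompt-level "allow" can undo an agent-level "deny". The Rule type and disabledTools helper below are hypothetical illustrations, not the actual PermissionNext API.

// Hypothetical sketch only -- not the real PermissionNext implementation.
// Assumes the last matching rule wins; "*" matches any tool name.
type Rule = { permission: string; pattern: string; action: "allow" | "deny" }

function disabledTools(tools: string[], agentRules: Rule[], promptRules: Rule[] = []): Set<string> {
  const merged = [...agentRules, ...promptRules] // prompt-level rules are appended last
  const disabled = new Set<string>()
  for (const name of tools) {
    // find the last rule that applies to this tool
    const match = [...merged].reverse().find((rule) => rule.permission === name || rule.permission === "*")
    if (match?.action === "deny") disabled.add(name)
  }
  return disabled
}

// Mirrors the new test below: the agent denies "question", the prompt-level
// ruleset allows it, so the tool should remain enabled.
disabledTools(
  ["question"],
  [{ permission: "question", pattern: "*", action: "deny" }],
  [{ permission: "question", pattern: "*", action: "allow" }],
) // => empty Set, so "question" stays in the tool list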
1 change: 1 addition & 0 deletions packages/opencode/src/session/prompt.ts
@@ -659,6 +659,7 @@ export namespace SessionPrompt {
       const result = await processor.process({
         user: lastUser,
         agent,
+        permission: session.permission,
         abort,
         sessionID,
         system,
92 changes: 91 additions & 1 deletion packages/opencode/test/session/llm.test.ts
@@ -1,6 +1,7 @@
 import { afterAll, beforeAll, beforeEach, describe, expect, test } from "bun:test"
 import path from "path"
-import type { ModelMessage } from "ai"
+import { tool, type ModelMessage } from "ai"
+import z from "zod"
 import { LLM } from "../../src/session/llm"
 import { Global } from "../../src/global"
 import { Instance } from "../../src/project/instance"
@@ -323,6 +324,95 @@ describe("session.llm.stream", () => {
     })
   })
 
+  test("keeps tools enabled by prompt permissions", async () => {
+    const server = state.server
+    if (!server) {
+      throw new Error("Server not initialized")
+    }
+
+    const providerID = "alibaba"
+    const modelID = "qwen-plus"
+    const fixture = await loadFixture(providerID, modelID)
+    const model = fixture.model
+
+    const request = waitRequest(
+      "/chat/completions",
+      new Response(createChatStream("Hello"), {
+        status: 200,
+        headers: { "Content-Type": "text/event-stream" },
+      }),
+    )
+
+    await using tmp = await tmpdir({
+      init: async (dir) => {
+        await Bun.write(
+          path.join(dir, "opencode.json"),
+          JSON.stringify({
+            $schema: "https://opencode.ai/config.json",
+            enabled_providers: [providerID],
+            provider: {
+              [providerID]: {
+                options: {
+                  apiKey: "test-key",
+                  baseURL: `${server.url.origin}/v1`,
+                },
+              },
+            },
+          }),
+        )
+      },
+    })
+
+    await Instance.provide({
+      directory: tmp.path,
+      fn: async () => {
+        const resolved = await Provider.getModel(providerID, model.id)
+        const sessionID = "session-test-tools"
+        const agent = {
+          name: "test",
+          mode: "primary",
+          options: {},
+          permission: [{ permission: "question", pattern: "*", action: "deny" }],
+        } satisfies Agent.Info
+
+        const user = {
+          id: "user-tools",
+          sessionID,
+          role: "user",
+          time: { created: Date.now() },
+          agent: agent.name,
+          model: { providerID, modelID: resolved.id },
+          tools: { question: true },
+        } satisfies MessageV2.User
+
+        const stream = await LLM.stream({
+          user,
+          sessionID,
+          model: resolved,
+          agent,
+          permission: [{ permission: "question", pattern: "*", action: "allow" }],
+          system: ["You are a helpful assistant."],
+          abort: new AbortController().signal,
+          messages: [{ role: "user", content: "Hello" }],
+          tools: {
+            question: tool({
+              description: "Ask a question",
+              inputSchema: z.object({}),
+              execute: async () => ({ output: "" }),
+            }),
+          },
+        })
+
+        for await (const _ of stream.fullStream) {
+        }
+
+        const capture = await request
+        const tools = capture.body.tools as Array<{ function?: { name?: string } }> | undefined
+        expect(tools?.some((item) => item.function?.name === "question")).toBe(true)
+      },
+    })
+  })
+
   test("sends responses API payload for OpenAI models", async () => {
     const server = state.server
     if (!server) {