From a77a75d41fb20ab6e5b83180ab50e0b1e84a0224 Mon Sep 17 00:00:00 2001 From: "Hamish M. Blair" <> Date: Sun, 15 Mar 2026 20:01:17 -0700 Subject: [PATCH 1/2] Add local server provider with automatic model discovery MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a custom loader for a "local" provider that auto-discovers models from any OpenAI-compatible local server (llama.cpp, ollama, vLLM, LM Studio, etc.) by querying the standard /v1/models endpoint at startup. Users configure only a baseURL and optional apiKey — no manual model listing required. Discovered models are merged with any manually configured models without overwriting them. Closes #6231 --- packages/opencode/src/provider/provider.ts | 43 ++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 349073197d7..cc8b4e6d7ed 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -668,6 +668,49 @@ export namespace Provider { }, } }, + async local(input) { + const baseURL = input.options?.baseURL + if (!baseURL) return { autoload: false } + try { + const url = `${String(baseURL).replace(/\/+$/, "")}/models` + const headers: Record<string, string> = { Accept: "application/json" } + const apiKey = input.options?.apiKey ?? 
input.key + if (apiKey) headers["Authorization"] = `Bearer ${apiKey}` + const res = await fetch(url, { headers, signal: AbortSignal.timeout(3000) }) + if (!res.ok) return { autoload: false } + const json = (await res.json()) as { data?: Array<{ id: string }> } + if (!json.data?.length) return { autoload: false } + for (const model of json.data) { + if (!model.id || input.models[model.id]) continue + input.models[model.id] = { + id: ModelID.make(model.id), + providerID: ProviderID.make("local"), + name: model.id, + api: { id: model.id, url: baseURL, npm: "@ai-sdk/openai-compatible" }, + status: "active", + headers: {}, + options: {}, + cost: { input: 0, output: 0, cache: { read: 0, write: 0 } }, + limit: { context: 0, output: 0 }, + capabilities: { + temperature: true, + reasoning: false, + attachment: false, + toolcall: true, + input: { text: true, audio: false, image: false, video: false, pdf: false }, + output: { text: true, audio: false, image: false, video: false, pdf: false }, + interleaved: false, + }, + family: "", + release_date: "", + } + } + log.info("auto-discovered local models", { count: json.data.length }) + return { autoload: true } + } catch { + return { autoload: false } + } + }, } export const Model = z From ddddd03fd02faeac897ba4bdd3f80f10c0fed7b1 Mon Sep 17 00:00:00 2001 From: "Hamish M. Blair" <> Date: Tue, 17 Mar 2026 13:25:43 -0700 Subject: [PATCH 2/2] fix(opencode): allow custom loaders without models.dev entry and add local provider tests The CUSTOM_LOADERS loop skipped providers not registered in models.dev, which prevented the local provider from ever being invoked. Create a stub Info as a fallback so custom loaders can bootstrap themselves. Adds three tests for the local provider: successful auto-discovery, unreachable endpoint, and missing baseURL. 
--- packages/opencode/src/provider/provider.ts | 11 +- packages/opencode/test/provider/local.test.ts | 127 ++++++++++++++++++ 2 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 packages/opencode/test/provider/local.test.ts diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index cc8b4e6d7ed..865731dbd7d 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1091,10 +1091,13 @@ export namespace Provider { for (const [id, fn] of Object.entries(CUSTOM_LOADERS)) { const providerID = ProviderID.make(id) if (disabled.has(providerID)) continue - const data = database[providerID] - if (!data) { - log.error("Provider does not exist in model list " + providerID) - continue + const data = database[providerID] ?? { + id: providerID, + name: id, + source: "custom" as const, + env: [], + options: configProviders.find(([k]) => k === id)?.[1]?.options ?? {}, + models: {}, } const result = await fn(data) if (result && (result.autoload || providers[providerID])) { diff --git a/packages/opencode/test/provider/local.test.ts b/packages/opencode/test/provider/local.test.ts new file mode 100644 index 00000000000..0a4fb9e9488 --- /dev/null +++ b/packages/opencode/test/provider/local.test.ts @@ -0,0 +1,127 @@ +import { test, expect, mock, afterEach } from "bun:test" +import path from "path" + +import { tmpdir } from "../fixture/fixture" +import { Instance } from "../../src/project/instance" +import { Provider } from "../../src/provider/provider" +import { ProviderID } from "../../src/provider/schema" + +const originalFetch = globalThis.fetch + +function mockFetch(handler: (url: string) => Response | undefined) { + ;(globalThis as any).fetch = mock((url: string | URL | Request) => { + const u = typeof url === "string" ? url : url instanceof URL ? 
url.href : url.url + const result = handler(u) + if (result) return Promise.resolve(result) + return originalFetch(url as RequestInfo, undefined) + }) +} + +afterEach(() => { + globalThis.fetch = originalFetch +}) + +test("local provider discovers models from /models endpoint", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + local: { + options: { + baseURL: "http://localhost:11434/v1", + }, + }, + }, + }), + ) + }, + }) + + mockFetch((url) => { + if (url === "http://localhost:11434/v1/models") { + return new Response( + JSON.stringify({ + data: [{ id: "llama-3-8b" }, { id: "mistral-7b" }], + }), + { status: 200, headers: { "Content-Type": "application/json" } }, + ) + } + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const providers = await Provider.list() + const local = providers["local"] + expect(local).toBeDefined() + expect(local.models["llama-3-8b"]).toBeDefined() + expect(local.models["mistral-7b"]).toBeDefined() + expect(local.models["llama-3-8b"].providerID).toBe(ProviderID.make("local")) + }, + }) +}) + +test("local provider not loaded when endpoint unreachable", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + local: { + options: { + baseURL: "http://localhost:9999/v1", + }, + }, + }, + }), + ) + }, + }) + + ;(globalThis as any).fetch = mock((url: string | URL | Request) => { + const u = typeof url === "string" ? url : url instanceof URL ? 
url.href : url.url + if (u === "http://localhost:9999/v1/models") { + return Promise.reject(new Error("ECONNREFUSED")) + } + return originalFetch(url as RequestInfo, undefined) + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const providers = await Provider.list() + expect(providers["local"]).toBeUndefined() + }, + }) +}) + +test("local provider not loaded without baseURL", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + local: { + options: {}, + }, + }, + }), + ) + }, + }) + + await Instance.provide({ + directory: tmp.path, + fn: async () => { + const providers = await Provider.list() + expect(providers["local"]).toBeUndefined() + }, + }) +})