From bf21d108f1510190eb0de25cd30ff2769dcd128c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Mon, 27 Apr 2026 16:41:11 +0200 Subject: [PATCH 01/19] fix(env): proxy directly to process.env instead of snapshotting --- packages/opencode/src/config/config.ts | 1 - packages/opencode/src/env/index.ts | 31 ++++++++++------------ packages/opencode/src/provider/provider.ts | 4 --- packages/opencode/test/preload.ts | 14 ++++++++++ 4 files changed, 28 insertions(+), 22 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index 1b6b58500707..69c9279ecb19 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -603,7 +603,6 @@ export const layer = Layer.effect( { concurrency: 2 }, ) if (Option.isSome(tokenOpt)) { - process.env["OPENCODE_CONSOLE_TOKEN"] = tokenOpt.value yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) } diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index a53d96def259..56acf3dcbb4b 100644 --- a/packages/opencode/src/env/index.ts +++ b/packages/opencode/src/env/index.ts @@ -1,5 +1,4 @@ import { Context, Effect, Layer } from "effect" -import { InstanceState } from "@/effect" type State = Record @@ -12,23 +11,21 @@ export interface Interface { export class Service extends Context.Service()("@opencode/Env") {} -export const layer = Layer.effect( +export const layer = Layer.succeed( Service, - Effect.gen(function* () { - const state = yield* InstanceState.make(Effect.fn("Env.state")(() => Effect.succeed({ ...process.env }))) - - const get = Effect.fn("Env.get")((key: string) => InstanceState.use(state, (env) => env[key])) - const all = Effect.fn("Env.all")(() => InstanceState.get(state)) - const set = Effect.fn("Env.set")(function* (key: string, value: string) { - const env = yield* InstanceState.get(state) - env[key] = value - }) - const remove = Effect.fn("Env.remove")(function* (key: string) { - 
const env = yield* InstanceState.get(state) - delete env[key] - }) - - return Service.of({ get, all, set, remove }) + Service.of({ + get: Effect.fn("Env.get")((key: string) => Effect.sync(() => process.env[key])), + all: Effect.fn("Env.all")(() => Effect.sync(() => ({ ...process.env }) as State)), + set: Effect.fn("Env.set")((key: string, value: string) => + Effect.sync(() => { + process.env[key] = value + }), + ), + remove: Effect.fn("Env.remove")((key: string) => + Effect.sync(() => { + delete process.env[key] + }), + ), }), ) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 9aa1b6304c12..b3568501cd53 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -257,8 +257,6 @@ function custom(dep: CustomDep): Record { const awsAccessKeyId = env["AWS_ACCESS_KEY_ID"] - // TODO: Using process.env directly because Env.set only updates a process.env shallow copy, - // until the scope of the Env API is clarified (test only or runtime?) const awsBearerToken = iife(() => { const envToken = process.env.AWS_BEARER_TOKEN_BEDROCK if (envToken) return envToken @@ -496,8 +494,6 @@ function custom(dep: CustomDep): Record { }), "sap-ai-core": Effect.fnUntraced(function* () { const auth = yield* dep.auth("sap-ai-core") - // TODO: Using process.env directly because Env.set only updates a shallow copy (not process.env), - // until the scope of the Env API is clarified (test only or runtime?) 
const envServiceKey = iife(() => { const envAICoreServiceKey = process.env.AICORE_SERVICE_KEY if (envAICoreServiceKey) return envAICoreServiceKey diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index 58dc2b0b48c5..0684e7b826f4 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -61,6 +61,10 @@ delete process.env["AWS_ACCESS_KEY_ID"] delete process.env["AWS_PROFILE"] delete process.env["AWS_REGION"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"] +delete process.env["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] +delete process.env["AWS_CONTAINER_CREDENTIALS_FULL_URI"] +delete process.env["AWS_WEB_IDENTITY_TOKEN_FILE"] +delete process.env["AWS_ROLE_ARN"] delete process.env["OPENROUTER_API_KEY"] delete process.env["LLM_GATEWAY_API_KEY"] delete process.env["GROQ_API_KEY"] @@ -72,6 +76,16 @@ delete process.env["DEEPSEEK_API_KEY"] delete process.env["FIREWORKS_API_KEY"] delete process.env["CEREBRAS_API_KEY"] delete process.env["SAMBANOVA_API_KEY"] +delete process.env["AICORE_SERVICE_KEY"] +delete process.env["AICORE_DEPLOYMENT_ID"] +delete process.env["AICORE_RESOURCE_GROUP"] +delete process.env["GOOGLE_APPLICATION_CREDENTIALS"] +delete process.env["CLOUDFLARE_ACCOUNT_ID"] +delete process.env["CLOUDFLARE_GATEWAY_ID"] +delete process.env["CLOUDFLARE_API_TOKEN"] +delete process.env["SINGLE_ENV_KEY"] +delete process.env["MULTI_ENV_KEY_1"] +delete process.env["FALLBACK_KEY"] delete process.env["OPENCODE_SERVER_PASSWORD"] delete process.env["OPENCODE_SERVER_USERNAME"] From 9b25e2b7da015226a2e73228800725c100be7e4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 12:54:33 +0200 Subject: [PATCH 02/19] test(preload): clear remaining provider-detection env vars Cover all env vars declared by providers in models-api.json plus the test-fixture pairs referenced in provider.test.ts, so host env (or in-process leaks via Env.set) cannot perturb provider detection. 
--- packages/opencode/test/preload.ts | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index d629910b7512..f6360cfb63e7 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -58,7 +58,10 @@ delete process.env["OPENAI_API_KEY"] delete process.env["GOOGLE_API_KEY"] delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"] delete process.env["AZURE_OPENAI_API_KEY"] +delete process.env["AZURE_RESOURCE_NAME"] +delete process.env["AZURE_API_KEY"] delete process.env["AWS_ACCESS_KEY_ID"] +delete process.env["AWS_SECRET_ACCESS_KEY"] delete process.env["AWS_PROFILE"] delete process.env["AWS_REGION"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"] @@ -81,12 +84,24 @@ delete process.env["AICORE_SERVICE_KEY"] delete process.env["AICORE_DEPLOYMENT_ID"] delete process.env["AICORE_RESOURCE_GROUP"] delete process.env["GOOGLE_APPLICATION_CREDENTIALS"] +delete process.env["GOOGLE_VERTEX_PROJECT"] +delete process.env["GOOGLE_VERTEX_LOCATION"] +delete process.env["GOOGLE_CLOUD_PROJECT"] +delete process.env["GOOGLE_CLOUD_LOCATION"] +delete process.env["GCP_PROJECT"] +delete process.env["GCLOUD_PROJECT"] +delete process.env["VERTEX_LOCATION"] delete process.env["CLOUDFLARE_ACCOUNT_ID"] delete process.env["CLOUDFLARE_GATEWAY_ID"] delete process.env["CLOUDFLARE_API_TOKEN"] +delete process.env["CLOUDFLARE_API_KEY"] +delete process.env["CF_AIG_TOKEN"] delete process.env["SINGLE_ENV_KEY"] delete process.env["MULTI_ENV_KEY_1"] +delete process.env["MULTI_ENV_KEY_2"] +delete process.env["PRIMARY_KEY"] delete process.env["FALLBACK_KEY"] +delete process.env["CUSTOM_API_KEY"] delete process.env["OPENCODE_SERVER_PASSWORD"] delete process.env["OPENCODE_SERVER_USERNAME"] From 49ba3002c021a6a6416f38aafabfc4788ac7e67c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 13:36:09 +0200 Subject: [PATCH 03/19] fix(provider): re-evaluate env detection in 
list()/getProvider() per call Provider state cached via InstanceState read env.all() once at init, so providers whose env keys were set after the first list() call remained invisible (issue #12698 surface beyond the Env layer). Overlay live env detection on top of the cached providers in list() and getProvider(), respecting disabled_providers, enabled_providers, blacklist, whitelist and model status filters via a precomputed envEligible set. Cached state retains expensive init work (catalog/plugins/SDK loaders); only env-driven detection re-runs. --- packages/opencode/src/provider/provider.ts | 61 +++++- .../opencode/test/provider/provider.test.ts | 182 ++++++++++++++++++ 2 files changed, 239 insertions(+), 4 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 919639f2d57c..f168a5cf6aca 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1002,10 +1002,38 @@ interface State { sdk: Map modelLoaders: Record varsLoaders: Record + database: Record + envEligible: ReadonlySet } export class Service extends Context.Service()("@opencode/Provider") {} +function overlayEnv(s: State, envs: Record): Record { + const result: Record = { ...s.providers } + for (const [id, info] of Object.entries(s.database)) { + const providerID = ProviderID.make(id) + if (!s.envEligible.has(providerID)) continue + const apiKey = info.env.map((k) => envs[k]).find(Boolean) + const cached = result[providerID] + if (!apiKey) { + if (cached?.source === "env") delete result[providerID] + continue + } + if (cached) { + if (cached.source !== "env") continue + const nextKey = info.env.length === 1 ? apiKey : undefined + if (cached.key === nextKey) continue + result[providerID] = mergeDeep(cached, { key: nextKey }) as Info + continue + } + result[providerID] = mergeDeep(info, { + source: "env", + key: info.env.length === 1 ? 
apiKey : undefined, + }) as Info + } + return result +} + function cost(c: ModelsDev.Model["cost"]): Model["cost"] { const result: Model["cost"] = { input: c?.input ?? 0, @@ -1488,6 +1516,23 @@ export const layer = Layer.effect( log.info("found", { providerID }) } + const envEligible = new Set() + for (const [id, info] of Object.entries(database)) { + if (disabled.has(id)) continue + if (enabled && !enabled.has(id)) continue + if (info.env.length === 0) continue + const configProvider = cfg.provider?.[id] + const surviving = Object.entries(info.models).filter(([modelID, model]) => { + if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) return false + if (model.status === "deprecated") return false + if (configProvider?.blacklist?.includes(modelID)) return false + if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) return false + return true + }) + if (surviving.length === 0) continue + envEligible.add(id) + } + return { models: languages, providers, @@ -1495,11 +1540,17 @@ export const layer = Layer.effect( sdk, modelLoaders, varsLoaders, + database, + envEligible, } }), ) - const list = Effect.fn("Provider.list")(() => InstanceState.use(state, (s) => s.providers)) + const list = Effect.fn("Provider.list")(function* () { + const s = yield* InstanceState.get(state) + const envs = yield* env.all() + return overlayEnv(s, envs) + }) async function resolveSDK(model: Model, s: State, envs: Record) { try { @@ -1644,9 +1695,11 @@ export const layer = Layer.effect( } } - const getProvider = Effect.fn("Provider.getProvider")((providerID: ProviderID) => - InstanceState.use(state, (s) => s.providers[providerID]), - ) + const getProvider = Effect.fn("Provider.getProvider")(function* (providerID: ProviderID) { + const s = yield* InstanceState.get(state) + const envs = yield* env.all() + return overlayEnv(s, envs)[providerID] + }) const getModel = Effect.fn("Provider.getModel")(function* (providerID: ProviderID, modelID: ModelID) { 
const s = yield* InstanceState.get(state) diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 1c6a8b33779c..b790b378cb58 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -2790,3 +2790,185 @@ test("opencode loader keeps paid models when auth exists", async () => { } } }) + +test("list() reflects env var set after first call", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + const before = await list() + expect(before[ProviderID.anthropic]).toBeUndefined() + + set("ANTHROPIC_API_KEY", "late-key") + const after = await list() + expect(after[ProviderID.anthropic]).toBeDefined() + expect(after[ProviderID.anthropic].source).toBe("env") + expect(after[ProviderID.anthropic].key).toBe("late-key") + + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("list() reflects env var removed after detection", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + set("ANTHROPIC_API_KEY", "ephemeral-key") + const before = await list() + expect(before[ProviderID.anthropic]).toBeDefined() + + remove("ANTHROPIC_API_KEY") + const after = await list() + expect(after[ProviderID.anthropic]).toBeUndefined() + }, + }) +}) + +test("list() refreshes key when env value changes", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + 
}, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + set("ANTHROPIC_API_KEY", "first-key") + const first = await list() + expect(first[ProviderID.anthropic].key).toBe("first-key") + + set("ANTHROPIC_API_KEY", "rotated-key") + const second = await list() + expect(second[ProviderID.anthropic].key).toBe("rotated-key") + + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("list() reflects direct process.env mutation", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + const before = await list() + expect(before[ProviderID.anthropic]).toBeUndefined() + + process.env["ANTHROPIC_API_KEY"] = "direct-key" + const after = await list() + expect(after[ProviderID.anthropic]).toBeDefined() + expect(after[ProviderID.anthropic].source).toBe("env") + expect(after[ProviderID.anthropic].key).toBe("direct-key") + + delete process.env["ANTHROPIC_API_KEY"] + }, + }) +}) + +test("list() respects whitelist for late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + anthropic: { whitelist: ["nonexistent-model"] }, + }, + }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + const before = await list() + expect(before[ProviderID.anthropic]).toBeUndefined() + + set("ANTHROPIC_API_KEY", "key-with-whitelist") + const after = await list() + expect(after[ProviderID.anthropic]).toBeUndefined() + + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("list() respects disabled_providers for late-detected env provider", async () => { + await using tmp = await 
tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + disabled_providers: ["anthropic"], + }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + set("ANTHROPIC_API_KEY", "key-but-disabled") + const providers = await list() + expect(providers[ProviderID.anthropic]).toBeUndefined() + + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("getProvider() reflects env var set after first call", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + const before = await getProvider(ProviderID.anthropic) + expect(before).toBeUndefined() + + set("ANTHROPIC_API_KEY", "key-via-getProvider") + const after = await getProvider(ProviderID.anthropic) + expect(after).toBeDefined() + expect(after!.source).toBe("env") + + remove("ANTHROPIC_API_KEY") + }, + }) +}) From 00d9d67339549f84a45190d1b6d861d3cb2673f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 14:17:53 +0200 Subject: [PATCH 04/19] fix(provider): route all readers through env overlay, pre-clean database Closes the half-fix where overlayEnv() was wired into list/getProvider only. getModel, getLanguage, closest, getSmallModel, defaultModel and resolveSDK all read s.providers directly, so a provider detected from a late env mutation appeared in list() but crashed when actually used. 
State refactor: - rename providers -> cachedProviders (invariant: never holds env-only entries) - replace database+envEligible with cleanedDatabase (env-eligible providers with models pre-filtered for status/blacklist/whitelist/chat-aliases AND non-autoload custom-loader options pre-folded) - drop the init-time env-stamp loop, subsumed by the call-time overlay Pure helpers (unit-testable): - resolveEnvOverlay(cached, candidate, apiKey): 7-branch env precedence - currentProviders(s, envs): single seam every reader funnels through Layer body: - view(s) helper composes env.all() + currentProviders - all 7 readers + resolveSDK go through view; resolveSDK accepts Info explicitly - custom loop always registers loaders so a late-env provider can find them; folds non-autoload options into database; preserves source=env for env-conditional autoloaders (gitlab, cloudflare-ai-gateway) Tests: - test/provider/overlay.test.ts: 13 pure-helper unit tests - test/provider/provider.test.ts: 9 integration tests covering getModel late, getLanguage late, closest late, getSmallModel late, defaultModel late, alpha model leak, blacklist late, multi-env Bedrock, auth precedence - test/preload.ts: clear GITHUB_TOKEN, GITLAB_TOKEN, OPENCODE_API_KEY, GEMINI_API_KEY, HF_TOKEN, DIGITALOCEAN_ACCESS_TOKEN to prevent CI leakage --- packages/opencode/src/provider/provider.ts | 193 +++++++++----- packages/opencode/test/preload.ts | 6 + .../opencode/test/provider/overlay.test.ts | 118 ++++++++ .../opencode/test/provider/provider.test.ts | 251 ++++++++++++++++++ 4 files changed, 501 insertions(+), 67 deletions(-) create mode 100644 packages/opencode/test/provider/overlay.test.ts diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index f168a5cf6aca..41c9a7536480 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -997,39 +997,60 @@ export interface Interface { interface State { models: Map - 
providers: Record + // INVARIANT: never holds env-only entries (env-derived entries are layered in + // by `currentProviders`). Readers MUST go through `view`, not `s.cachedProviders`. + cachedProviders: Record catalog: Record sdk: Map modelLoaders: Record varsLoaders: Record - database: Record - envEligible: ReadonlySet + // INVARIANT: env-eligible only; models pre-filtered (status/blacklist/whitelist); + // non-autoload custom-loader options pre-folded. Immutable post-init. + cleanedDatabase: Record } export class Service extends Context.Service()("@opencode/Provider") {} -function overlayEnv(s: State, envs: Record): Record { - const result: Record = { ...s.providers } - for (const [id, info] of Object.entries(s.database)) { - const providerID = ProviderID.make(id) - if (!s.envEligible.has(providerID)) continue +/** + * Single-provider env precedence (pure): + * - !apiKey, cached.source==="env" → drop + * - !apiKey, otherwise → keep cached + * - apiKey, cached non-"env" w/o key, single-env candidate → cached + key + * - apiKey, cached non-"env" → cached wins + * - apiKey, cached==="env" same key → identity + * - apiKey, cached==="env" new key → cached + new key + * - apiKey, no cached → new env entry from candidate + */ +export function resolveEnvOverlay( + cached: Info | undefined, + candidate: Info, + apiKey: string | undefined, +): Info | undefined { + if (!apiKey) { + if (cached?.source === "env") return undefined + return cached + } + if (cached && cached.source !== "env") { + if (!cached.key && candidate.env.length === 1) return { ...cached, key: apiKey } + return cached + } + const nextKey = candidate.env.length === 1 ? 
apiKey : undefined + if (cached && cached.key === nextKey) return cached + if (cached) return { ...cached, key: nextKey } + return { ...candidate, source: "env", key: nextKey } +} + +export function currentProviders( + s: Pick, + envs: Record, +): Record { + const result: Record = { ...s.cachedProviders } + for (const [id, info] of Object.entries(s.cleanedDatabase)) { + const providerID = id as ProviderID const apiKey = info.env.map((k) => envs[k]).find(Boolean) - const cached = result[providerID] - if (!apiKey) { - if (cached?.source === "env") delete result[providerID] - continue - } - if (cached) { - if (cached.source !== "env") continue - const nextKey = info.env.length === 1 ? apiKey : undefined - if (cached.key === nextKey) continue - result[providerID] = mergeDeep(cached, { key: nextKey }) as Info - continue - } - result[providerID] = mergeDeep(info, { - source: "env", - key: info.env.length === 1 ? apiKey : undefined, - }) as Info + const next = resolveEnvOverlay(result[providerID], info, apiKey) + if (next) result[providerID] = next + else delete result[providerID] } return result } @@ -1373,18 +1394,8 @@ export const layer = Layer.effect( database[providerID] = parsed } - // load env + // env snapshot for source-label preservation in the custom loop below. const envs = yield* env.all() - for (const [id, provider] of Object.entries(database)) { - const providerID = ProviderID.make(id) - if (disabled.has(providerID)) continue - const apiKey = provider.env.map((item) => envs[item]).find(Boolean) - if (!apiKey) continue - mergeProvider(providerID, { - source: "env", - key: provider.env.length === 1 ? 
apiKey : undefined, - }) - } // load apikeys const auths = yield* auth.all().pipe(Effect.orDie) @@ -1429,13 +1440,44 @@ export const layer = Layer.effect( continue } const result = yield* fn(data) - if (result && (result.autoload || providers[providerID])) { - if (result.getModel) modelLoaders[providerID] = result.getModel - if (result.vars) varsLoaders[providerID] = result.vars - if (result.discoverModels) discoveryLoaders[providerID] = result.discoverModels + if (!result) continue + + // Always register loaders so a late env-promoted provider can find + // its modelLoader / varsLoader / discoverModels. + if (result.getModel) modelLoaders[providerID] = result.getModel + if (result.vars) varsLoaders[providerID] = result.vars + if (result.discoverModels) discoveryLoaders[providerID] = result.discoverModels + + if (result.autoload) { const opts = result.options ?? {} - const patch: Partial = providers[providerID] ? { options: opts } : { source: "custom", options: opts } + if (providers[providerID]) { + mergeProvider(providerID, { options: opts }) + continue + } + // autoload=true with no prior auth/config entry. Most common cause: + // env-conditional autoload (gitlab, cloudflare-ai-gateway) fired + // because env was present at init. Preserve source="env" so labels + // match the pre-fix behavior of the deleted env-stamp loop. + const envKey = data.env.length > 0 ? data.env.map((k) => envs[k]).find(Boolean) : undefined + const patch: Partial = envKey + ? { source: "env", options: opts, key: data.env.length === 1 ? envKey : undefined } + : { source: "custom", options: opts } mergeProvider(providerID, patch) + continue + } + + // Non-autoload loader returned options: merge into providers if + // already stamped (auth/config), otherwise fold into database so a + // late env detection picks them up via the overlay. 
+ if (result.options) { + if (providers[providerID]) { + mergeProvider(providerID, { options: result.options }) + } else { + database[providerID] = { + ...data, + options: mergeDeep(data.options ?? {}, result.options) as Record, + } + } } } @@ -1516,48 +1558,63 @@ export const layer = Layer.effect( log.info("found", { providerID }) } - const envEligible = new Set() + // Build cleanedDatabase: filter models per env-eligible provider so a + // late env-detected provider exposes only the same surviving set the + // cleanup loop above produced for init-stamped providers. + const cleanedDatabase: Record = {} as Record for (const [id, info] of Object.entries(database)) { if (disabled.has(id)) continue if (enabled && !enabled.has(id)) continue if (info.env.length === 0) continue + const providerID = ProviderID.make(id) const configProvider = cfg.provider?.[id] - const surviving = Object.entries(info.models).filter(([modelID, model]) => { - if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) return false - if (model.status === "deprecated") return false - if (configProvider?.blacklist?.includes(modelID)) return false - if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) return false - return true - }) - if (surviving.length === 0) continue - envEligible.add(id) + const filteredModels: Record = {} + for (const [modelID, model] of Object.entries(info.models)) { + if ( + (modelID === "gpt-5-chat-latest" && + (providerID === ProviderID.openai || + providerID === ProviderID.githubCopilot || + providerID === ProviderID.openrouter)) || + (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") + ) + continue + if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) continue + if (model.status === "deprecated") continue + if (configProvider?.blacklist?.includes(modelID)) continue + if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) continue + filteredModels[modelID] = model + 
} + if (Object.keys(filteredModels).length === 0) continue + cleanedDatabase[providerID] = { ...info, models: filteredModels } } return { models: languages, - providers, + cachedProviders: providers, catalog, sdk, modelLoaders, varsLoaders, - database, - envEligible, + cleanedDatabase, } }), ) + const view = Effect.fnUntraced(function* (s: State) { + const envs = yield* env.all() + return currentProviders(s, envs) + }) + const list = Effect.fn("Provider.list")(function* () { const s = yield* InstanceState.get(state) - const envs = yield* env.all() - return overlayEnv(s, envs) + return yield* view(s) }) - async function resolveSDK(model: Model, s: State, envs: Record) { + async function resolveSDK(model: Model, s: State, provider: Info, envs: Record) { try { using _ = log.time("getSDK", { providerID: model.providerID, }) - const provider = s.providers[model.providerID] const options = { ...provider.options } if (model.providerID === "google-vertex" && !model.api.npm.includes("@ai-sdk/openai-compatible")) { @@ -1697,19 +1754,20 @@ export const layer = Layer.effect( const getProvider = Effect.fn("Provider.getProvider")(function* (providerID: ProviderID) { const s = yield* InstanceState.get(state) - const envs = yield* env.all() - return overlayEnv(s, envs)[providerID] + const all = yield* view(s) + return all[providerID] }) const getModel = Effect.fn("Provider.getModel")(function* (providerID: ProviderID, modelID: ModelID) { const s = yield* InstanceState.get(state) - const provider = s.providers[providerID] + const all = yield* view(s) + const provider = all[providerID] if (!provider) { const catalogProvider = s.catalog[providerID] const suggestions = catalogProvider ? 
modelSuggestions(catalogProvider, modelID, runtimeFlags.enableExperimentalModels) : fuzzysort - .go(providerID, Object.keys({ ...s.catalog, ...s.providers }), { limit: 3, threshold: -10000 }) + .go(providerID, Object.keys({ ...s.catalog, ...all }), { limit: 3, threshold: -10000 }) .map((m) => m.target) return yield* new ModelNotFoundError({ providerID, modelID, suggestions }) } @@ -1731,10 +1789,10 @@ export const layer = Layer.effect( const key = `${model.providerID}/${model.id}` if (s.models.has(key)) return s.models.get(key)! - const provider = s.providers[model.providerID] + const provider = currentProviders(s, envs)[model.providerID] return yield* EffectPromise.refineRejection( async () => { - const sdk = await resolveSDK(model, s, envs) + const sdk = await resolveSDK(model, s, provider, envs) const language = s.modelLoaders[model.providerID] ? await s.modelLoaders[model.providerID](sdk, model.api.id, { ...provider.options, @@ -1753,7 +1811,7 @@ export const layer = Layer.effect( const closest = Effect.fn("Provider.closest")(function* (providerID: ProviderID, query: string[]) { const s = yield* InstanceState.get(state) - const provider = s.providers[providerID] + const provider = (yield* view(s))[providerID] if (!provider) return undefined for (const item of query) { for (const modelID of Object.keys(provider.models)) { @@ -1774,7 +1832,7 @@ export const layer = Layer.effect( } const s = yield* InstanceState.get(state) - const provider = s.providers[providerID] + const provider = (yield* view(s))[providerID] if (!provider) return undefined let priority = [ @@ -1826,6 +1884,7 @@ export const layer = Layer.effect( if (cfg.model) return parseModel(cfg.model) const s = yield* InstanceState.get(state) + const all = yield* view(s) const recent = yield* fs.readJson(path.join(Global.Path.state, "model.json")).pipe( Effect.map((x): { providerID: ProviderID; modelID: ModelID }[] => { if (!isRecord(x) || !Array.isArray(x.recent)) return [] @@ -1839,13 +1898,13 @@ export 
const layer = Layer.effect( Effect.catch(() => Effect.succeed([] as { providerID: ProviderID; modelID: ModelID }[])), ) for (const entry of recent) { - const provider = s.providers[entry.providerID] + const provider = all[entry.providerID] if (!provider) continue if (!provider.models[entry.modelID]) continue return { providerID: entry.providerID, modelID: entry.modelID } } - const provider = Object.values(s.providers).find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.id)) + const provider = Object.values(all).find((p) => !cfg.provider || Object.keys(cfg.provider).includes(p.id)) if (!provider) throw new Error("no providers found") const [model] = sort(Object.values(provider.models)) if (!model) throw new Error("no models found") diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index f6360cfb63e7..b65c202713a1 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -96,6 +96,12 @@ delete process.env["CLOUDFLARE_GATEWAY_ID"] delete process.env["CLOUDFLARE_API_TOKEN"] delete process.env["CLOUDFLARE_API_KEY"] delete process.env["CF_AIG_TOKEN"] +delete process.env["GITHUB_TOKEN"] +delete process.env["GITLAB_TOKEN"] +delete process.env["OPENCODE_API_KEY"] +delete process.env["GEMINI_API_KEY"] +delete process.env["HF_TOKEN"] +delete process.env["DIGITALOCEAN_ACCESS_TOKEN"] delete process.env["SINGLE_ENV_KEY"] delete process.env["MULTI_ENV_KEY_1"] delete process.env["MULTI_ENV_KEY_2"] diff --git a/packages/opencode/test/provider/overlay.test.ts b/packages/opencode/test/provider/overlay.test.ts new file mode 100644 index 000000000000..99dd55423413 --- /dev/null +++ b/packages/opencode/test/provider/overlay.test.ts @@ -0,0 +1,118 @@ +import { test, expect } from "bun:test" + +import { resolveEnvOverlay, currentProviders } from "@/provider/provider" +import type { Provider } from "@/provider/provider" +import { ProviderID } from "@/provider/schema" + +const baseInfo = (env: string[], source: 
Provider.Info["source"] = "env"): Provider.Info => ({ + id: ProviderID.make("x"), + name: "X", + source, + env, + options: {}, + models: {}, +}) + +test("resolveEnvOverlay: no cached, no apiKey -> undefined", () => { + expect(resolveEnvOverlay(undefined, baseInfo(["KEY"]), undefined)).toBeUndefined() +}) + +test("resolveEnvOverlay: no cached, apiKey present -> new env entry with key", () => { + const r = resolveEnvOverlay(undefined, baseInfo(["KEY"]), "abc") + expect(r?.source).toBe("env") + expect(r?.key).toBe("abc") +}) + +test("resolveEnvOverlay: no cached, apiKey present, multi-env candidate -> key undefined", () => { + const r = resolveEnvOverlay(undefined, baseInfo(["A", "B"]), "abc") + expect(r?.source).toBe("env") + expect(r?.key).toBeUndefined() +}) + +test("resolveEnvOverlay: cached env entry, apiKey removed -> drop", () => { + const cached = { ...baseInfo(["KEY"]), key: "old" } + expect(resolveEnvOverlay(cached, baseInfo(["KEY"]), undefined)).toBeUndefined() +}) + +test("resolveEnvOverlay: cached config entry, apiKey absent -> keep cached untouched", () => { + const cached = baseInfo(["KEY"], "config") + expect(resolveEnvOverlay(cached, baseInfo(["KEY"]), undefined)).toBe(cached) +}) + +test("resolveEnvOverlay: cached config entry without key, single-env apiKey -> fill key, keep source", () => { + const cached = { ...baseInfo(["KEY"], "config") } + const r = resolveEnvOverlay(cached, baseInfo(["KEY"]), "abc") + expect(r?.source).toBe("config") + expect(r?.key).toBe("abc") + expect(r).not.toBe(cached) +}) + +test("resolveEnvOverlay: cached api entry with existing key + env present -> cached wins (auth precedence)", () => { + const cached = { ...baseInfo(["KEY"], "api"), key: "auth-key" } + const r = resolveEnvOverlay(cached, baseInfo(["KEY"]), "env-key") + expect(r).toBe(cached) +}) + +test("resolveEnvOverlay: cached env entry, same apiKey -> identity (no churn)", () => { + const cached = { ...baseInfo(["KEY"]), key: "abc" } + 
expect(resolveEnvOverlay(cached, baseInfo(["KEY"]), "abc")).toBe(cached) +}) + +test("resolveEnvOverlay: cached env entry, rotated apiKey -> new entry with new key", () => { + const cached = { ...baseInfo(["KEY"]), key: "old" } + const r = resolveEnvOverlay(cached, baseInfo(["KEY"]), "new") + expect(r?.key).toBe("new") + expect(r).not.toBe(cached) +}) + +test("resolveEnvOverlay: cached config entry without key, multi-env apiKey -> cached untouched", () => { + const cached = baseInfo(["A", "B"], "config") + const r = resolveEnvOverlay(cached, baseInfo(["A", "B"]), "abc") + expect(r).toBe(cached) +}) + +test("currentProviders: late env adds provider drawn from cleanedDatabase", () => { + const candidate = baseInfo(["FOO_KEY"]) + const r = currentProviders( + { + cachedProviders: {} as Record, + cleanedDatabase: { foo: candidate } as Record, + }, + { FOO_KEY: "k" }, + ) + expect(r["foo" as ProviderID]?.source).toBe("env") + expect(r["foo" as ProviderID]?.key).toBe("k") +}) + +test("currentProviders: removing env from process drops env-only entry", () => { + const cached = { ...baseInfo(["FOO_KEY"]), key: "k" } + const r = currentProviders( + { + cachedProviders: { foo: cached } as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + {}, + ) + expect(r["foo" as ProviderID]).toBeUndefined() +}) + +test("currentProviders: cached api entry preserved across env presence/absence", () => { + const cached = { ...baseInfo(["FOO_KEY"], "api"), key: "auth" } + const env = currentProviders( + { + cachedProviders: { foo: cached } as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + { FOO_KEY: "envk" }, + ) + expect(env["foo" as ProviderID]).toBe(cached) + + const noEnv = currentProviders( + { + cachedProviders: { foo: cached } as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + {}, + ) + expect(noEnv["foo" as ProviderID]).toBe(cached) +}) diff --git a/packages/opencode/test/provider/provider.test.ts 
b/packages/opencode/test/provider/provider.test.ts index b790b378cb58..d46f8b6d6d36 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -2972,3 +2972,254 @@ test("getProvider() reflects env var set after first call", async () => { }, }) }) + +test("getModel() resolves late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const model = await getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) + expect(model).toBeDefined() + expect(model.providerID).toBe(ProviderID.anthropic) + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("getLanguage() resolves late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const model = await getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) + const lang = await getLanguage(model) + expect(lang).toBeDefined() + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("closest() resolves late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + 
set("ANTHROPIC_API_KEY", "late-key") + const r = await closest(ProviderID.anthropic, ["sonnet-4-20250514"]) + expect(r?.providerID).toBe(ProviderID.anthropic) + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("getSmallModel() resolves late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const small = await getSmallModel(ProviderID.anthropic) + expect(small).toBeDefined() + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("defaultModel() finds late-detected env provider", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const d = await defaultModel() + expect(d).toBeDefined() + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("late-detected env provider does NOT expose alpha models without flag", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + "alpha-only": { + name: "Alpha Only", + npm: "@ai-sdk/openai-compatible", + api: "https://api.example.com/v1", + env: ["CUSTOM_API_KEY"], + models: { + active: { name: "Active" }, + experimental: { name: "Experimental", status: "alpha" as const }, + }, + }, + }, + }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("CUSTOM_API_KEY") + await list() + 
set("CUSTOM_API_KEY", "late-key") + const after = await list() + const p = after[ProviderID.make("alpha-only")] + expect(p).toBeDefined() + expect(p.models["active"]).toBeDefined() + expect(p.models["experimental"]).toBeUndefined() + remove("CUSTOM_API_KEY") + }, + }) +}) + +test("late-detected env provider respects blacklist", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + provider: { + anthropic: { blacklist: ["claude-sonnet-4-20250514"] }, + }, + }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const after = await list() + expect(after[ProviderID.anthropic]).toBeDefined() + expect(after[ProviderID.anthropic].models["claude-sonnet-4-20250514"]).toBeUndefined() + remove("ANTHROPIC_API_KEY") + }, + }) +}) + +test("late-detected multi-env provider yields source=env with key undefined", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + ;["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_REGION", "AWS_BEARER_TOKEN_BEDROCK"].forEach(remove) + const before = await list() + expect(before[ProviderID.amazonBedrock]).toBeUndefined() + set("AWS_ACCESS_KEY_ID", "AKIA-LATE") + const after = await list() + const bedrock = after[ProviderID.amazonBedrock] + if (bedrock) { + expect(bedrock.source).toBe("env") + expect(bedrock.key).toBeUndefined() + } + remove("AWS_ACCESS_KEY_ID") + }, + }) +}) + +test("auth source wins over late-detected env", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + 
JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + const authPath = path.join(Global.Path.data, "auth.json") + let prev: string | undefined + try { + prev = await Filesystem.readText(authPath) + } catch {} + try { + await mkdir(path.dirname(authPath), { recursive: true }) + await Filesystem.write( + authPath, + JSON.stringify({ anthropic: { type: "api", key: "auth-key" } }), + ) + await disposeAllInstances() + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + set("ANTHROPIC_API_KEY", "env-key") + const providers = await list() + expect(providers[ProviderID.anthropic]).toBeDefined() + expect(providers[ProviderID.anthropic].source).toBe("api") + expect(providers[ProviderID.anthropic].key).toBe("auth-key") + remove("ANTHROPIC_API_KEY") + }, + }) + } finally { + if (prev !== undefined) { + await Filesystem.write(authPath, prev) + } else { + try { + await unlink(authPath) + } catch {} + } + await disposeAllInstances() + } +}) From 4f480b34b2dcfd3ae41e040f2595b3c345d553c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 14:49:30 +0200 Subject: [PATCH 05/19] refactor(provider): share keepModel/prepareModel between cachedProviders and cleanedDatabase, close variant parity gap MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Round-2 cross-validation by 3 oracles surfaced 4 actionable findings. This addresses all of them: A1 — Variant parity gap (Oracle A): the cleanedDatabase build only applied the 5 keep filters (status/blacklist/whitelist/chat-aliases) but skipped the prepareModel mutations (api.id default, ProviderTransform.variants generation, configProvider.models[*].variants merge) that the cachedProviders cleanup loop applies. A late-env-detected provider therefore had empty variants while the same provider with env-at-boot had full variants. Fixed by applying prepareModel in both loops. 
A3 — Filter duplication (Oracle A + C): the 5 keep predicates were
duplicated between the cachedProviders cleanup loop and the cleanedDatabase
build. Extracted keepModel(model, modelID, providerID, configProvider) and
prepareModel(model, modelID, configVariants) as local helpers in the
InstanceState.make closure. Both loops now share the pipeline.

A2 — Test tightening (Oracle A + C): the late-detected multi-env Bedrock
test had an 'if (bedrock)' guard that silently passed if a regression made
bedrock undefined. Replaced with explicit expect(bedrock).toBeDefined().
The defaultModel late-env test only asserted toBeDefined() — strengthened
to expect(d.providerID).toBe(anthropic) and added enabled_providers config
to make the assertion robust against bundled provider iteration order.

A4 — AWS env hygiene (Oracle B): the multi-env Bedrock test only removed
4 AWS keys; added AWS_PROFILE, AWS_ROLE_ARN, AWS_CONTAINER_CREDENTIALS_*,
AWS_WEB_IDENTITY_TOKEN_FILE for full provider-detection-path isolation.

New regression test: 'late-detected env provider has variants populated
(parity with env-at-boot)' guards A1 closure.
bun typecheck: clean bun test test/provider/: 101 + 13 = 114/114 pass --- packages/opencode/src/provider/provider.ts | 90 ++++++++++--------- .../opencode/test/provider/provider.test.ts | 54 +++++++++-- 2 files changed, 93 insertions(+), 51 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 41c9a7536480..d6ca49793c18 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1507,6 +1507,45 @@ export const layer = Layer.effect( }) } + const keepModel = ( + model: Model, + modelID: string, + providerID: ProviderID, + configProvider: NonNullable[string] | undefined, + ): boolean => { + if ( + (modelID === "gpt-5-chat-latest" && + (providerID === ProviderID.openai || + providerID === ProviderID.githubCopilot || + providerID === ProviderID.openrouter)) || + (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") + ) + return false + if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) return false + if (model.status === "deprecated") return false + if (configProvider?.blacklist?.includes(modelID)) return false + if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) return false + return true + } + + const prepareModel = ( + model: Model, + modelID: string, + configVariants: Record | undefined, + ) => { + model.api.id = model.api.id ?? model.id ?? 
modelID + if (!model.variants || Object.keys(model.variants).length === 0) { + model.variants = mapValues(ProviderTransform.variants(model), (v) => v) + } + if (configVariants && model.variants) { + const merged = mergeDeep(model.variants, configVariants) + model.variants = mapValues( + pickBy(merged, (v) => !v.disabled), + (v) => omit(v, ["disabled"]), + ) + } + } + for (const [id, provider] of Object.entries(providers)) { const providerID = ProviderID.make(id) if (!isProviderAllowed(providerID)) { @@ -1517,36 +1556,9 @@ export const layer = Layer.effect( const configProvider = cfg.provider?.[providerID] for (const [modelID, model] of Object.entries(provider.models)) { - model.api.id = model.api.id ?? model.id ?? modelID - if ( - // These chat aliases are invalid for the special handling in the - // built-in providers below, but custom providers may support them. - (modelID === "gpt-5-chat-latest" && - (providerID === ProviderID.openai || - providerID === ProviderID.githubCopilot || - providerID === ProviderID.openrouter)) || - (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") - ) + prepareModel(model, modelID, configProvider?.models?.[modelID]?.variants) + if (!keepModel(model, modelID, providerID, configProvider)) { delete provider.models[modelID] - if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) delete provider.models[modelID] - if (model.status === "deprecated") delete provider.models[modelID] - if ( - (configProvider?.blacklist && configProvider.blacklist.includes(modelID)) || - (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) - ) - delete provider.models[modelID] - - if (!model.variants || Object.keys(model.variants).length === 0) { - model.variants = mapValues(ProviderTransform.variants(model), (v) => v) - } - - const configVariants = configProvider?.models?.[modelID]?.variants - if (configVariants && model.variants) { - const merged = mergeDeep(model.variants, configVariants) - 
model.variants = mapValues( - pickBy(merged, (v) => !v.disabled), - (v) => omit(v, ["disabled"]), - ) } } @@ -1558,9 +1570,9 @@ export const layer = Layer.effect( log.info("found", { providerID }) } - // Build cleanedDatabase: filter models per env-eligible provider so a - // late env-detected provider exposes only the same surviving set the - // cleanup loop above produced for init-stamped providers. + // Build cleanedDatabase. Apply the same prepareModel + keepModel pipeline + // so a late env-detected provider exposes the same models, with the same + // variants, as it would if env had been present at init. const cleanedDatabase: Record = {} as Record for (const [id, info] of Object.entries(database)) { if (disabled.has(id)) continue @@ -1570,18 +1582,8 @@ export const layer = Layer.effect( const configProvider = cfg.provider?.[id] const filteredModels: Record = {} for (const [modelID, model] of Object.entries(info.models)) { - if ( - (modelID === "gpt-5-chat-latest" && - (providerID === ProviderID.openai || - providerID === ProviderID.githubCopilot || - providerID === ProviderID.openrouter)) || - (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") - ) - continue - if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) continue - if (model.status === "deprecated") continue - if (configProvider?.blacklist?.includes(modelID)) continue - if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) continue + prepareModel(model, modelID, configProvider?.models?.[modelID]?.variants) + if (!keepModel(model, modelID, providerID, configProvider)) continue filteredModels[modelID] = model } if (Object.keys(filteredModels).length === 0) continue diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index d46f8b6d6d36..776246e9c404 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -3068,7 
+3068,10 @@ test("defaultModel() finds late-detected env provider", async () => { init: async (dir) => { await Bun.write( path.join(dir, "opencode.json"), - JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + JSON.stringify({ + $schema: "https://opencode.ai/config.json", + enabled_providers: ["anthropic"], + }), ) }, }) @@ -3076,10 +3079,12 @@ test("defaultModel() finds late-detected env provider", async () => { directory: tmp.path, fn: async () => { remove("ANTHROPIC_API_KEY") - await list() + const before = await list() + expect(before[ProviderID.anthropic]).toBeUndefined() set("ANTHROPIC_API_KEY", "late-key") const d = await defaultModel() expect(d).toBeDefined() + expect(d.providerID).toBe(ProviderID.anthropic) remove("ANTHROPIC_API_KEY") }, }) @@ -3164,16 +3169,25 @@ test("late-detected multi-env provider yields source=env with key undefined", as await WithInstance.provide({ directory: tmp.path, fn: async () => { - ;["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_REGION", "AWS_BEARER_TOKEN_BEDROCK"].forEach(remove) + ;[ + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_REGION", + "AWS_BEARER_TOKEN_BEDROCK", + "AWS_PROFILE", + "AWS_ROLE_ARN", + "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI", + "AWS_CONTAINER_CREDENTIALS_FULL_URI", + "AWS_WEB_IDENTITY_TOKEN_FILE", + ].forEach(remove) const before = await list() expect(before[ProviderID.amazonBedrock]).toBeUndefined() set("AWS_ACCESS_KEY_ID", "AKIA-LATE") const after = await list() const bedrock = after[ProviderID.amazonBedrock] - if (bedrock) { - expect(bedrock.source).toBe("env") - expect(bedrock.key).toBeUndefined() - } + expect(bedrock).toBeDefined() + expect(bedrock.source).toBe("env") + expect(bedrock.key).toBeUndefined() remove("AWS_ACCESS_KEY_ID") }, }) @@ -3223,3 +3237,29 @@ test("auth source wins over late-detected env", async () => { await disposeAllInstances() } }) + +test("late-detected env provider has variants populated (parity with env-at-boot)", async () => { + await using tmp 
= await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + remove("ANTHROPIC_API_KEY") + await list() + set("ANTHROPIC_API_KEY", "late-key") + const after = await list() + const model = after[ProviderID.anthropic].models["claude-sonnet-4-20250514"] + expect(model).toBeDefined() + expect(model.capabilities.reasoning).toBe(true) + expect(model.variants).toBeDefined() + expect(Object.keys(model.variants!).length).toBeGreaterThan(0) + remove("ANTHROPIC_API_KEY") + }, + }) +}) From 1a486bcfde85e88317a4539d0fb7e90e617e46b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 15:58:31 +0200 Subject: [PATCH 06/19] =?UTF-8?q?fix(provider):=20complete=20late-env=20de?= =?UTF-8?q?tection=20contract=20=E2=80=94=20multi-cred=20opt-out=20+=20tes?= =?UTF-8?q?t=20isolation?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Round-3 cross-validation by 4 oracles + 4 design oracles surfaced 8 findings plus 3 audit gaps. This commit addresses all of them. B1 — Multi-credential late-env gap (Oracle 2 + Oracle 3): cleanedDatabase surfaces providers whose env appears post-init, but multi-cred loaders (bedrock, sap-ai-core, vertex, vertex-anthropic, azure, cloudflare-workers- ai, cloudflare-ai-gateway, azure-cognitive-services) close over credential providers / deployment IDs / account IDs / derived URLs at loader time. Late-promoting them yields an unauthenticated SDK that fails silently at first request. Solution: explicit `dynamicEnv?: boolean` capability flag on CustomLoader. Loaders mark `dynamicEnv: false` on broken-init-state early-returns; State holds `loaderRefusedDynamicEnv: Set` populated at init; cleanedDatabase build excludes these. 
New `warnLateEnvRefused` helper emits a one-time per-process warn when refused env appears, surfacing the limitation without false-positive listing. The PR test that asserted the broken bedrock state is inverted to assert exclusion. Audit (post-oracles): cloudflare-workers-ai, cloudflare-ai-gateway, and azure-cognitive-services had the same pattern as the original 5 loaders but were not covered. All three now carry the flag; azure-cognitive- services refactored to early-return (was producing options.baseURL= undefined). The dynamicEnv JSDoc states the rule generically so future loaders know what to do. B2 — Test isolation regression (Oracle 3): bun runs all .test.ts in one shared process. With Env.set now writing through to process.env, 30+ test sites in test/provider/provider.test.ts call set() without remove() and leak across files. Documented victim: test/session/structured-output- integration.test.ts:12 reads process.env.ANTHROPIC_API_KEY at module-load to choose skip vs live. Solution: capture ENV_BASELINE in test/preload.ts after preload settles, afterEach() restores the snapshot. ~15 lines, covers every test file automatically, future-proof. M1 — Inaccurate State invariants (Oracle 2 + Oracle 4): cachedProviders comment claimed "never holds env-only entries", violated by the autoload- with-env-key path. cleanedDatabase claimed "Immutable post-init" without compile-time hint. Rephrased both; added Readonly<> on cleanedDatabase. M2.C — Inconsistent process.env writes (Oracle 3): bedrock and sap-ai-core loaders wrote process.env.X = auth.key directly, breaking the trace-and- audit guarantee of the Env service. Added dep.set on CustomDep; lifted the effectful write out of iife() into proper yield* dep.set(...). M3 — Missing Env.Service contract docs (Oracle 2 + Oracle 4): added top-of-file JSDoc covering live reads, global writes, child-process inheritance, and the test-isolation contract. 
M5 — Empty-string apiKey edge case (Oracle 2): info.env.find(Boolean) intentionally treats "" as absent. Added inline comment so a future "correctness fix" doesn't regress to find(v => v !== undefined). M6 — Redundant `as State` cast (Oracle 1): {...process.env} infers correctly as Record. Cast removed. bun typecheck: clean bun test test/provider: 365/366 (1 environmental 5000ms flake, different test each run, present on baseline before changes) bun test test/plugin: 91/91 --- packages/opencode/src/env/index.ts | 18 ++- packages/opencode/src/provider/provider.ts | 136 +++++++++++++----- packages/opencode/test/preload.ts | 22 ++- .../opencode/test/provider/provider.test.ts | 14 +- 4 files changed, 149 insertions(+), 41 deletions(-) diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index 56acf3dcbb4b..4004502d52d5 100644 --- a/packages/opencode/src/env/index.ts +++ b/packages/opencode/src/env/index.ts @@ -1,5 +1,21 @@ import { Context, Effect, Layer } from "effect" +/** + * Effect-aware wrapper around `process.env`. + * + * Contract: + * - Reads (`get`, `all`) are LIVE — they reflect the current `process.env` at + * call time, not a snapshot taken at layer construction. `all()` returns a + * fresh shallow copy, so iterating the result is safe across subsequent + * writes. + * - Writes (`set`, `remove`) mutate `process.env` directly and therefore + * propagate to any child process spawned afterward and to native consumers + * (e.g. `getenv()` inside vendor SDKs). + * - NOT safe for parallel test isolation: all callers share one global + * `process.env`. Tests that mutate env state must serialize within a file + * and rely on `test/preload.ts` afterEach baseline restore for cross-file + * safety. 
+ */ type State = Record export interface Interface { @@ -15,7 +31,7 @@ export const layer = Layer.succeed( Service, Service.of({ get: Effect.fn("Env.get")((key: string) => Effect.sync(() => process.env[key])), - all: Effect.fn("Env.all")(() => Effect.sync(() => ({ ...process.env }) as State)), + all: Effect.fn("Env.all")(() => Effect.sync(() => ({ ...process.env }))), set: Effect.fn("Env.set")((key: string, value: string) => Effect.sync(() => { process.env[key] = value diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index d6ca49793c18..72474a6e878a 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -125,6 +125,24 @@ type CustomLoader = (provider: Info) => Effect.Effect<{ vars?: CustomVarsLoader options?: Record discoverModels?: CustomDiscoverModels + // Set to `false` on a non-autoload return to opt out of late-env detection. + // + // Mark a return path with `dynamicEnv: false` whenever the loader cannot + // reconstruct its options from env alone — i.e. when missing init-time + // inputs (auth metadata, credential providers obtained via dynamic SDK + // imports, derived URLs, region/deployment/account IDs) close over values + // that later env mutations cannot replay. Without this flag a late env + // would surface the provider in `list()`, but `getLanguage()` would build + // an SDK with stale or undefined options and fail silently at first + // request. Loaders marked `dynamicEnv: false` are excluded from + // `cleanedDatabase` so the provider stays hidden until opencode restart; + // a one-time warn is emitted when the refused env appears. + // + // Single-credential providers whose only env input is the API key + // (anthropic, openai, openrouter, ...) do NOT need this flag: their late + // env value flows through `provider.key` via the env overlay and is + // applied by `resolveSDK` at call time. 
+ dynamicEnv?: boolean }> type CustomDep = { @@ -132,6 +150,7 @@ type CustomDep = { config: () => Effect.Effect env: () => Effect.Effect> get: (key: string) => Effect.Effect + set: (key: string, value: string) => Effect.Effect } function useLanguageModel(sdk: any) { @@ -219,6 +238,7 @@ function custom(dep: CustomDep): Record { if (!resource && !provider.options?.baseURL) { return { autoload: false, + dynamicEnv: false, async getModel() { throw new Error( "AZURE_RESOURCE_NAME is missing, set it using env var or reconnecting the azure provider and setting it", @@ -245,16 +265,27 @@ function custom(dep: CustomDep): Record { }, } }), - "azure-cognitive-services": Effect.fnUntraced(function* () { + "azure-cognitive-services": Effect.fnUntraced(function* (provider: Info) { const resourceName = yield* dep.get("AZURE_COGNITIVE_SERVICES_RESOURCE_NAME") + if (!resourceName && !provider.options?.baseURL) { + return { + autoload: false, + dynamicEnv: false, + async getModel() { + throw new Error( + "AZURE_COGNITIVE_SERVICES_RESOURCE_NAME is missing, set it via env var or configure provider.options.baseURL", + ) + }, + } + } return { autoload: false, async getModel(sdk: any, modelID: string, options?: Record) { return selectAzureLanguageModel(sdk, modelID, Boolean(options?.["useCompletionUrls"])) }, - options: { - baseURL: resourceName ? `https://${resourceName}.cognitiveservices.azure.com/openai` : undefined, - }, + options: resourceName + ? 
{ baseURL: `https://${resourceName}.cognitiveservices.azure.com/openai` } + : {}, } }), "amazon-bedrock": Effect.fnUntraced(function* () { @@ -274,15 +305,11 @@ function custom(dep: CustomDep): Record { const awsAccessKeyId = env["AWS_ACCESS_KEY_ID"] - const awsBearerToken = iife(() => { - const envToken = process.env.AWS_BEARER_TOKEN_BEDROCK - if (envToken) return envToken - if (auth?.type === "api") { - process.env.AWS_BEARER_TOKEN_BEDROCK = auth.key - return auth.key - } - return undefined - }) + const envBearerToken = env["AWS_BEARER_TOKEN_BEDROCK"] + const awsBearerToken = envBearerToken ?? (auth?.type === "api" ? auth.key : undefined) + if (!envBearerToken && awsBearerToken) { + yield* dep.set("AWS_BEARER_TOKEN_BEDROCK", awsBearerToken) + } const awsWebIdentityTokenFile = env["AWS_WEB_IDENTITY_TOKEN_FILE"] @@ -291,7 +318,7 @@ function custom(dep: CustomDep): Record { ) if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile && !containerCreds) - return { autoload: false } + return { autoload: false, dynamicEnv: false } const { fromNodeProviderChain } = yield* Effect.promise(() => import("@aws-sdk/credential-providers")) @@ -466,7 +493,7 @@ function custom(dep: CustomDep): Record { ) const autoload = Boolean(project) - if (!autoload) return { autoload: false } + if (!autoload) return { autoload: false, dynamicEnv: false } return { autoload: true, vars(_options: Record) { @@ -503,7 +530,7 @@ function custom(dep: CustomDep): Record { const project = env["GOOGLE_CLOUD_PROJECT"] ?? env["GCP_PROJECT"] ?? env["GCLOUD_PROJECT"] const location = env["GOOGLE_CLOUD_LOCATION"] ?? env["VERTEX_LOCATION"] ?? 
"global" const autoload = Boolean(project) - if (!autoload) return { autoload: false } + if (!autoload) return { autoload: false, dynamicEnv: false } return { autoload: true, options: { @@ -518,21 +545,21 @@ function custom(dep: CustomDep): Record { }), "sap-ai-core": Effect.fnUntraced(function* () { const auth = yield* dep.auth("sap-ai-core") - const envServiceKey = iife(() => { - const envAICoreServiceKey = process.env.AICORE_SERVICE_KEY - if (envAICoreServiceKey) return envAICoreServiceKey - if (auth?.type === "api") { - process.env.AICORE_SERVICE_KEY = auth.key - return auth.key - } - return undefined - }) - const deploymentId = process.env.AICORE_DEPLOYMENT_ID - const resourceGroup = process.env.AICORE_RESOURCE_GROUP + const env = yield* dep.env() + const envAICoreServiceKey = env["AICORE_SERVICE_KEY"] + const envServiceKey = envAICoreServiceKey ?? (auth?.type === "api" ? auth.key : undefined) + if (!envServiceKey) { + return { autoload: false, dynamicEnv: false } + } + if (!envAICoreServiceKey) { + yield* dep.set("AICORE_SERVICE_KEY", envServiceKey) + } + const deploymentId = env["AICORE_DEPLOYMENT_ID"] + const resourceGroup = env["AICORE_RESOURCE_GROUP"] return { - autoload: !!envServiceKey, - options: envServiceKey ? { deploymentId, resourceGroup } : {}, + autoload: true, + options: { deploymentId, resourceGroup }, async getModel(sdk: any, modelID: string) { return sdk(modelID) }, @@ -703,6 +730,7 @@ function custom(dep: CustomDep): Record { if (!accountId) return { autoload: false, + dynamicEnv: false, async getModel() { throw new Error( "CLOUDFLARE_ACCOUNT_ID is missing. Set it with: export CLOUDFLARE_ACCOUNT_ID=", @@ -752,6 +780,7 @@ function custom(dep: CustomDep): Record { ].filter((x): x is string => Boolean(x)) return { autoload: false, + dynamicEnv: false, async getModel() { throw new Error( `${missing.join(" and ")} missing. 
Set with: ${missing.map((x) => `export ${x}=`).join(" && ")}`, @@ -997,16 +1026,25 @@ export interface Interface { interface State { models: Map - // INVARIANT: never holds env-only entries (env-derived entries are layered in - // by `currentProviders`). Readers MUST go through `view`, not `s.cachedProviders`. + // Stamped at init from auth/config/loader results. May contain entries with + // source:"env" for autoload-with-env-key providers (e.g. gitlab, + // cloudflare-ai-gateway) that have no other config source. Readers MUST go + // through `currentProviders(...)` to apply the live env overlay on top. cachedProviders: Record catalog: Record sdk: Map modelLoaders: Record varsLoaders: Record - // INVARIANT: env-eligible only; models pre-filtered (status/blacklist/whitelist); - // non-autoload custom-loader options pre-folded. Immutable post-init. - cleanedDatabase: Record + // Env-eligible providers only (info.env.length > 0); models pre-filtered + // (status/blacklist/whitelist); non-autoload custom-loader options pre-folded. + // Treated as immutable after init — all reads go through `currentProviders`. + cleanedDatabase: Readonly> + // Providers whose custom loader returned `dynamicEnv: false`. Populated at + // init only; never mutated post-init. + loaderRefusedDynamicEnv: Set + // Mutated post-init by `warnLateEnvRefused` to dedupe the once-per-process + // warning emitted when a refused provider's env appears. + warnedLateEnvRefused: Set } export class Service extends Context.Service()("@opencode/Provider") {} @@ -1047,6 +1085,7 @@ export function currentProviders( const result: Record = { ...s.cachedProviders } for (const [id, info] of Object.entries(s.cleanedDatabase)) { const providerID = id as ProviderID + // Treat empty strings as absent: an empty env var is never a valid API key. 
const apiKey = info.env.map((k) => envs[k]).find(Boolean) const next = resolveEnvOverlay(result[providerID], info, apiKey) if (next) result[providerID] = next @@ -1055,6 +1094,25 @@ export function currentProviders( return result } +function warnLateEnvRefused( + s: Pick, + envs: Record, +) { + if (s.loaderRefusedDynamicEnv.size === 0) return + for (const providerID of s.loaderRefusedDynamicEnv) { + if (s.warnedLateEnvRefused.has(providerID)) continue + const info = s.catalog[providerID] + if (!info) continue + const present = info.env.find((k) => envs[k]) + if (!present) continue + s.warnedLateEnvRefused.add(providerID) + log.warn("late env detected for provider that requires restart", { + providerID, + env: present, + }) + } +} + function cost(c: ModelsDev.Model["cost"]): Model["cost"] { const result: Model["cost"] = { input: c?.input ?? 0, @@ -1237,11 +1295,13 @@ export const layer = Layer.effect( const discoveryLoaders: { [providerID: string]: CustomDiscoverModels } = {} + const loaderRefusedDynamicEnv = new Set() const dep = { auth: (id: string) => auth.get(id).pipe(Effect.orDie), config: () => config.get(), env: () => env.all(), get: (key: string) => env.get(key), + set: (key: string, value: string) => env.set(key, value), } log.info("init") @@ -1442,6 +1502,10 @@ export const layer = Layer.effect( const result = yield* fn(data) if (!result) continue + if (!result.autoload && result.dynamicEnv === false) { + loaderRefusedDynamicEnv.add(providerID) + } + // Always register loaders so a late env-promoted provider can find // its modelLoader / varsLoader / discoverModels. 
if (result.getModel) modelLoaders[providerID] = result.getModel @@ -1577,6 +1641,7 @@ export const layer = Layer.effect( for (const [id, info] of Object.entries(database)) { if (disabled.has(id)) continue if (enabled && !enabled.has(id)) continue + if (loaderRefusedDynamicEnv.has(ProviderID.make(id))) continue if (info.env.length === 0) continue const providerID = ProviderID.make(id) const configProvider = cfg.provider?.[id] @@ -1598,12 +1663,15 @@ export const layer = Layer.effect( modelLoaders, varsLoaders, cleanedDatabase, + loaderRefusedDynamicEnv, + warnedLateEnvRefused: new Set(), } }), ) const view = Effect.fnUntraced(function* (s: State) { const envs = yield* env.all() + warnLateEnvRefused(s, envs) return currentProviders(s, envs) }) diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index b65c202713a1..af7577566fbb 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -4,7 +4,7 @@ import os from "os" import path from "path" import fs from "fs/promises" import { setTimeout as sleep } from "node:timers/promises" -import { afterAll } from "bun:test" +import { afterAll, afterEach } from "bun:test" // Set XDG env vars FIRST, before any src/ imports const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid) @@ -114,6 +114,26 @@ delete process.env["OPENCODE_SERVER_USERNAME"] // Use in-memory sqlite process.env["OPENCODE_DB"] = ":memory:" +// Capture baseline AFTER all preload deletes/sets settle. With the env layer +// now writing through to `process.env` directly (see src/env/index.ts), tests +// that call `set()` mutate global state. Without a per-test reset, leaks +// would cross file boundaries (Bun runs all .test.ts files in one shared +// process per `bunfig.toml` defaults). This `afterEach` snapshot/restore +// makes test isolation automatic regardless of contributor discipline. 
+const ENV_BASELINE: Record = { ...process.env } +afterEach(() => { + for (const key of Object.keys(process.env)) { + if (!(key in ENV_BASELINE)) delete process.env[key] + } + for (const [key, value] of Object.entries(ENV_BASELINE) as [string, string | undefined][]) { + if (value === undefined) { + delete process.env[key] + continue + } + if (process.env[key] !== value) process.env[key] = value + } +}) + // Now safe to import from src/ const { Log } = await import("@opencode-ai/core/util/log") const { initProjectors } = await import("../src/server/projectors") diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 776246e9c404..1ecdeefa9c11 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -3157,7 +3157,7 @@ test("late-detected env provider respects blacklist", async () => { }) }) -test("late-detected multi-env provider yields source=env with key undefined", async () => { +test("late-detected multi-env provider is excluded (requires restart)", async () => { await using tmp = await tmpdir({ init: async (dir) => { await Bun.write( @@ -3184,10 +3184,14 @@ test("late-detected multi-env provider yields source=env with key undefined", as expect(before[ProviderID.amazonBedrock]).toBeUndefined() set("AWS_ACCESS_KEY_ID", "AKIA-LATE") const after = await list() - const bedrock = after[ProviderID.amazonBedrock] - expect(bedrock).toBeDefined() - expect(bedrock.source).toBe("env") - expect(bedrock.key).toBeUndefined() + // Multi-credential providers (bedrock, sap-ai-core, azure, vertex) require an + // opencode restart for newly-set credentials to take effect. The custom + // loader emits credentialProvider/deploymentId/etc. only when creds are + // present at init, and those options are not re-derivable post-init from + // env alone. 
Live-promoting them via the env overlay would yield an + // unauthenticated SDK that fails silently at first request, so we + // honestly hide them. + expect(after[ProviderID.amazonBedrock]).toBeUndefined() remove("AWS_ACCESS_KEY_ID") }, }) From 2fa8b8295ce369cc72e377099dce2345f2da4465 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 16:29:24 +0200 Subject: [PATCH 07/19] fix(provider): harden getLanguage against late-env removal/rotation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Round-4 cross-validation by 5 review oracles + 2 design oracles surfaced 5 in-scope findings on top of the late-env contract. This commit addresses all of them. B1 — getLanguage TypeError on env removal (Oracle correctness): when the env key is removed between getModel() and getLanguage(), currentProviders yields no entry for the providerID and resolveSDK / spread on provider.options crashes inside refineRejection (which only refines NoSuchModelError). Surfaces as an unrelated defect. Solution: early-return ModelNotFoundError when the overlay has no provider, mirroring getModel's existing pattern. fuzzysort suggestions are populated identically so the error contract is consistent across the two readers. B2 — s.models cache stale on env-key rotation (Oracle correctness): s.sdk correctly rebuilds via Hash.fast({providerID, npm, options}) when apiKey changes, but the cached LanguageModelV3 wrapper at s.models was keyed only by ${providerID}/${modelID}, so it kept referencing the SDK from before rotation. Multi-env providers (Databricks, Privatemode, Google generative AI) where provider.key is undefined had an even worse collision: two distinct env states produced identical cache keys. Solution: cache key now mirrors s.sdk's Hash.fast keying: Hash.fast(JSON.stringify({providerID, modelID, key, options})). 
Env key rotation, multi-env credential changes, and env-templated baseURL mutations now invalidate the LanguageModel together with the SDK by construction. M1 — Direct process.env write in config.ts well-known auth (Explore §4.2): `process.env[value.key] = value.token` bypassed the typed Env seam. Replaced with `yield* env.set(...)` matching the existing pattern at the OPENCODE_CONSOLE_TOKEN site. No behavioral change post-PR (Env.set now writes through), pure consistency. M2 — Mixed env reads in bedrock loader (Explore §4.3): AWS_CONTAINER_CREDENTIALS_RELATIVE_URI / _FULL_URI were read directly from process.env while surrounding code uses the dep.env() snapshot. Switched to env["..."] for greppability and to keep all bedrock-loader env reads going through one source. M3 — Multi-instance hazard documentation (Oracle architecture, Oracle scope/quality): Env.set writes are process-global. In multi-workspace desktop hosts (one sidecar serving several workspace contexts) two workspaces authenticating to the same provider with different credentials clobber each other. Added explicit warning to the Env module docstring, plus TODO(multi-instance) markers at each existing credential-write site (config.ts well-known + console-token, provider.ts bedrock-bearer + sap-ai-core-service-key) so the future Auth-routing migration has a clean search target. Tests: - getLanguage rejects when env removed between getModel and getLanguage (covers B1) - getLanguage rebuilds language model after env key rotation (covers B2) Verification: - bun typecheck: clean - lsp_diagnostics: clean on all 4 changed files - bun test test/provider/: 368 pass / 0 fail (incl. 
2 new + all PR late-env tests + overlay.test.ts pure unit tests) - bun test test/config/: 163 pass / 0 fail Deferred to separate tickets (out of scope for this PR per the design proposal cross-validated by both oracles): - Multi-env partial-env leak (Databricks/Privatemode/Google generative AI): pre-existing in the deleted load-env loop with identical semantics; no safe automatic rule without catalog metadata distinguishing alias vs complementary multi-env. Fix candidate: default dynamicEnv:false for multi-env providers without custom loaders. - dynamicEnv:false extension to autoload-success providers (warn on secondary env mutation post-init). - mcp-websearch.ts EXA_URL module-level snapshot. - Multi-instance credential isolation rework (Auth-routing migration). --- packages/opencode/src/config/config.ts | 6 ++- packages/opencode/src/env/index.ts | 7 +++ packages/opencode/src/provider/provider.ts | 33 +++++++++++--- .../opencode/test/provider/provider.test.ts | 43 +++++++++++++++++++ 4 files changed, 83 insertions(+), 6 deletions(-) diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index f6081958c2cc..c2b023d1cdb2 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -513,7 +513,9 @@ export const layer = Layer.effect( for (const [key, value] of Object.entries(auth)) { if (value.type === "wellknown") { const url = key.replace(/\/+$/, "") - process.env[value.key] = value.token + // TODO(multi-instance): writes to process-wide process.env via Env.set; + // see env/index.ts docstring on multi-workspace credential clobbering. 
+ yield* env.set(value.key, value.token) log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) if (!response.ok) { @@ -647,6 +649,8 @@ export const layer = Layer.effect( { concurrency: 2 }, ) if (Option.isSome(tokenOpt)) { + // TODO(multi-instance): writes to process-wide process.env via Env.set; + // see env/index.ts docstring on multi-workspace credential clobbering. yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) } diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index 4004502d52d5..3ac0960cb366 100644 --- a/packages/opencode/src/env/index.ts +++ b/packages/opencode/src/env/index.ts @@ -15,6 +15,13 @@ import { Context, Effect, Layer } from "effect" * `process.env`. Tests that mutate env state must serialize within a file * and rely on `test/preload.ts` afterEach baseline restore for cross-file * safety. + * - NOT per-instance isolated: `set`/`remove` write to the process-wide + * `process.env`. In multi-instance hosts (e.g. desktop app where one + * sidecar process serves several workspace contexts) two workspaces + * authenticating to the same provider with different credentials will + * clobber each other — last write wins. KNOWN LIMITATION; the existing + * call sites that persist auth-derived values through `Env.set` are + * marked with `TODO(multi-instance)` for a future Auth-routing migration. */ type State = Record diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 72474a6e878a..92b86ef03672 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -308,13 +308,15 @@ function custom(dep: CustomDep): Record { const envBearerToken = env["AWS_BEARER_TOKEN_BEDROCK"] const awsBearerToken = envBearerToken ?? (auth?.type === "api" ? 
auth.key : undefined) if (!envBearerToken && awsBearerToken) { + // TODO(multi-instance): writes to process-wide process.env via Env.set; + // see env/index.ts docstring on multi-workspace credential clobbering. yield* dep.set("AWS_BEARER_TOKEN_BEDROCK", awsBearerToken) } const awsWebIdentityTokenFile = env["AWS_WEB_IDENTITY_TOKEN_FILE"] const containerCreds = Boolean( - process.env.AWS_CONTAINER_CREDENTIALS_RELATIVE_URI || process.env.AWS_CONTAINER_CREDENTIALS_FULL_URI, + env["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] || env["AWS_CONTAINER_CREDENTIALS_FULL_URI"], ) if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile && !containerCreds) @@ -552,6 +554,8 @@ function custom(dep: CustomDep): Record { return { autoload: false, dynamicEnv: false } } if (!envAICoreServiceKey) { + // TODO(multi-instance): writes to process-wide process.env via Env.set; + // see env/index.ts docstring on multi-workspace credential clobbering. yield* dep.set("AICORE_SERVICE_KEY", envServiceKey) } const deploymentId = env["AICORE_DEPLOYMENT_ID"] @@ -1856,10 +1860,29 @@ export const layer = Layer.effect( const getLanguage = Effect.fn("Provider.getLanguage")(function* (model: Model) { const s = yield* InstanceState.get(state) const envs = yield* env.all() - const key = `${model.providerID}/${model.id}` - if (s.models.has(key)) return s.models.get(key)! 
+ const all = currentProviders(s, envs) + const provider = all[model.providerID] + if (!provider) { + const suggestions = fuzzysort + .go(model.providerID, Object.keys({ ...s.catalog, ...all }), { limit: 3, threshold: -10000 }) + .map((m) => m.target) + return yield* new ModelNotFoundError({ providerID: model.providerID, modelID: model.id, suggestions }) + } + // Mirror s.sdk's Hash.fast keying (see resolveSDK) so the cached + // LanguageModel invalidates whenever the underlying SDK rebuilds — + // env key rotation, multi-env credential changes, env-templated + // baseURL mutations, and option changes all flow through the same + // invariant. + const cacheKey = Hash.fast( + JSON.stringify({ + providerID: model.providerID, + modelID: model.id, + key: provider.key, + options: { ...provider.options, ...model.options }, + }), + ) + if (s.models.has(cacheKey)) return s.models.get(cacheKey)! - const provider = currentProviders(s, envs)[model.providerID] return yield* EffectPromise.refineRejection( async () => { const sdk = await resolveSDK(model, s, provider, envs) @@ -1869,7 +1892,7 @@ export const layer = Layer.effect( ...model.options, }) : sdk.languageModel(model.api.id) - s.models.set(key, language) + s.models.set(cacheKey, language) return language }, (cause) => diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 1ecdeefa9c11..88669e8012ce 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -3267,3 +3267,46 @@ test("late-detected env provider has variants populated (parity with env-at-boot }, }) }) + +test("getLanguage rejects when env removed between getModel and getLanguage", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: 
tmp.path, + fn: async () => { + set("ANTHROPIC_API_KEY", "key") + const model = await getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) + remove("ANTHROPIC_API_KEY") + await expect(getLanguage(model)).rejects.toThrow() + }, + }) +}) + +test("getLanguage rebuilds language model after env key rotation", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + set("ANTHROPIC_API_KEY", "first-key") + const model = await getModel(ProviderID.anthropic, ModelID.make("claude-sonnet-4-20250514")) + const lang1 = await getLanguage(model) + set("ANTHROPIC_API_KEY", "rotated-key") + const lang2 = await getLanguage(model) + expect(lang1).not.toBe(lang2) + remove("ANTHROPIC_API_KEY") + }, + }) +}) From 383fce5710ec96ee1119da4ebd7066f606970dd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 17:08:18 +0200 Subject: [PATCH 08/19] fix(provider): address review feedback on env overlay PR - resolveEnvOverlay: preserve cached.key when candidate is multi-env so a previously-resolved env key is not dropped to undefined when the env layer is later re-evaluated. - hashIdentity: replace bare `Hash.fast(JSON.stringify(...))` with a helper that maps function values to a named-tag sentinel. JSON.stringify silently drops functions, so two distinct closures stored in options (e.g. AWS credentialProvider instances) collided on the same hash. Apply at the resolveSDK and getLanguage call sites. - Mark resolveEnvOverlay, currentProviders, and the bare `layer` export as @internal in their JSDoc so consumers don't depend on them; production code MUST go through Provider.defaultLayer / Provider.Service methods. 
- Document the deep-clone invariant of toPublicInfo+mergeDeep so the prepareModel call inside both providers and cleanedDatabase loops is provably safe (each loop mutates its own clone). - test/preload.ts: add AZURE_COGNITIVE_SERVICES_RESOURCE_NAME and GITLAB_INSTANCE_URL to the deletion list (referenced by the azure cognitive services and gitlab loaders). - test/preload.ts: capture ENV_BASELINE after Log.init() and initProjectors() so any import-time side effects are part of the baseline. Avoids afterEach mistakenly deleting keys set by src/ module evaluation. - Extend overlay.test.ts with the missing multi-env preserve-key case. --- packages/opencode/src/provider/provider.ts | 60 ++++++++++++------- packages/opencode/test/preload.ts | 39 ++++++------ .../opencode/test/provider/overlay.test.ts | 7 +++ 3 files changed, 68 insertions(+), 38 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 92b86ef03672..6a2e9fccef87 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1054,13 +1054,15 @@ interface State { export class Service extends Context.Service()("@opencode/Provider") {} /** - * Single-provider env precedence (pure): + * @internal Test-only export. Not part of the public Provider API; subject to + * change without notice. 
Single-provider env precedence (pure): * - !apiKey, cached.source==="env" → drop * - !apiKey, otherwise → keep cached * - apiKey, cached non-"env" w/o key, single-env candidate → cached + key * - apiKey, cached non-"env" → cached wins * - apiKey, cached==="env" same key → identity - * - apiKey, cached==="env" new key → cached + new key + * - apiKey, cached==="env" new key (single-env candidate) → cached + new key + * - apiKey, cached==="env" multi-env candidate → cached.key preserved * - apiKey, no cached → new env entry from candidate */ export function resolveEnvOverlay( @@ -1076,12 +1078,14 @@ export function resolveEnvOverlay( if (!cached.key && candidate.env.length === 1) return { ...cached, key: apiKey } return cached } - const nextKey = candidate.env.length === 1 ? apiKey : undefined + // Multi-env: preserve cached.key (no single-source-of-truth value). + const nextKey = candidate.env.length === 1 ? apiKey : (cached?.key ?? undefined) if (cached && cached.key === nextKey) return cached if (cached) return { ...cached, key: nextKey } return { ...candidate, source: "env", key: nextKey } } +/** @internal Test-only export. Not part of the public Provider API. */ export function currentProviders( s: Pick, envs: Record, @@ -1117,6 +1121,18 @@ function warnLateEnvRefused( } } +// JSON.stringify drops functions silently, so two distinct closures (e.g. +// AWS credentialProvider instances stored in options) collide on the same +// hash. Replace with a name-tagged sentinel so they surface as distinct. +function hashIdentity(parts: Record): string { + return Hash.fast( + JSON.stringify(parts, (_key, value) => { + if (typeof value === "function") return `__fn:${value.name || "anon"}` + return value + }), + ) +} + function cost(c: ModelsDev.Model["cost"]): Model["cost"] { const result: Model["cost"] = { input: c?.input ?? 
0, @@ -1267,6 +1283,11 @@ function modelSuggestions(provider: Info | undefined, modelID: ModelID, enableEx .map((item) => item.id) } +/** + * @internal Bare layer without sub-layer wiring; exposed only so tests can + * compose with stubbed sub-layers (e.g. RuntimeFlags overrides). Production + * code MUST use `defaultLayer` instead. + */ export const layer = Layer.effect( Service, Effect.gen(function* () { @@ -1614,6 +1635,9 @@ export const layer = Layer.effect( } } + // toPublicInfo (line ~982) deep-clones via JSON, and mergeDeep produces + // fresh objects, so providers[id].models and database[id].models hold + // distinct Model instances. prepareModel inside both loops is safe. for (const [id, provider] of Object.entries(providers)) { const providerID = ProviderID.make(id) if (!isProviderAllowed(providerID)) { @@ -1641,7 +1665,7 @@ export const layer = Layer.effect( // Build cleanedDatabase. Apply the same prepareModel + keepModel pipeline // so a late env-detected provider exposes the same models, with the same // variants, as it would if env had been present at init. 
- const cleanedDatabase: Record = {} as Record + const cleanedDatabase: Record = {} for (const [id, info] of Object.entries(database)) { if (disabled.has(id)) continue if (enabled && !enabled.has(id)) continue @@ -1728,13 +1752,11 @@ export const layer = Layer.effect( ...model.headers, } - const key = Hash.fast( - JSON.stringify({ - providerID: model.providerID, - npm: model.api.npm, - options, - }), - ) + const key = hashIdentity({ + providerID: model.providerID, + npm: model.api.npm, + options, + }) const existing = s.sdk.get(key) if (existing) return existing @@ -1868,19 +1890,17 @@ export const layer = Layer.effect( .map((m) => m.target) return yield* new ModelNotFoundError({ providerID: model.providerID, modelID: model.id, suggestions }) } - // Mirror s.sdk's Hash.fast keying (see resolveSDK) so the cached + // Mirror s.sdk's hashIdentity keying (see resolveSDK) so the cached // LanguageModel invalidates whenever the underlying SDK rebuilds — // env key rotation, multi-env credential changes, env-templated // baseURL mutations, and option changes all flow through the same // invariant. - const cacheKey = Hash.fast( - JSON.stringify({ - providerID: model.providerID, - modelID: model.id, - key: provider.key, - options: { ...provider.options, ...model.options }, - }), - ) + const cacheKey = hashIdentity({ + providerID: model.providerID, + modelID: model.id, + key: provider.key, + options: { ...provider.options, ...model.options }, + }) if (s.models.has(cacheKey)) return s.models.get(cacheKey)! 
return yield* EffectPromise.refineRejection( diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index af7577566fbb..c5b718c204df 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -59,6 +59,7 @@ delete process.env["GOOGLE_API_KEY"] delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"] delete process.env["AZURE_OPENAI_API_KEY"] delete process.env["AZURE_RESOURCE_NAME"] +delete process.env["AZURE_COGNITIVE_SERVICES_RESOURCE_NAME"] delete process.env["AZURE_API_KEY"] delete process.env["AWS_ACCESS_KEY_ID"] delete process.env["AWS_SECRET_ACCESS_KEY"] @@ -98,6 +99,7 @@ delete process.env["CLOUDFLARE_API_KEY"] delete process.env["CF_AIG_TOKEN"] delete process.env["GITHUB_TOKEN"] delete process.env["GITLAB_TOKEN"] +delete process.env["GITLAB_INSTANCE_URL"] delete process.env["OPENCODE_API_KEY"] delete process.env["GEMINI_API_KEY"] delete process.env["HF_TOKEN"] @@ -114,12 +116,25 @@ delete process.env["OPENCODE_SERVER_USERNAME"] // Use in-memory sqlite process.env["OPENCODE_DB"] = ":memory:" -// Capture baseline AFTER all preload deletes/sets settle. With the env layer -// now writing through to `process.env` directly (see src/env/index.ts), tests -// that call `set()` mutate global state. Without a per-test reset, leaks -// would cross file boundaries (Bun runs all .test.ts files in one shared -// process per `bunfig.toml` defaults). This `afterEach` snapshot/restore -// makes test isolation automatic regardless of contributor discipline. +// Now safe to import from src/ +const { Log } = await import("@opencode-ai/core/util/log") +const { initProjectors } = await import("../src/server/projectors") + +void Log.init({ + print: false, + dev: true, + level: "DEBUG", +}) + +initProjectors() + +// Capture baseline AFTER all preload deletes/sets AND src/ side-effectful +// imports (Log.init, initProjectors) settle. 
With the env layer now writing +// through to `process.env` directly (see src/env/index.ts), tests that call +// `set()` mutate global state. Without a per-test reset, leaks would cross +// file boundaries (Bun runs all .test.ts files in one shared process per +// `bunfig.toml` defaults). This `afterEach` snapshot/restore makes test +// isolation automatic regardless of contributor discipline. const ENV_BASELINE: Record = { ...process.env } afterEach(() => { for (const key of Object.keys(process.env)) { @@ -133,15 +148,3 @@ afterEach(() => { if (process.env[key] !== value) process.env[key] = value } }) - -// Now safe to import from src/ -const { Log } = await import("@opencode-ai/core/util/log") -const { initProjectors } = await import("../src/server/projectors") - -void Log.init({ - print: false, - dev: true, - level: "DEBUG", -}) - -initProjectors() diff --git a/packages/opencode/test/provider/overlay.test.ts b/packages/opencode/test/provider/overlay.test.ts index 99dd55423413..7f0a3986dbf7 100644 --- a/packages/opencode/test/provider/overlay.test.ts +++ b/packages/opencode/test/provider/overlay.test.ts @@ -71,6 +71,13 @@ test("resolveEnvOverlay: cached config entry without key, multi-env apiKey -> ca expect(r).toBe(cached) }) +test("resolveEnvOverlay: cached env entry, multi-env candidate -> existing key preserved", () => { + const cached = { ...baseInfo(["A", "B"]), key: "preserved" } + const r = resolveEnvOverlay(cached, baseInfo(["A", "B"]), "any-env-value") + expect(r).toBe(cached) + expect(r?.key).toBe("preserved") +}) + test("currentProviders: late env adds provider drawn from cleanedDatabase", () => { const candidate = baseInfo(["FOO_KEY"]) const r = currentProviders( From 490df461e89b92f6fdfd9c335195e530fc15782e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 17:33:44 +0200 Subject: [PATCH 09/19] =?UTF-8?q?fix(provider):=20post-review=20polish=20?= 
=?UTF-8?q?=E2=80=94=20hashIdentity=20comment,=20hot-path=20tracing,=20rer?= =?UTF-8?q?unOn=20TODO?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Address cross-validation findings from 4-oracle review: - env: drop Effect.fn spans on get/all (hot path; every Provider read goes through env.all()). Keep tracing on set/remove where the side effect is observable. - provider: tighten hashIdentity comment to admit that anonymous arrows still collide on __fn:anon, and explain why that permissiveness is intentional (per-call fetch wrapper in resolveSDK must hit the SDK cache instead of busting it). - provider: simplify `cached?.key ?? undefined` to `cached?.key` in resolveEnvOverlay (redundant with optional chaining). - provider: add TODO(rerunOn) marker in dynamicEnv JSDoc as the future declarative env-dependency list that would close the sap-ai-core / amazon-bedrock late-env case without restart. No behavioral change. typecheck + 465 tests pass. --- packages/opencode/src/env/index.ts | 7 +++++-- packages/opencode/src/provider/provider.ts | 13 +++++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index 3ac0960cb366..b33e43ffbf38 100644 --- a/packages/opencode/src/env/index.ts +++ b/packages/opencode/src/env/index.ts @@ -37,8 +37,11 @@ export class Service extends Context.Service()("@opencode/En export const layer = Layer.succeed( Service, Service.of({ - get: Effect.fn("Env.get")((key: string) => Effect.sync(() => process.env[key])), - all: Effect.fn("Env.all")(() => Effect.sync(() => ({ ...process.env }))), + // get/all are untraced: every Provider read goes through env.all(); a span + // around a literal `process.env[k]` access is hot-path noise. Trace + // set/remove where the side effect is interesting. 
+ get: (key: string) => Effect.sync(() => process.env[key]), + all: () => Effect.sync(() => ({ ...process.env })), set: Effect.fn("Env.set")((key: string, value: string) => Effect.sync(() => { process.env[key] = value diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 6a2e9fccef87..7c81addb5b72 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -142,6 +142,11 @@ type CustomLoader = (provider: Info) => Effect.Effect<{ // (anthropic, openai, openrouter, ...) do NOT need this flag: their late // env value flows through `provider.key` via the env overlay and is // applied by `resolveSDK` at call time. + // + // TODO(rerunOn): replace this binary opt-out with a declarative + // `rerunOn: string[]` env-dependency list so refused providers can be + // re-evaluated when one of their declared env inputs changes (would + // close the sap-ai-core / amazon-bedrock late-env case without restart). dynamicEnv?: boolean }> @@ -1079,7 +1084,7 @@ export function resolveEnvOverlay( return cached } // Multi-env: preserve cached.key (no single-source-of-truth value). - const nextKey = candidate.env.length === 1 ? apiKey : (cached?.key ?? undefined) + const nextKey = candidate.env.length === 1 ? apiKey : cached?.key if (cached && cached.key === nextKey) return cached if (cached) return { ...cached, key: nextKey } return { ...candidate, source: "env", key: nextKey } @@ -1123,7 +1128,11 @@ function warnLateEnvRefused( // JSON.stringify drops functions silently, so two distinct closures (e.g. // AWS credentialProvider instances stored in options) collide on the same -// hash. Replace with a name-tagged sentinel so they surface as distinct. +// hash. Replace with a name-tagged sentinel that disambiguates named +// closures (e.g. `coalesceProvider` vs another named factory). 
Anonymous +// arrows still collide on `__fn:anon` — that is intentional: it lets the +// per-call `fetch` wrapper installed in `resolveSDK` (an arrow built on +// every invocation) hit the SDK cache instead of busting it on every call. function hashIdentity(parts: Record): string { return Hash.fast( JSON.stringify(parts, (_key, value) => { From e699d09c2b8af9fef53577786f8e3544ffe60ee1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 18:35:18 +0200 Subject: [PATCH 10/19] =?UTF-8?q?fix(provider):=20post-review=20hardenings?= =?UTF-8?q?=20=E2=80=94=20vars=20live-env,=20whitespace,=20BigInt,=20freez?= =?UTF-8?q?e,=20rename?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Cross-validated by 3 oracle reviews + 1 hands-on empirical review. Each hardening keeps the PR's documented contract intact and matches the existing comment-as-invariant style. vars() closures (azure / google-vertex / cloudflare-workers-ai) Re-read process.env at call time so late env rotation propagates through ${VAR} baseURL templating in resolveSDK. Falls back to init-time capture for parity with the non-rotated case. Documented precedence preserved (azure: provider.options > auth.metadata > env). currentProviders / warnLateEnvRefused — whitespace-as-absent Extract isNonBlank type-guard, replace .find(Boolean) so a whitespace-only env value (e.g. " ") is treated as absent rather than promoting a provider that 401s on first request. Original (un-trimmed) value is preserved verbatim when non-blank. hashIdentity — BigInt safety + named-collision caveat Add bigint branch in JSON.stringify replacer so plugin-injected BigInt in provider.options no longer throws TypeError at first getLanguage. Extend comment to flag the named-function collision risk for third-party plugins, with TODO(hash) pointer to a future effect/Hash + WeakMap migration. Export as @internal for unit tests. 
view → observeProviders rename Helper called like a getter (six callsites) but mutates s.warnedLateEnvRefused via warnLateEnvRefused. New name announces the side-effect. Object.freeze(cleanedDatabase) Back the comment-only Readonly<...> invariant at runtime so a future refactor that accidentally mutates cleanedDatabase fails loudly instead of silently desynchronizing the env overlay. warnedLateEnvRefused doc fix Correct "once-per-process" → "once-per-instance". The Set is per-InstanceState; multi-instance hosts (desktop sidecar with several workspaces) warn once per workspace. Tests - test/provider/hash-identity.test.ts (new): 6 cases covering plain-object stability, BigInt no-throw, BigInt-vs-Number distinct, named-fn distinct, anon-fn intentional collision. - test/provider/overlay.test.ts: +4 cases (whitespace absent, empty absent, surrounding-whitespace verbatim, blank-skip in multi-env list). Verification bun typecheck exit 0 bun test test/provider/overlay.test.ts hash-identity.test.ts 24 pass / 0 fail bun test test/provider/provider.test.ts --timeout 30000 103 pass / 0 fail bun test test/provider/transform.test.ts model-status digitalocean 230 pass / 0 fail lsp_diagnostics on changed files clean Out of scope (deferred to follow-up PRs): - multi-env config provider rotation (contract change in resolveEnvOverlay) - unbounded s.sdk / s.models caches (eviction policy design) - TOCTOU concurrent getLanguage build (Effect.cached migration) - clobbering diagnostic on Env.set overwrite (logger import in env layer risks circularity with Log.init) --- packages/opencode/src/provider/provider.ts | 96 +++++++++++++++---- .../test/provider/hash-identity.test.ts | 39 ++++++++ .../opencode/test/provider/overlay.test.ts | 45 +++++++++ 3 files changed, 161 insertions(+), 19 deletions(-) create mode 100644 packages/opencode/test/provider/hash-identity.test.ts diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 
7c81addb5b72..7ece4e2b34de 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -261,9 +261,18 @@ function custom(dep: CustomDep): Record { resourceName: resource, }, vars(_options): Record { - if (resource) { + // Re-read live env at call time so late `AZURE_RESOURCE_NAME` env + // rotation propagates through `${AZURE_RESOURCE_NAME}` baseURL + // templating in `resolveSDK`. Precedence preserved: + // provider.options.resourceName > auth.metadata.resourceName > env. + const liveResource = [ + provider.options?.resourceName, + auth?.type === "api" ? auth.metadata?.resourceName : undefined, + process.env["AZURE_RESOURCE_NAME"], + ].find((name) => typeof name === "string" && name.trim() !== "") + if (liveResource) { return { - AZURE_RESOURCE_NAME: resource, + AZURE_RESOURCE_NAME: liveResource, } } return {} @@ -504,10 +513,23 @@ function custom(dep: CustomDep): Record { return { autoload: true, vars(_options: Record) { - const endpoint = location === "global" ? "aiplatform.googleapis.com" : `${location}-aiplatform.googleapis.com` + // Re-read live env at call time so post-init rotation of any of the + // GOOGLE_VERTEX_* / GOOGLE_CLOUD_* / GCP_PROJECT envs propagates + // through baseURL templating. Falls back to init-time capture for + // parity with the non-rotated case. + const liveProject = + process.env["GOOGLE_VERTEX_PROJECT"] ?? + process.env["GOOGLE_CLOUD_PROJECT"] ?? + process.env["GCP_PROJECT"] ?? + process.env["GCLOUD_PROJECT"] ?? + project + const liveLocation = + process.env["GOOGLE_VERTEX_LOCATION"] ?? process.env["GOOGLE_CLOUD_LOCATION"] ?? process.env["VERTEX_LOCATION"] ?? location + const endpoint = + liveLocation === "global" ? 
"aiplatform.googleapis.com" : `${liveLocation}-aiplatform.googleapis.com` return { - ...(project && { GOOGLE_VERTEX_PROJECT: project }), - GOOGLE_VERTEX_LOCATION: location, + ...(liveProject && { GOOGLE_VERTEX_PROJECT: liveProject }), + GOOGLE_VERTEX_LOCATION: liveLocation, GOOGLE_VERTEX_ENDPOINT: endpoint, } }, @@ -766,8 +788,11 @@ function custom(dep: CustomDep): Record { return sdk.languageModel(modelID) }, vars(_options) { + // Re-read live env at call time so late `CLOUDFLARE_ACCOUNT_ID` + // rotation propagates through baseURL templating. Falls back to + // init-time capture (env or auth metadata) when env is unset. return { - CLOUDFLARE_ACCOUNT_ID: accountId, + CLOUDFLARE_ACCOUNT_ID: process.env["CLOUDFLARE_ACCOUNT_ID"] ?? accountId, } }, } @@ -1051,8 +1076,10 @@ interface State { // Providers whose custom loader returned `dynamicEnv: false`. Populated at // init only; never mutated post-init. loaderRefusedDynamicEnv: Set - // Mutated post-init by `warnLateEnvRefused` to dedupe the once-per-process - // warning emitted when a refused provider's env appears. + // Mutated post-init by `warnLateEnvRefused` to dedupe the once-per-instance + // warning emitted when a refused provider's env appears. Note: this is + // per-`InstanceState`, not per-process — multi-instance hosts (desktop + // sidecar with several workspaces) will warn once per workspace. warnedLateEnvRefused: Set } @@ -1098,8 +1125,11 @@ export function currentProviders( const result: Record = { ...s.cachedProviders } for (const [id, info] of Object.entries(s.cleanedDatabase)) { const providerID = id as ProviderID - // Treat empty strings as absent: an empty env var is never a valid API key. - const apiKey = info.env.map((k) => envs[k]).find(Boolean) + // Treat empty AND whitespace-only strings as absent: a blank env var is + // never a valid API key. 
The original (un-trimmed) value is preserved + // when non-blank — a surrounding-whitespace key is still passed through + // verbatim because trimming a real key would be silently incorrect. + const apiKey = info.env.map((k) => envs[k]).find(isNonBlank) const next = resolveEnvOverlay(result[providerID], info, apiKey) if (next) result[providerID] = next else delete result[providerID] @@ -1107,6 +1137,10 @@ export function currentProviders( return result } +function isNonBlank(v: string | undefined): v is string { + return typeof v === "string" && v.trim() !== "" +} + function warnLateEnvRefused( s: Pick, envs: Record, @@ -1116,7 +1150,7 @@ function warnLateEnvRefused( if (s.warnedLateEnvRefused.has(providerID)) continue const info = s.catalog[providerID] if (!info) continue - const present = info.env.find((k) => envs[k]) + const present = info.env.find((k) => isNonBlank(envs[k])) if (!present) continue s.warnedLateEnvRefused.add(providerID) log.warn("late env detected for provider that requires restart", { @@ -1133,10 +1167,29 @@ function warnLateEnvRefused( // arrows still collide on `__fn:anon` — that is intentional: it lets the // per-call `fetch` wrapper installed in `resolveSDK` (an arrow built on // every invocation) hit the SDK cache instead of busting it on every call. -function hashIdentity(parts: Record): string { +// +// CAVEAT: named-function collisions across UNRELATED callers are still +// possible. If a third-party plugin stores a function named `coalesceProvider` +// in `provider.options`, it will hash-collide with the in-tree AWS bedrock +// loader's same-named closure and silently serve a stale SDK. In-tree +// loaders avoid this; plugin authors must not put stateful closures in +// `provider.options` outside the per-call `fetch` wrapper convention. +// +// BigInt values are stringified with an `n` suffix so JSON.stringify does not +// throw `TypeError: Do not know how to serialize a BigInt`. 
Plain numeric +// types serialize as-is, so `1` and `1n` produce distinct hashes (`1` vs +// `"1n"`). +// +// TODO(hash): migrate to `effect/Hash` + `Equal.equals` for structural value +// hashing combined with a `WeakMap`-tracked identity for functions. That +// would fix the named-collision risk above without the per-call cache-bust +// regression. +/** @internal Test-only export. Not part of the public Provider API. */ +export function hashIdentity(parts: Record): string { return Hash.fast( JSON.stringify(parts, (_key, value) => { if (typeof value === "function") return `__fn:${value.name || "anon"}` + if (typeof value === "bigint") return `${value.toString()}n` return value }), ) @@ -1691,6 +1744,11 @@ export const layer = Layer.effect( if (Object.keys(filteredModels).length === 0) continue cleanedDatabase[providerID] = { ...info, models: filteredModels } } + // Back the comment-only `Readonly<...>` invariant at runtime so a + // future refactor that accidentally mutates `cleanedDatabase` + // (e.g. assigning a new entry post-init) fails loudly under strict + // mode instead of silently desynchronizing the env overlay. 
+ Object.freeze(cleanedDatabase) return { models: languages, @@ -1706,7 +1764,7 @@ export const layer = Layer.effect( }), ) - const view = Effect.fnUntraced(function* (s: State) { + const observeProviders = Effect.fnUntraced(function* (s: State) { const envs = yield* env.all() warnLateEnvRefused(s, envs) return currentProviders(s, envs) @@ -1714,7 +1772,7 @@ export const layer = Layer.effect( const list = Effect.fn("Provider.list")(function* () { const s = yield* InstanceState.get(state) - return yield* view(s) + return yield* observeProviders(s) }) async function resolveSDK(model: Model, s: State, provider: Info, envs: Record) { @@ -1859,13 +1917,13 @@ export const layer = Layer.effect( const getProvider = Effect.fn("Provider.getProvider")(function* (providerID: ProviderID) { const s = yield* InstanceState.get(state) - const all = yield* view(s) + const all = yield* observeProviders(s) return all[providerID] }) const getModel = Effect.fn("Provider.getModel")(function* (providerID: ProviderID, modelID: ModelID) { const s = yield* InstanceState.get(state) - const all = yield* view(s) + const all = yield* observeProviders(s) const provider = all[providerID] if (!provider) { const catalogProvider = s.catalog[providerID] @@ -1933,7 +1991,7 @@ export const layer = Layer.effect( const closest = Effect.fn("Provider.closest")(function* (providerID: ProviderID, query: string[]) { const s = yield* InstanceState.get(state) - const provider = (yield* view(s))[providerID] + const provider = (yield* observeProviders(s))[providerID] if (!provider) return undefined for (const item of query) { for (const modelID of Object.keys(provider.models)) { @@ -1954,7 +2012,7 @@ export const layer = Layer.effect( } const s = yield* InstanceState.get(state) - const provider = (yield* view(s))[providerID] + const provider = (yield* observeProviders(s))[providerID] if (!provider) return undefined let priority = [ @@ -2006,7 +2064,7 @@ export const layer = Layer.effect( if (cfg.model) return 
parseModel(cfg.model) const s = yield* InstanceState.get(state) - const all = yield* view(s) + const all = yield* observeProviders(s) const recent = yield* fs.readJson(path.join(Global.Path.state, "model.json")).pipe( Effect.map((x): { providerID: ProviderID; modelID: ModelID }[] => { if (!isRecord(x) || !Array.isArray(x.recent)) return [] diff --git a/packages/opencode/test/provider/hash-identity.test.ts b/packages/opencode/test/provider/hash-identity.test.ts new file mode 100644 index 000000000000..785b58391038 --- /dev/null +++ b/packages/opencode/test/provider/hash-identity.test.ts @@ -0,0 +1,39 @@ +import { test, expect } from "bun:test" + +import { hashIdentity } from "@/provider/provider" + +test("hashIdentity: stable for plain objects", () => { + const a = hashIdentity({ providerID: "x", npm: "@ai-sdk/x", options: { apiKey: "k" } }) + const b = hashIdentity({ providerID: "x", npm: "@ai-sdk/x", options: { apiKey: "k" } }) + expect(a).toBe(b) +}) + +test("hashIdentity: distinct for differing primitive values", () => { + const a = hashIdentity({ providerID: "x", options: { apiKey: "k1" } }) + const b = hashIdentity({ providerID: "x", options: { apiKey: "k2" } }) + expect(a).not.toBe(b) +}) + +test("hashIdentity: handles BigInt without throwing (regression for plugin options)", () => { + expect(() => hashIdentity({ providerID: "x", options: { someBig: 1n } })).not.toThrow() +}) + +test("hashIdentity: BigInt distinct from same-magnitude Number", () => { + const big = hashIdentity({ providerID: "x", options: { v: 1n } }) + const num = hashIdentity({ providerID: "x", options: { v: 1 } }) + expect(big).not.toBe(num) +}) + +test("hashIdentity: named functions disambiguate by name", () => { + function coalesceProvider() {} + function otherFactory() {} + const a = hashIdentity({ providerID: "x", options: { credentialProvider: coalesceProvider } }) + const b = hashIdentity({ providerID: "x", options: { credentialProvider: otherFactory } }) + expect(a).not.toBe(b) +}) + 
+test("hashIdentity: anonymous arrows collide intentionally (per-call fetch wrapper)", () => { + const a = hashIdentity({ providerID: "x", options: { fetch: () => undefined } }) + const b = hashIdentity({ providerID: "x", options: { fetch: () => undefined } }) + expect(a).toBe(b) +}) diff --git a/packages/opencode/test/provider/overlay.test.ts b/packages/opencode/test/provider/overlay.test.ts index 7f0a3986dbf7..d91c7ec4bfa0 100644 --- a/packages/opencode/test/provider/overlay.test.ts +++ b/packages/opencode/test/provider/overlay.test.ts @@ -123,3 +123,48 @@ test("currentProviders: cached api entry preserved across env presence/absence", ) expect(noEnv["foo" as ProviderID]).toBe(cached) }) + +test("currentProviders: whitespace-only env value treated as absent", () => { + const r = currentProviders( + { + cachedProviders: {} as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + { FOO_KEY: " " }, + ) + expect(r["foo" as ProviderID]).toBeUndefined() +}) + +test("currentProviders: empty-string env value treated as absent", () => { + const r = currentProviders( + { + cachedProviders: {} as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + { FOO_KEY: "" }, + ) + expect(r["foo" as ProviderID]).toBeUndefined() +}) + +test("currentProviders: surrounding-whitespace env value preserved verbatim (not trimmed)", () => { + const r = currentProviders( + { + cachedProviders: {} as Record, + cleanedDatabase: { foo: baseInfo(["FOO_KEY"]) } as Record, + }, + { FOO_KEY: " abc " }, + ) + expect(r["foo" as ProviderID]?.key).toBe(" abc ") +}) + +test("currentProviders: blank env skipped, falls through to next env in multi-env list", () => { + const r = currentProviders( + { + cachedProviders: {} as Record, + cleanedDatabase: { foo: baseInfo(["A", "B"]) } as Record, + }, + { A: " ", B: "real-key" }, + ) + expect(r["foo" as ProviderID]?.source).toBe("env") + expect(r["foo" as ProviderID]?.key).toBeUndefined() +}) From 
4a3bb6a57a57df7d4d6acd282c3df7012ddd8658 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 18:58:35 +0200 Subject: [PATCH 11/19] refactor(provider): trim slop comments Drop redundant duplicated documentation: - env/index.ts docstring: 22 -> 4 lines (kept: live-read + process-wide + TODO marker) - provider.ts dynamicEnv JSDoc: 22 -> 6 lines (kept: when, why, TODO) - provider.ts State field comments: 22 -> 8 lines - provider.ts resolveEnvOverlay JSDoc: 12 -> 4 lines (precedence table lives in overlay.test.ts) - provider.ts hashIdentity comment: 24 -> 13 lines - provider.ts isNonBlank rationale: 4 -> 2 lines - TODO(multi-instance) markers: 2 -> 1 line at 4 call sites (provider.ts x2, config.ts x2) Also: gitignore .sisyphus/ orchestrator scratch dir. No behavioral change. typecheck clean, 24/24 overlay+hash tests pass. Per cross-validated review (D1, Oracle #2 blocker B2). --- .gitignore | 3 + packages/opencode/src/config/config.ts | 6 +- packages/opencode/src/env/index.ts | 27 +----- packages/opencode/src/provider/provider.ts | 107 ++++++--------------- 4 files changed, 38 insertions(+), 105 deletions(-) diff --git a/.gitignore b/.gitignore index 19198a7a5918..08cdba700ea8 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,6 @@ UPCOMING_CHANGELOG.md logs/ *.bun-build tsconfig.tsbuildinfo + +# Sisyphus orchestrator state (local only) +.sisyphus/ diff --git a/packages/opencode/src/config/config.ts b/packages/opencode/src/config/config.ts index c2b023d1cdb2..3b3c6f6d30cd 100644 --- a/packages/opencode/src/config/config.ts +++ b/packages/opencode/src/config/config.ts @@ -513,8 +513,7 @@ export const layer = Layer.effect( for (const [key, value] of Object.entries(auth)) { if (value.type === "wellknown") { const url = key.replace(/\/+$/, "") - // TODO(multi-instance): writes to process-wide process.env via Env.set; - // see env/index.ts docstring on multi-workspace credential clobbering. 
+ // TODO(multi-instance): see env/index.ts docstring. yield* env.set(value.key, value.token) log.debug("fetching remote config", { url: `${url}/.well-known/opencode` }) const response = yield* Effect.promise(() => fetch(`${url}/.well-known/opencode`)) @@ -649,8 +648,7 @@ export const layer = Layer.effect( { concurrency: 2 }, ) if (Option.isSome(tokenOpt)) { - // TODO(multi-instance): writes to process-wide process.env via Env.set; - // see env/index.ts docstring on multi-workspace credential clobbering. + // TODO(multi-instance): see env/index.ts docstring. yield* env.set("OPENCODE_CONSOLE_TOKEN", tokenOpt.value) } diff --git a/packages/opencode/src/env/index.ts b/packages/opencode/src/env/index.ts index b33e43ffbf38..35fcec39c4e5 100644 --- a/packages/opencode/src/env/index.ts +++ b/packages/opencode/src/env/index.ts @@ -1,27 +1,9 @@ import { Context, Effect, Layer } from "effect" /** - * Effect-aware wrapper around `process.env`. - * - * Contract: - * - Reads (`get`, `all`) are LIVE — they reflect the current `process.env` at - * call time, not a snapshot taken at layer construction. `all()` returns a - * fresh shallow copy, so iterating the result is safe across subsequent - * writes. - * - Writes (`set`, `remove`) mutate `process.env` directly and therefore - * propagate to any child process spawned afterward and to native consumers - * (e.g. `getenv()` inside vendor SDKs). - * - NOT safe for parallel test isolation: all callers share one global - * `process.env`. Tests that mutate env state must serialize within a file - * and rely on `test/preload.ts` afterEach baseline restore for cross-file - * safety. - * - NOT per-instance isolated: `set`/`remove` write to the process-wide - * `process.env`. In multi-instance hosts (e.g. desktop app where one - * sidecar process serves several workspace contexts) two workspaces - * authenticating to the same provider with different credentials will - * clobber each other — last write wins. 
KNOWN LIMITATION; the existing - * call sites that persist auth-derived values through `Env.set` are - * marked with `TODO(multi-instance)` for a future Auth-routing migration. + * Effect-aware wrapper around `process.env`. Reads are live (no snapshot); + * `set`/`remove` mutate `process.env` directly. Writes are process-wide — + * call sites that persist auth-derived values are marked `TODO(multi-instance)`. */ type State = Record @@ -37,9 +19,6 @@ export class Service extends Context.Service()("@opencode/En export const layer = Layer.succeed( Service, Service.of({ - // get/all are untraced: every Provider read goes through env.all(); a span - // around a literal `process.env[k]` access is hot-path noise. Trace - // set/remove where the side effect is interesting. get: (key: string) => Effect.sync(() => process.env[key]), all: () => Effect.sync(() => ({ ...process.env })), set: Effect.fn("Env.set")((key: string, value: string) => diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 7ece4e2b34de..9b50f7e60a7b 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -125,28 +125,12 @@ type CustomLoader = (provider: Info) => Effect.Effect<{ vars?: CustomVarsLoader options?: Record discoverModels?: CustomDiscoverModels - // Set to `false` on a non-autoload return to opt out of late-env detection. - // - // Mark a return path with `dynamicEnv: false` whenever the loader cannot - // reconstruct its options from env alone — i.e. when missing init-time - // inputs (auth metadata, credential providers obtained via dynamic SDK - // imports, derived URLs, region/deployment/account IDs) close over values - // that later env mutations cannot replay. Without this flag a late env - // would surface the provider in `list()`, but `getLanguage()` would build - // an SDK with stale or undefined options and fail silently at first - // request. 
Loaders marked `dynamicEnv: false` are excluded from - // `cleanedDatabase` so the provider stays hidden until opencode restart; - // a one-time warn is emitted when the refused env appears. - // - // Single-credential providers whose only env input is the API key - // (anthropic, openai, openrouter, ...) do NOT need this flag: their late - // env value flows through `provider.key` via the env overlay and is - // applied by `resolveSDK` at call time. - // - // TODO(rerunOn): replace this binary opt-out with a declarative - // `rerunOn: string[]` env-dependency list so refused providers can be - // re-evaluated when one of their declared env inputs changes (would - // close the sap-ai-core / amazon-bedrock late-env case without restart). + // Set on a non-autoload return when the loader cannot reconstruct its + // options from env alone (auth metadata, dynamic SDK imports, derived URLs). + // `dynamicEnv: false` excludes the provider from `cleanedDatabase` so a late + // env never surfaces it with stale options; a one-time warn is emitted + // instead. Single-env-key providers (anthropic, openai, ...) do not need it. + // TODO(rerunOn): replace with declarative `rerunOn: string[]` env-dep list. dynamicEnv?: boolean }> @@ -322,8 +306,7 @@ function custom(dep: CustomDep): Record { const envBearerToken = env["AWS_BEARER_TOKEN_BEDROCK"] const awsBearerToken = envBearerToken ?? (auth?.type === "api" ? auth.key : undefined) if (!envBearerToken && awsBearerToken) { - // TODO(multi-instance): writes to process-wide process.env via Env.set; - // see env/index.ts docstring on multi-workspace credential clobbering. + // TODO(multi-instance): see env/index.ts docstring. 
yield* dep.set("AWS_BEARER_TOKEN_BEDROCK", awsBearerToken) } @@ -581,8 +564,7 @@ function custom(dep: CustomDep): Record { return { autoload: false, dynamicEnv: false } } if (!envAICoreServiceKey) { - // TODO(multi-instance): writes to process-wide process.env via Env.set; - // see env/index.ts docstring on multi-workspace credential clobbering. + // TODO(multi-instance): see env/index.ts docstring. yield* dep.set("AICORE_SERVICE_KEY", envServiceKey) } const deploymentId = env["AICORE_DEPLOYMENT_ID"] @@ -1060,42 +1042,26 @@ export interface Interface { interface State { models: Map - // Stamped at init from auth/config/loader results. May contain entries with - // source:"env" for autoload-with-env-key providers (e.g. gitlab, - // cloudflare-ai-gateway) that have no other config source. Readers MUST go - // through `currentProviders(...)` to apply the live env overlay on top. + // Init-time providers from auth/config/loader. Readers MUST go through + // `currentProviders(...)` to apply the live env overlay on top. cachedProviders: Record catalog: Record sdk: Map modelLoaders: Record varsLoaders: Record - // Env-eligible providers only (info.env.length > 0); models pre-filtered - // (status/blacklist/whitelist); non-autoload custom-loader options pre-folded. - // Treated as immutable after init — all reads go through `currentProviders`. + // Env-eligible providers with models pre-filtered. Frozen at init. cleanedDatabase: Readonly> - // Providers whose custom loader returned `dynamicEnv: false`. Populated at - // init only; never mutated post-init. + // Providers excluded from late-env overlay (loader returned `dynamicEnv: false`). loaderRefusedDynamicEnv: Set - // Mutated post-init by `warnLateEnvRefused` to dedupe the once-per-instance - // warning emitted when a refused provider's env appears. Note: this is - // per-`InstanceState`, not per-process — multi-instance hosts (desktop - // sidecar with several workspaces) will warn once per workspace. 
+ // Per-instance dedupe set for `warnLateEnvRefused`. warnedLateEnvRefused: Set } export class Service extends Context.Service()("@opencode/Provider") {} /** - * @internal Test-only export. Not part of the public Provider API; subject to - * change without notice. Single-provider env precedence (pure): - * - !apiKey, cached.source==="env" → drop - * - !apiKey, otherwise → keep cached - * - apiKey, cached non-"env" w/o key, single-env candidate → cached + key - * - apiKey, cached non-"env" → cached wins - * - apiKey, cached==="env" same key → identity - * - apiKey, cached==="env" new key (single-env candidate) → cached + new key - * - apiKey, cached==="env" multi-env candidate → cached.key preserved - * - apiKey, no cached → new env entry from candidate + * @internal Test-only export. Pure single-provider env-overlay step. See + * `overlay.test.ts` for the exhaustive precedence table. */ export function resolveEnvOverlay( cached: Info | undefined, @@ -1110,7 +1076,7 @@ export function resolveEnvOverlay( if (!cached.key && candidate.env.length === 1) return { ...cached, key: apiKey } return cached } - // Multi-env: preserve cached.key (no single-source-of-truth value). + // Multi-env candidate: cached.key has no single source of truth, preserve it. const nextKey = candidate.env.length === 1 ? apiKey : cached?.key if (cached && cached.key === nextKey) return cached if (cached) return { ...cached, key: nextKey } @@ -1125,10 +1091,8 @@ export function currentProviders( const result: Record = { ...s.cachedProviders } for (const [id, info] of Object.entries(s.cleanedDatabase)) { const providerID = id as ProviderID - // Treat empty AND whitespace-only strings as absent: a blank env var is - // never a valid API key. The original (un-trimmed) value is preserved - // when non-blank — a surrounding-whitespace key is still passed through - // verbatim because trimming a real key would be silently incorrect. + // Empty/whitespace env values count as absent. 
Non-blank values are + // passed through verbatim — trimming a real key would be silently wrong. const apiKey = info.env.map((k) => envs[k]).find(isNonBlank) const next = resolveEnvOverlay(result[providerID], info, apiKey) if (next) result[providerID] = next @@ -1160,31 +1124,20 @@ function warnLateEnvRefused( } } -// JSON.stringify drops functions silently, so two distinct closures (e.g. -// AWS credentialProvider instances stored in options) collide on the same -// hash. Replace with a name-tagged sentinel that disambiguates named -// closures (e.g. `coalesceProvider` vs another named factory). Anonymous -// arrows still collide on `__fn:anon` — that is intentional: it lets the -// per-call `fetch` wrapper installed in `resolveSDK` (an arrow built on -// every invocation) hit the SDK cache instead of busting it on every call. +// JSON.stringify drops functions silently and throws on BigInt. Tag both so +// distinct closures (e.g. AWS `coalesceProvider`) and BigInt values produce +// stable, distinct hashes. Anonymous arrows collide on `__fn:anon` — that is +// intentional: the per-call `fetch` wrapper built in `resolveSDK` would +// otherwise bust the SDK cache on every invocation. // -// CAVEAT: named-function collisions across UNRELATED callers are still -// possible. If a third-party plugin stores a function named `coalesceProvider` -// in `provider.options`, it will hash-collide with the in-tree AWS bedrock -// loader's same-named closure and silently serve a stale SDK. In-tree -// loaders avoid this; plugin authors must not put stateful closures in -// `provider.options` outside the per-call `fetch` wrapper convention. +// CAVEAT: same-named closures from unrelated callers (e.g. a third-party +// plugin storing a `coalesceProvider` in `provider.options`) collide and may +// silently serve a stale SDK. Plugin authors must keep stateful closures out +// of `provider.options` outside the per-call `fetch` convention. 
// -// BigInt values are stringified with an `n` suffix so JSON.stringify does not -// throw `TypeError: Do not know how to serialize a BigInt`. Plain numeric -// types serialize as-is, so `1` and `1n` produce distinct hashes (`1` vs -// `"1n"`). -// -// TODO(hash): migrate to `effect/Hash` + `Equal.equals` for structural value -// hashing combined with a `WeakMap`-tracked identity for functions. That -// would fix the named-collision risk above without the per-call cache-bust -// regression. -/** @internal Test-only export. Not part of the public Provider API. */ +// TODO(hash): swap for `effect/Hash` + `Equal.equals` with WeakMap-tracked +// function identity to fix the named-collision risk. +/** @internal Test-only export. */ export function hashIdentity(parts: Record): string { return Hash.fast( JSON.stringify(parts, (_key, value) => { From d30bdf3b9a2353353f3af333c927ab1a41b3d8b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 19:05:32 +0200 Subject: [PATCH 12/19] refactor(provider): lift keepModel/prepareModel to top-level helpers Inner const declarations inside the Provider.layer Effect.gen closure violated the AGENTS.md rule that helpers belong below the main export. The only closure capture (runtimeFlags.enableExperimentalModels) is now passed explicitly as a parameter so the helpers are referentially pure. Both helpers are non-exported (per AGENTS.md "namespace-private helpers stay as non-exported top-level declarations"). No behavioral change. typecheck clean. Pre-existing flaky test "plugin config enabled and disabled providers are honored" unchanged. Per cross-validated review (D2, Oracle #2 blocker B3). 
--- packages/opencode/src/provider/provider.ts | 81 +++++++++++----------- 1 file changed, 40 insertions(+), 41 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 9b50f7e60a7b..b20aa733459b 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -1611,45 +1611,6 @@ export const layer = Layer.effect( }) } - const keepModel = ( - model: Model, - modelID: string, - providerID: ProviderID, - configProvider: NonNullable[string] | undefined, - ): boolean => { - if ( - (modelID === "gpt-5-chat-latest" && - (providerID === ProviderID.openai || - providerID === ProviderID.githubCopilot || - providerID === ProviderID.openrouter)) || - (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") - ) - return false - if (model.status === "alpha" && !runtimeFlags.enableExperimentalModels) return false - if (model.status === "deprecated") return false - if (configProvider?.blacklist?.includes(modelID)) return false - if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) return false - return true - } - - const prepareModel = ( - model: Model, - modelID: string, - configVariants: Record | undefined, - ) => { - model.api.id = model.api.id ?? model.id ?? modelID - if (!model.variants || Object.keys(model.variants).length === 0) { - model.variants = mapValues(ProviderTransform.variants(model), (v) => v) - } - if (configVariants && model.variants) { - const merged = mergeDeep(model.variants, configVariants) - model.variants = mapValues( - pickBy(merged, (v) => !v.disabled), - (v) => omit(v, ["disabled"]), - ) - } - } - // toPublicInfo (line ~982) deep-clones via JSON, and mergeDeep produces // fresh objects, so providers[id].models and database[id].models hold // distinct Model instances. prepareModel inside both loops is safe. 
@@ -1664,7 +1625,7 @@ export const layer = Layer.effect( for (const [modelID, model] of Object.entries(provider.models)) { prepareModel(model, modelID, configProvider?.models?.[modelID]?.variants) - if (!keepModel(model, modelID, providerID, configProvider)) { + if (!keepModel(model, modelID, providerID, configProvider, runtimeFlags.enableExperimentalModels)) { delete provider.models[modelID] } } @@ -1691,7 +1652,7 @@ export const layer = Layer.effect( const filteredModels: Record = {} for (const [modelID, model] of Object.entries(info.models)) { prepareModel(model, modelID, configProvider?.models?.[modelID]?.variants) - if (!keepModel(model, modelID, providerID, configProvider)) continue + if (!keepModel(model, modelID, providerID, configProvider, runtimeFlags.enableExperimentalModels)) continue filteredModels[modelID] = model } if (Object.keys(filteredModels).length === 0) continue @@ -2081,4 +2042,42 @@ export function parseModel(model: string) { } } +type ConfigProviderEntry = NonNullable[string]> + +function keepModel( + model: Model, + modelID: string, + providerID: ProviderID, + configProvider: ConfigProviderEntry | undefined, + enableExperimentalModels: boolean, +): boolean { + if ( + (modelID === "gpt-5-chat-latest" && + (providerID === ProviderID.openai || + providerID === ProviderID.githubCopilot || + providerID === ProviderID.openrouter)) || + (providerID === ProviderID.openrouter && modelID === "openai/gpt-5-chat") + ) + return false + if (model.status === "alpha" && !enableExperimentalModels) return false + if (model.status === "deprecated") return false + if (configProvider?.blacklist?.includes(modelID)) return false + if (configProvider?.whitelist && !configProvider.whitelist.includes(modelID)) return false + return true +} + +function prepareModel(model: Model, modelID: string, configVariants: Record | undefined) { + model.api.id = model.api.id ?? model.id ?? 
modelID + if (!model.variants || Object.keys(model.variants).length === 0) { + model.variants = mapValues(ProviderTransform.variants(model), (v) => v) + } + if (configVariants && model.variants) { + const merged = mergeDeep(model.variants, configVariants) + model.variants = mapValues( + pickBy(merged, (v) => !v.disabled), + (v) => omit(v, ["disabled"]), + ) + } +} + export * as Provider from "./provider" From 446a1df2a2acbfacffc1d40df3e2e6d8d6d58da5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 19:08:14 +0200 Subject: [PATCH 13/19] refactor(provider): route vars() process.env reads through liveEnv() helper Cloudflare and Google Vertex vars() callbacks read process.env directly while Azure routed through dep.env()/options. Inconsistent surface flagged by reviewers (librarian Flag #4, Oracle #2 blocker B4). Adds one synchronous file-private liveEnv(key) helper near the top of custom() and routes all 9 vars()-side reads through it. The vars() contract stays synchronous (called from currentProviders on every overlay), so a yield to dep.env() is not viable; this is the named abstraction seam if Env ever stops being a process.env proxy. No behavioral change. typecheck clean, 24/24 overlay+hash tests pass. Per cross-validated review (C1 / Oracle #2 blocker B4). --- packages/opencode/src/provider/provider.ts | 23 +++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index b20aa733459b..38d96bd74b46 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -154,6 +154,15 @@ function selectAzureLanguageModel(sdk: any, modelID: string, useChat: boolean) { return sdk.languageModel(modelID) } +// Synchronous live-read of `process.env`. Loader `vars()` callbacks are sync +// and run on every `currentProviders` overlay, so they cannot yield to +// `dep.env()`. 
Routing all reads through this single helper names the intent +// and gives one extension point if `Env` ever stops being a `process.env` +// proxy. +function liveEnv(key: string): string | undefined { + return process.env[key] +} + function custom(dep: CustomDep): Record { return { anthropic: () => @@ -252,7 +261,7 @@ function custom(dep: CustomDep): Record { const liveResource = [ provider.options?.resourceName, auth?.type === "api" ? auth.metadata?.resourceName : undefined, - process.env["AZURE_RESOURCE_NAME"], + liveEnv("AZURE_RESOURCE_NAME"), ].find((name) => typeof name === "string" && name.trim() !== "") if (liveResource) { return { @@ -501,13 +510,13 @@ function custom(dep: CustomDep): Record { // through baseURL templating. Falls back to init-time capture for // parity with the non-rotated case. const liveProject = - process.env["GOOGLE_VERTEX_PROJECT"] ?? - process.env["GOOGLE_CLOUD_PROJECT"] ?? - process.env["GCP_PROJECT"] ?? - process.env["GCLOUD_PROJECT"] ?? + liveEnv("GOOGLE_VERTEX_PROJECT") ?? + liveEnv("GOOGLE_CLOUD_PROJECT") ?? + liveEnv("GCP_PROJECT") ?? + liveEnv("GCLOUD_PROJECT") ?? project const liveLocation = - process.env["GOOGLE_VERTEX_LOCATION"] ?? process.env["GOOGLE_CLOUD_LOCATION"] ?? process.env["VERTEX_LOCATION"] ?? location + liveEnv("GOOGLE_VERTEX_LOCATION") ?? liveEnv("GOOGLE_CLOUD_LOCATION") ?? liveEnv("VERTEX_LOCATION") ?? location const endpoint = liveLocation === "global" ? "aiplatform.googleapis.com" : `${liveLocation}-aiplatform.googleapis.com` return { @@ -774,7 +783,7 @@ function custom(dep: CustomDep): Record { // rotation propagates through baseURL templating. Falls back to // init-time capture (env or auth metadata) when env is unset. return { - CLOUDFLARE_ACCOUNT_ID: process.env["CLOUDFLARE_ACCOUNT_ID"] ?? accountId, + CLOUDFLARE_ACCOUNT_ID: liveEnv("CLOUDFLARE_ACCOUNT_ID") ?? 
accountId, } }, } From 4e6d8ef994d7f6f6b7bed5780734413a2603edda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 19:20:02 +0200 Subject: [PATCH 14/19] refactor(provider): rename dynamicEnv:false to requiresRestart:true MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Mechanical rename for clarity. The double-negative `dynamicEnv: false` read as "not dynamic-env" — opaque without context. `requiresRestart: true` matches the warning text emitted to users and the loader's operational requirement. Behavior is identical: an undefined field still includes the loader in cleanedDatabase (default = restart NOT required = include). The check flipped from `=== false` to `=== true` accordingly. Field renames: - CustomLoader return: dynamicEnv?: boolean -> requiresRestart?: boolean - State.loaderRefusedDynamicEnv -> State.loaderRequiresRestart - State.warnedLateEnvRefused -> State.warnedRestartRequired - function warnLateEnvRefused -> warnRestartRequired No call-site changes outside provider.ts (no tests reference these names). typecheck clean, 24/24 overlay+hash unit tests pass. provider.test.ts flake density unchanged (system-level Env.set isolation issue, not caused by this rename). Per cross-validated review (D3, Oracle #2 nit N1). --- packages/opencode/src/provider/provider.ts | 54 +++++++++++----------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 38d96bd74b46..e694c5c4e6a8 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -127,11 +127,11 @@ type CustomLoader = (provider: Info) => Effect.Effect<{ discoverModels?: CustomDiscoverModels // Set on a non-autoload return when the loader cannot reconstruct its // options from env alone (auth metadata, dynamic SDK imports, derived URLs). 
- // `dynamicEnv: false` excludes the provider from `cleanedDatabase` so a late + // `requiresRestart: true` excludes the provider from `cleanedDatabase` so a late // env never surfaces it with stale options; a one-time warn is emitted // instead. Single-env-key providers (anthropic, openai, ...) do not need it. // TODO(rerunOn): replace with declarative `rerunOn: string[]` env-dep list. - dynamicEnv?: boolean + requiresRestart?: boolean }> type CustomDep = { @@ -236,7 +236,7 @@ function custom(dep: CustomDep): Record { if (!resource && !provider.options?.baseURL) { return { autoload: false, - dynamicEnv: false, + requiresRestart: true, async getModel() { throw new Error( "AZURE_RESOURCE_NAME is missing, set it using env var or reconnecting the azure provider and setting it", @@ -277,7 +277,7 @@ function custom(dep: CustomDep): Record { if (!resourceName && !provider.options?.baseURL) { return { autoload: false, - dynamicEnv: false, + requiresRestart: true, async getModel() { throw new Error( "AZURE_COGNITIVE_SERVICES_RESOURCE_NAME is missing, set it via env var or configure provider.options.baseURL", @@ -326,7 +326,7 @@ function custom(dep: CustomDep): Record { ) if (!profile && !awsAccessKeyId && !awsBearerToken && !awsWebIdentityTokenFile && !containerCreds) - return { autoload: false, dynamicEnv: false } + return { autoload: false, requiresRestart: true } const { fromNodeProviderChain } = yield* Effect.promise(() => import("@aws-sdk/credential-providers")) @@ -501,7 +501,7 @@ function custom(dep: CustomDep): Record { ) const autoload = Boolean(project) - if (!autoload) return { autoload: false, dynamicEnv: false } + if (!autoload) return { autoload: false, requiresRestart: true } return { autoload: true, vars(_options: Record) { @@ -551,7 +551,7 @@ function custom(dep: CustomDep): Record { const project = env["GOOGLE_CLOUD_PROJECT"] ?? env["GCP_PROJECT"] ?? env["GCLOUD_PROJECT"] const location = env["GOOGLE_CLOUD_LOCATION"] ?? env["VERTEX_LOCATION"] ?? 
"global" const autoload = Boolean(project) - if (!autoload) return { autoload: false, dynamicEnv: false } + if (!autoload) return { autoload: false, requiresRestart: true } return { autoload: true, options: { @@ -570,7 +570,7 @@ function custom(dep: CustomDep): Record { const envAICoreServiceKey = env["AICORE_SERVICE_KEY"] const envServiceKey = envAICoreServiceKey ?? (auth?.type === "api" ? auth.key : undefined) if (!envServiceKey) { - return { autoload: false, dynamicEnv: false } + return { autoload: false, requiresRestart: true } } if (!envAICoreServiceKey) { // TODO(multi-instance): see env/index.ts docstring. @@ -752,7 +752,7 @@ function custom(dep: CustomDep): Record { if (!accountId) return { autoload: false, - dynamicEnv: false, + requiresRestart: true, async getModel() { throw new Error( "CLOUDFLARE_ACCOUNT_ID is missing. Set it with: export CLOUDFLARE_ACCOUNT_ID=", @@ -805,7 +805,7 @@ function custom(dep: CustomDep): Record { ].filter((x): x is string => Boolean(x)) return { autoload: false, - dynamicEnv: false, + requiresRestart: true, async getModel() { throw new Error( `${missing.join(" and ")} missing. Set with: ${missing.map((x) => `export ${x}=`).join(" && ")}`, @@ -1060,10 +1060,10 @@ interface State { varsLoaders: Record // Env-eligible providers with models pre-filtered. Frozen at init. cleanedDatabase: Readonly> - // Providers excluded from late-env overlay (loader returned `dynamicEnv: false`). - loaderRefusedDynamicEnv: Set - // Per-instance dedupe set for `warnLateEnvRefused`. - warnedLateEnvRefused: Set + // Providers excluded from late-env overlay (loader returned `requiresRestart: true`). + loaderRequiresRestart: Set + // Per-instance dedupe set for `warnRestartRequired`. 
+ warnedRestartRequired: Set } export class Service extends Context.Service()("@opencode/Provider") {} @@ -1114,18 +1114,18 @@ function isNonBlank(v: string | undefined): v is string { return typeof v === "string" && v.trim() !== "" } -function warnLateEnvRefused( - s: Pick, +function warnRestartRequired( + s: Pick, envs: Record, ) { - if (s.loaderRefusedDynamicEnv.size === 0) return - for (const providerID of s.loaderRefusedDynamicEnv) { - if (s.warnedLateEnvRefused.has(providerID)) continue + if (s.loaderRequiresRestart.size === 0) return + for (const providerID of s.loaderRequiresRestart) { + if (s.warnedRestartRequired.has(providerID)) continue const info = s.catalog[providerID] if (!info) continue const present = info.env.find((k) => isNonBlank(envs[k])) if (!present) continue - s.warnedLateEnvRefused.add(providerID) + s.warnedRestartRequired.add(providerID) log.warn("late env detected for provider that requires restart", { providerID, env: present, @@ -1344,7 +1344,7 @@ export const layer = Layer.effect( const discoveryLoaders: { [providerID: string]: CustomDiscoverModels } = {} - const loaderRefusedDynamicEnv = new Set() + const loaderRequiresRestart = new Set() const dep = { auth: (id: string) => auth.get(id).pipe(Effect.orDie), config: () => config.get(), @@ -1551,8 +1551,8 @@ export const layer = Layer.effect( const result = yield* fn(data) if (!result) continue - if (!result.autoload && result.dynamicEnv === false) { - loaderRefusedDynamicEnv.add(providerID) + if (!result.autoload && result.requiresRestart === true) { + loaderRequiresRestart.add(providerID) } // Always register loaders so a late env-promoted provider can find @@ -1654,7 +1654,7 @@ export const layer = Layer.effect( for (const [id, info] of Object.entries(database)) { if (disabled.has(id)) continue if (enabled && !enabled.has(id)) continue - if (loaderRefusedDynamicEnv.has(ProviderID.make(id))) continue + if (loaderRequiresRestart.has(ProviderID.make(id))) continue if (info.env.length === 
0) continue const providerID = ProviderID.make(id) const configProvider = cfg.provider?.[id] @@ -1681,15 +1681,15 @@ export const layer = Layer.effect( modelLoaders, varsLoaders, cleanedDatabase, - loaderRefusedDynamicEnv, - warnedLateEnvRefused: new Set(), + loaderRequiresRestart, + warnedRestartRequired: new Set(), } }), ) const observeProviders = Effect.fnUntraced(function* (s: State) { const envs = yield* env.all() - warnLateEnvRefused(s, envs) + warnRestartRequired(s, envs) return currentProviders(s, envs) }) From fce109cd2f9200e336ff6af8ae6ca2e5fd32a270 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 19:24:52 +0200 Subject: [PATCH 15/19] refactor(provider): extract pure overlay helpers to provider/overlay.ts resolveEnvOverlay, currentProviders, isNonBlank, and hashIdentity are side-effect-free and were only exported from provider.ts as @internal test-only members. Moving them to a sibling module: - removes the @internal markers (they are now legitimate public exports of the overlay submodule) - isolates the pure overlay algebra from the Effect-laden provider layer - shrinks provider.ts and makes the overlay logic independently grep-able provider.ts imports these from the new sibling. warnRestartRequired stays in provider.ts because it logs (impure). The new file follows the AGENTS.md ESM-flat shape with a self-reexport namespace `ProviderOverlay`. Test imports rewired to @/provider/overlay. typecheck clean. 24/24 overlay+hash unit tests pass. Per cross-validated review (D5, Oracle #2 blocker B1). 
--- packages/opencode/src/provider/overlay.ts | 79 +++++++++++++++++++ packages/opencode/src/provider/provider.ts | 71 +---------------- .../test/provider/hash-identity.test.ts | 2 +- .../opencode/test/provider/overlay.test.ts | 2 +- 4 files changed, 82 insertions(+), 72 deletions(-) create mode 100644 packages/opencode/src/provider/overlay.ts diff --git a/packages/opencode/src/provider/overlay.ts b/packages/opencode/src/provider/overlay.ts new file mode 100644 index 000000000000..3c7319fc6787 --- /dev/null +++ b/packages/opencode/src/provider/overlay.ts @@ -0,0 +1,79 @@ +import { Hash } from "@opencode-ai/core/util/hash" + +import type { ProviderID } from "@/provider/schema" +import type { Provider } from "@/provider/provider" + +interface OverlayState { + cachedProviders: Record + cleanedDatabase: Readonly> +} + +/** + * Pure single-provider env-overlay step. See `overlay.test.ts` for the + * exhaustive precedence table. + */ +export function resolveEnvOverlay( + cached: Provider.Info | undefined, + candidate: Provider.Info, + apiKey: string | undefined, +): Provider.Info | undefined { + if (!apiKey) { + if (cached?.source === "env") return undefined + return cached + } + if (cached && cached.source !== "env") { + if (!cached.key && candidate.env.length === 1) return { ...cached, key: apiKey } + return cached + } + // Multi-env candidate: cached.key has no single source of truth, preserve it. + const nextKey = candidate.env.length === 1 ? apiKey : cached?.key + if (cached && cached.key === nextKey) return cached + if (cached) return { ...cached, key: nextKey } + return { ...candidate, source: "env", key: nextKey } +} + +export function currentProviders( + s: OverlayState, + envs: Record, +): Record { + const result: Record = { ...s.cachedProviders } + for (const [id, info] of Object.entries(s.cleanedDatabase)) { + const providerID = id as ProviderID + // Empty/whitespace env values count as absent. 
Non-blank values are + // passed through verbatim — trimming a real key would be silently wrong. + const apiKey = info.env.map((k) => envs[k]).find(isNonBlank) + const next = resolveEnvOverlay(result[providerID], info, apiKey) + if (next) result[providerID] = next + else delete result[providerID] + } + return result +} + +export function isNonBlank(v: string | undefined): v is string { + return typeof v === "string" && v.trim() !== "" +} + +// JSON.stringify drops functions silently and throws on BigInt. Tag both so +// distinct closures (e.g. AWS `coalesceProvider`) and BigInt values produce +// stable, distinct hashes. Anonymous arrows collide on `__fn:anon` — that is +// intentional: the per-call `fetch` wrapper built in `resolveSDK` would +// otherwise bust the SDK cache on every invocation. +// +// CAVEAT: same-named closures from unrelated callers (e.g. a third-party +// plugin storing a `coalesceProvider` in `provider.options`) collide and may +// silently serve a stale SDK. Plugin authors must keep stateful closures out +// of `provider.options` outside the per-call `fetch` convention. +// +// TODO(hash): swap for `effect/Hash` + `Equal.equals` with WeakMap-tracked +// function identity to fix the named-collision risk. 
+export function hashIdentity(parts: Record): string { + return Hash.fast( + JSON.stringify(parts, (_key, value) => { + if (typeof value === "function") return `__fn:${value.name || "anon"}` + if (typeof value === "bigint") return `${value.toString()}n` + return value + }), + ) +} + +export * as ProviderOverlay from "./overlay" diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index e694c5c4e6a8..689855acc94a 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -6,6 +6,7 @@ import { NoSuchModelError, type Provider as SDK } from "ai" import * as Log from "@opencode-ai/core/util/log" import { Npm } from "@opencode-ai/core/npm" import { Hash } from "@opencode-ai/core/util/hash" +import { currentProviders, hashIdentity, isNonBlank } from "@/provider/overlay" import { Plugin } from "../plugin" import { type LanguageModelV3 } from "@ai-sdk/provider" import * as ModelsDev from "@opencode-ai/core/models" @@ -1068,52 +1069,6 @@ interface State { export class Service extends Context.Service()("@opencode/Provider") {} -/** - * @internal Test-only export. Pure single-provider env-overlay step. See - * `overlay.test.ts` for the exhaustive precedence table. - */ -export function resolveEnvOverlay( - cached: Info | undefined, - candidate: Info, - apiKey: string | undefined, -): Info | undefined { - if (!apiKey) { - if (cached?.source === "env") return undefined - return cached - } - if (cached && cached.source !== "env") { - if (!cached.key && candidate.env.length === 1) return { ...cached, key: apiKey } - return cached - } - // Multi-env candidate: cached.key has no single source of truth, preserve it. - const nextKey = candidate.env.length === 1 ? apiKey : cached?.key - if (cached && cached.key === nextKey) return cached - if (cached) return { ...cached, key: nextKey } - return { ...candidate, source: "env", key: nextKey } -} - -/** @internal Test-only export. 
Not part of the public Provider API. */ -export function currentProviders( - s: Pick, - envs: Record, -): Record { - const result: Record = { ...s.cachedProviders } - for (const [id, info] of Object.entries(s.cleanedDatabase)) { - const providerID = id as ProviderID - // Empty/whitespace env values count as absent. Non-blank values are - // passed through verbatim — trimming a real key would be silently wrong. - const apiKey = info.env.map((k) => envs[k]).find(isNonBlank) - const next = resolveEnvOverlay(result[providerID], info, apiKey) - if (next) result[providerID] = next - else delete result[providerID] - } - return result -} - -function isNonBlank(v: string | undefined): v is string { - return typeof v === "string" && v.trim() !== "" -} - function warnRestartRequired( s: Pick, envs: Record, @@ -1133,30 +1088,6 @@ function warnRestartRequired( } } -// JSON.stringify drops functions silently and throws on BigInt. Tag both so -// distinct closures (e.g. AWS `coalesceProvider`) and BigInt values produce -// stable, distinct hashes. Anonymous arrows collide on `__fn:anon` — that is -// intentional: the per-call `fetch` wrapper built in `resolveSDK` would -// otherwise bust the SDK cache on every invocation. -// -// CAVEAT: same-named closures from unrelated callers (e.g. a third-party -// plugin storing a `coalesceProvider` in `provider.options`) collide and may -// silently serve a stale SDK. Plugin authors must keep stateful closures out -// of `provider.options` outside the per-call `fetch` convention. -// -// TODO(hash): swap for `effect/Hash` + `Equal.equals` with WeakMap-tracked -// function identity to fix the named-collision risk. -/** @internal Test-only export. 
*/ -export function hashIdentity(parts: Record): string { - return Hash.fast( - JSON.stringify(parts, (_key, value) => { - if (typeof value === "function") return `__fn:${value.name || "anon"}` - if (typeof value === "bigint") return `${value.toString()}n` - return value - }), - ) -} - function cost(c: ModelsDev.Model["cost"]): Model["cost"] { const result: Model["cost"] = { input: c?.input ?? 0, diff --git a/packages/opencode/test/provider/hash-identity.test.ts b/packages/opencode/test/provider/hash-identity.test.ts index 785b58391038..5c82a3630c67 100644 --- a/packages/opencode/test/provider/hash-identity.test.ts +++ b/packages/opencode/test/provider/hash-identity.test.ts @@ -1,6 +1,6 @@ import { test, expect } from "bun:test" -import { hashIdentity } from "@/provider/provider" +import { hashIdentity } from "@/provider/overlay" test("hashIdentity: stable for plain objects", () => { const a = hashIdentity({ providerID: "x", npm: "@ai-sdk/x", options: { apiKey: "k" } }) diff --git a/packages/opencode/test/provider/overlay.test.ts b/packages/opencode/test/provider/overlay.test.ts index d91c7ec4bfa0..ce352995da27 100644 --- a/packages/opencode/test/provider/overlay.test.ts +++ b/packages/opencode/test/provider/overlay.test.ts @@ -1,6 +1,6 @@ import { test, expect } from "bun:test" -import { resolveEnvOverlay, currentProviders } from "@/provider/provider" +import { resolveEnvOverlay, currentProviders } from "@/provider/overlay" import type { Provider } from "@/provider/provider" import { ProviderID } from "@/provider/schema" From e22a3a7088f0c4e9730fdb28b74e8c9b825a685a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 19:30:07 +0200 Subject: [PATCH 16/19] test(provider): per-provider requires-restart regression + warn-once dedup Pre-existing coverage included only amazon-bedrock for the late-env exclusion contract. 
The contract is shared across azure, azure-cognitive-services, google-vertex, google-vertex-anthropic, sap-ai-core, cloudflare-workers-ai, and cloudflare-ai-gateway. A copy-paste regression in any one of those loaders (forgetting the requiresRestart return path) would silently surface the provider with stale options and fail at first request. Adds: - Parameterized RESTART_CASES table covering the 7 multi-cred providers. Each test boots without the provider's identifying env, asserts the provider is absent, sets a single env var post-init, asserts the provider stays hidden (requires-restart contract). - warnRestartRequired dedup test: spies on the cached Log.create({service: "provider"}) instance, calls list() four times after a late AICORE_SERVICE_KEY appears, asserts exactly one warn line was emitted for sap-ai-core. Original log.warn restored in finally. 8 new tests, all green. Pre-existing tests unchanged. Per cross-validated review (C3 + Oracle #3 test gap #1 and #2). --- .../opencode/test/provider/provider.test.ts | 135 ++++++++++++++++++ 1 file changed, 135 insertions(+) diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts index 88669e8012ce..a8b8c1a6c30c 100644 --- a/packages/opencode/test/provider/provider.test.ts +++ b/packages/opencode/test/provider/provider.test.ts @@ -4,6 +4,7 @@ import path from "path" import { disposeAllInstances, tmpdir } from "../fixture/fixture" import { Global } from "@opencode-ai/core/global" +import { Log } from "@opencode-ai/core/util/log" import { Instance } from "../../src/project/instance" import { WithInstance } from "../../src/project/with-instance" import { Plugin } from "../../src/plugin/index" @@ -3310,3 +3311,137 @@ test("getLanguage rebuilds language model after env key rotation", async () => { }, }) }) + +const RESTART_CASES = [ + { + id: "azure", + providerID: ProviderID.azure, + cleanup: ["AZURE_RESOURCE_NAME"], + setKey: "AZURE_RESOURCE_NAME", + setValue: 
"late-resource", + }, + { + id: "azure-cognitive-services", + providerID: ProviderID.make("azure-cognitive-services"), + cleanup: ["AZURE_COGNITIVE_SERVICES_RESOURCE_NAME"], + setKey: "AZURE_COGNITIVE_SERVICES_RESOURCE_NAME", + setValue: "late-cognitive", + }, + { + id: "google-vertex", + providerID: ProviderID.googleVertex, + cleanup: [ + "GOOGLE_VERTEX_PROJECT", + "GOOGLE_CLOUD_PROJECT", + "GCP_PROJECT", + "GCLOUD_PROJECT", + "GOOGLE_VERTEX_LOCATION", + "GOOGLE_CLOUD_LOCATION", + "VERTEX_LOCATION", + "GOOGLE_APPLICATION_CREDENTIALS", + ], + setKey: "GOOGLE_VERTEX_PROJECT", + setValue: "late-project", + }, + { + id: "google-vertex-anthropic", + providerID: ProviderID.make("google-vertex-anthropic"), + cleanup: [ + "GOOGLE_VERTEX_PROJECT", + "GOOGLE_CLOUD_PROJECT", + "GCP_PROJECT", + "GCLOUD_PROJECT", + "GOOGLE_VERTEX_LOCATION", + "GOOGLE_CLOUD_LOCATION", + "VERTEX_LOCATION", + "GOOGLE_APPLICATION_CREDENTIALS", + ], + setKey: "GOOGLE_CLOUD_PROJECT", + setValue: "late-project", + }, + { + id: "sap-ai-core", + providerID: ProviderID.make("sap-ai-core"), + cleanup: ["AICORE_SERVICE_KEY", "AICORE_DEPLOYMENT_ID", "AICORE_RESOURCE_GROUP"], + setKey: "AICORE_SERVICE_KEY", + setValue: '{"clientid":"x","clientsecret":"y","url":"https://x"}', + }, + { + id: "cloudflare-workers-ai", + providerID: ProviderID.make("cloudflare-workers-ai"), + cleanup: ["CLOUDFLARE_ACCOUNT_ID", "CLOUDFLARE_API_TOKEN"], + setKey: "CLOUDFLARE_ACCOUNT_ID", + setValue: "late-account", + }, + { + id: "cloudflare-ai-gateway", + providerID: ProviderID.make("cloudflare-ai-gateway"), + cleanup: ["CLOUDFLARE_ACCOUNT_ID", "CLOUDFLARE_GATEWAY_ID", "CLOUDFLARE_API_TOKEN"], + setKey: "CLOUDFLARE_ACCOUNT_ID", + setValue: "late-account", + }, +] as const + +for (const c of RESTART_CASES) { + test(`late-detected ${c.id} is excluded (requires restart)`, async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: 
"https://opencode.ai/config.json" }), + ) + }, + }) + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + c.cleanup.forEach(remove) + const before = await list() + expect(before[c.providerID]).toBeUndefined() + set(c.setKey, c.setValue) + const after = await list() + expect(after[c.providerID]).toBeUndefined() + remove(c.setKey) + }, + }) + }) +} + +test("warnRestartRequired emits at most one warn per provider per instance", async () => { + await using tmp = await tmpdir({ + init: async (dir) => { + await Bun.write( + path.join(dir, "opencode.json"), + JSON.stringify({ $schema: "https://opencode.ai/config.json" }), + ) + }, + }) + const providerLog = Log.create({ service: "provider" }) + const calls: { providerID: string; env: string }[] = [] + const original = providerLog.warn + providerLog.warn = (message?: any, extra?: Record) => { + if (typeof message === "string" && message.includes("requires restart")) { + calls.push({ providerID: extra?.providerID, env: extra?.env }) + } + return original.call(providerLog, message, extra) + } + try { + await WithInstance.provide({ + directory: tmp.path, + fn: async () => { + ;["AICORE_SERVICE_KEY", "AICORE_DEPLOYMENT_ID", "AICORE_RESOURCE_GROUP"].forEach(remove) + await list() + set("AICORE_SERVICE_KEY", "late") + await list() + await list() + await list() + const sapWarns = calls.filter((c) => c.providerID === "sap-ai-core") + expect(sapWarns.length).toBe(1) + expect(sapWarns[0].env).toBe("AICORE_SERVICE_KEY") + remove("AICORE_SERVICE_KEY") + }, + }) + } finally { + providerLog.warn = original + } +}) From 3b2e926b3320c75e09c5d97fe308e6bf3110531c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 20:43:21 +0200 Subject: [PATCH 17/19] docs(provider): document hashIdentity unsupported input types hashIdentity uses JSON.stringify with a function/bigint replacer; several JS types pass through with silent collisions (Map/Set/RegExp/WeakMap serialize to '{}'), silent 
drops (Symbol -> undefined), or hard throws (circular references). The fall-through behaviour was undocumented, so plugin authors had no warning before placing such values in provider.options. This makes the contract explicit and points at the same effect/Hash + Equal.equals + WeakMap migration the existing TODO already named. --- packages/opencode/src/provider/overlay.ts | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/opencode/src/provider/overlay.ts b/packages/opencode/src/provider/overlay.ts index 3c7319fc6787..2b238f7bb0a2 100644 --- a/packages/opencode/src/provider/overlay.ts +++ b/packages/opencode/src/provider/overlay.ts @@ -64,8 +64,22 @@ export function isNonBlank(v: string | undefined): v is string { // silently serve a stale SDK. Plugin authors must keep stateful closures out // of `provider.options` outside the per-call `fetch` convention. // +// UNSUPPORTED INPUT TYPES (silent collisions or throws — do not place these +// in `provider.options`): +// - `Map`, `Set`, `WeakMap`, `WeakSet` — `JSON.stringify` returns `"{}"`, +// so two distinct instances collide on the same hash. +// - `RegExp` — also serializes to `"{}"`, same collision. +// - `Symbol` — silently dropped by `JSON.stringify` (becomes `undefined`). +// - Circular references — `JSON.stringify` throws `TypeError`, propagated +// as a defect through `getLanguage`/`resolveSDK`. +// - `Buffer` / `Uint8Array` — serialized as `{0:n,1:n,...}`; large but +// distinct, so correct but inefficient. +// - `Date`, `URL` — handled correctly via their `toJSON()` (ISO string, +// `href`). +// // TODO(hash): swap for `effect/Hash` + `Equal.equals` with WeakMap-tracked -// function identity to fix the named-collision risk. +// function identity to fix the named-collision risk and the unsupported +// types above. 
export function hashIdentity(parts: Record): string { return Hash.fast( JSON.stringify(parts, (_key, value) => { From 08bf75decba808e34ff315043b5b9d67013aaec4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 20:43:35 +0200 Subject: [PATCH 18/19] test(preload): drive env deletion list from models fixture The hardcoded deletion list missed several keys present in models.dev (COHERE_API_KEY, DEEPINFRA_API_KEY, MOONSHOT_API_KEY, ZENMUX_API_KEY, ALIBABA_API_KEY, NOVITA_API_KEY, VENICE_API_KEY, KILO_API_KEY, ...) and OPENCODE_CONSOLE_TOKEN that config.ts persists via Env.set. A contributor whose shell exported any of these would see test `connected[]` assertions diverge from CI. Source the deletion list programmatically from the OPENCODE_MODELS_PATH fixture so it grows with models.dev without manual maintenance, then augment with non-fixture keys referenced by src/ (OPENCODE_CONSOLE_TOKEN, GITLAB_INSTANCE_URL, AICORE_DEPLOYMENT_ID/RESOURCE_GROUP, the AWS chain helpers) and synthetic test keys used by overlay/provider tests. 
--- packages/opencode/test/preload.ts | 106 +++++++++++++----------------- 1 file changed, 46 insertions(+), 60 deletions(-) diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index c5b718c204df..28d1236613b2 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -3,6 +3,7 @@ import os from "os" import path from "path" import fs from "fs/promises" +import fsSync from "fs" import { setTimeout as sleep } from "node:timers/promises" import { afterAll, afterEach } from "bun:test" @@ -52,66 +53,51 @@ const cacheDir = path.join(dir, "cache", "opencode") await fs.mkdir(cacheDir, { recursive: true }) await fs.writeFile(path.join(cacheDir, "version"), "14") -// Clear provider and server auth env vars to ensure clean test state -delete process.env["ANTHROPIC_API_KEY"] -delete process.env["OPENAI_API_KEY"] -delete process.env["GOOGLE_API_KEY"] -delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"] -delete process.env["AZURE_OPENAI_API_KEY"] -delete process.env["AZURE_RESOURCE_NAME"] -delete process.env["AZURE_COGNITIVE_SERVICES_RESOURCE_NAME"] -delete process.env["AZURE_API_KEY"] -delete process.env["AWS_ACCESS_KEY_ID"] -delete process.env["AWS_SECRET_ACCESS_KEY"] -delete process.env["AWS_PROFILE"] -delete process.env["AWS_REGION"] -delete process.env["AWS_BEARER_TOKEN_BEDROCK"] -delete process.env["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"] -delete process.env["AWS_CONTAINER_CREDENTIALS_FULL_URI"] -delete process.env["AWS_WEB_IDENTITY_TOKEN_FILE"] -delete process.env["AWS_ROLE_ARN"] -delete process.env["OPENROUTER_API_KEY"] -delete process.env["LLM_GATEWAY_API_KEY"] -delete process.env["GROQ_API_KEY"] -delete process.env["MISTRAL_API_KEY"] -delete process.env["PERPLEXITY_API_KEY"] -delete process.env["TOGETHER_API_KEY"] -delete process.env["XAI_API_KEY"] -delete process.env["DEEPSEEK_API_KEY"] -delete process.env["FIREWORKS_API_KEY"] -delete process.env["CEREBRAS_API_KEY"] -delete process.env["SAMBANOVA_API_KEY"] 
-delete process.env["AICORE_SERVICE_KEY"] -delete process.env["AICORE_DEPLOYMENT_ID"] -delete process.env["AICORE_RESOURCE_GROUP"] -delete process.env["GOOGLE_APPLICATION_CREDENTIALS"] -delete process.env["GOOGLE_VERTEX_PROJECT"] -delete process.env["GOOGLE_VERTEX_LOCATION"] -delete process.env["GOOGLE_CLOUD_PROJECT"] -delete process.env["GOOGLE_CLOUD_LOCATION"] -delete process.env["GCP_PROJECT"] -delete process.env["GCLOUD_PROJECT"] -delete process.env["VERTEX_LOCATION"] -delete process.env["CLOUDFLARE_ACCOUNT_ID"] -delete process.env["CLOUDFLARE_GATEWAY_ID"] -delete process.env["CLOUDFLARE_API_TOKEN"] -delete process.env["CLOUDFLARE_API_KEY"] -delete process.env["CF_AIG_TOKEN"] -delete process.env["GITHUB_TOKEN"] -delete process.env["GITLAB_TOKEN"] -delete process.env["GITLAB_INSTANCE_URL"] -delete process.env["OPENCODE_API_KEY"] -delete process.env["GEMINI_API_KEY"] -delete process.env["HF_TOKEN"] -delete process.env["DIGITALOCEAN_ACCESS_TOKEN"] -delete process.env["SINGLE_ENV_KEY"] -delete process.env["MULTI_ENV_KEY_1"] -delete process.env["MULTI_ENV_KEY_2"] -delete process.env["PRIMARY_KEY"] -delete process.env["FALLBACK_KEY"] -delete process.env["CUSTOM_API_KEY"] -delete process.env["OPENCODE_SERVER_PASSWORD"] -delete process.env["OPENCODE_SERVER_USERNAME"] +// Clear provider/server auth env vars so a contributor's shell can never +// leak a real credential into a test's `connected[]` assertion. Sourced +// programmatically from the models-api fixture so this list grows with +// models.dev without manual maintenance. Augmented with non-fixture keys +// referenced by src/ (OPENCODE_CONSOLE_TOKEN, GITLAB_INSTANCE_URL, +// AICORE_DEPLOYMENT_ID/RESOURCE_GROUP, the AWS chain helpers) and the +// synthetic test keys used by overlay/provider tests. 
+const fixtureEnv: string[] = (() => { + const fixturePath = process.env["OPENCODE_MODELS_PATH"] + if (!fixturePath) return [] + const data: Record = JSON.parse(fsSync.readFileSync(fixturePath, "utf8")) + const seen = new Set() + for (const provider of Object.values(data)) for (const key of provider.env ?? []) seen.add(key) + return [...seen] +})() +const extraEnv = [ + "GOOGLE_API_KEY", + "AZURE_OPENAI_API_KEY", + "AWS_PROFILE", + "AWS_REGION", + "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI", + "AWS_CONTAINER_CREDENTIALS_FULL_URI", + "AWS_WEB_IDENTITY_TOKEN_FILE", + "AWS_ROLE_ARN", + "AICORE_DEPLOYMENT_ID", + "AICORE_RESOURCE_GROUP", + "GOOGLE_CLOUD_PROJECT", + "GOOGLE_CLOUD_LOCATION", + "GCP_PROJECT", + "GCLOUD_PROJECT", + "VERTEX_LOCATION", + "CF_AIG_TOKEN", + "GITLAB_INSTANCE_URL", + "OPENCODE_CONSOLE_TOKEN", + "SINGLE_ENV_KEY", + "MULTI_ENV_KEY_1", + "MULTI_ENV_KEY_2", + "PRIMARY_KEY", + "FALLBACK_KEY", + "CUSTOM_API_KEY", + "OPENCODE_SERVER_PASSWORD", + "OPENCODE_SERVER_USERNAME", +] +for (const key of fixtureEnv) delete process.env[key] +for (const key of extraEnv) delete process.env[key] // Use in-memory sqlite process.env["OPENCODE_DB"] = ":memory:" From 226a792ccfa54bd82c20d2a883efcec43cf960c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Benoit?= Date: Fri, 15 May 2026 20:44:03 +0200 Subject: [PATCH 19/19] perf(provider): bounded LRU + thundering-herd dedup; pass envs to vars() MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Two related hardenings to provider.ts following cross-validated review. 1) Bound s.sdk and s.models caches. Each credential rotation produces a new hashIdentity, so the underlying Map grew once per rotation forever — a slow memory leak in long-running daemons that periodically rotate STS bearer tokens or per-tenant API keys. 
Replace with a small inline makeBoundedAsyncCache helper: - Capacity-bounded LRU: insertion-order Map doubles as LRU order; get re-inserts the key to refresh recency; eviction is O(1). Capacity 256 is well above any realistic single-workspace working set yet caps pathological rotation cycles. - In-flight Promise dedup: concurrent callers with the same key share one build (closes the thundering-herd window flagged in packages/opencode/AGENTS.md 'Effect.cached for deduplication'). Rejections evict the in-flight slot so a transient init failure never poisons the cache. The sync fast path on s.models cache hits is preserved so getLanguage avoids an unnecessary Effect.tryPromise round-trip on the hot path. 2) Pass envs into vars(options, envs); drop liveEnv() bypass. The previous design had two paths to env state — Env.Service for most reads and a direct process.env helper (liveEnv) inside loader vars() callbacks because they ran synchronously and couldn't yield to dep.env(). resolveSDK now passes its already- resolved envs snapshot into the loader, so the direct bypass goes away and 'all reads go through Env.Service' becomes a true invariant. Behaviour is identical for the rotation-driven late-env propagation path each loader exists for. 
--- packages/opencode/src/provider/provider.ts | 279 ++++++++++++--------- 1 file changed, 166 insertions(+), 113 deletions(-) diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 689855acc94a..a524ca297ebb 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -118,7 +118,10 @@ const BUNDLED_PROVIDERS: Record Promise<(opts: any) => BundledSDK> } type CustomModelLoader = (sdk: any, modelID: string, options?: Record) => Promise -type CustomVarsLoader = (options: Record) => Record +type CustomVarsLoader = ( + options: Record, + envs: Record, +) => Record type CustomDiscoverModels = () => Promise> type CustomLoader = (provider: Info) => Effect.Effect<{ autoload: boolean @@ -155,15 +158,6 @@ function selectAzureLanguageModel(sdk: any, modelID: string, useChat: boolean) { return sdk.languageModel(modelID) } -// Synchronous live-read of `process.env`. Loader `vars()` callbacks are sync -// and run on every `currentProviders` overlay, so they cannot yield to -// `dep.env()`. Routing all reads through this single helper names the intent -// and gives one extension point if `Env` ever stops being a `process.env` -// proxy. -function liveEnv(key: string): string | undefined { - return process.env[key] -} - function custom(dep: CustomDep): Record { return { anthropic: () => @@ -254,7 +248,7 @@ function custom(dep: CustomDep): Record { options: { resourceName: resource, }, - vars(_options): Record { + vars(_options, envs): Record { // Re-read live env at call time so late `AZURE_RESOURCE_NAME` env // rotation propagates through `${AZURE_RESOURCE_NAME}` baseURL // templating in `resolveSDK`. Precedence preserved: @@ -262,7 +256,7 @@ function custom(dep: CustomDep): Record { const liveResource = [ provider.options?.resourceName, auth?.type === "api" ? 
auth.metadata?.resourceName : undefined, - liveEnv("AZURE_RESOURCE_NAME"), + envs["AZURE_RESOURCE_NAME"], ].find((name) => typeof name === "string" && name.trim() !== "") if (liveResource) { return { @@ -505,19 +499,19 @@ function custom(dep: CustomDep): Record { if (!autoload) return { autoload: false, requiresRestart: true } return { autoload: true, - vars(_options: Record) { + vars(_options: Record, envs: Record) { // Re-read live env at call time so post-init rotation of any of the // GOOGLE_VERTEX_* / GOOGLE_CLOUD_* / GCP_PROJECT envs propagates // through baseURL templating. Falls back to init-time capture for // parity with the non-rotated case. const liveProject = - liveEnv("GOOGLE_VERTEX_PROJECT") ?? - liveEnv("GOOGLE_CLOUD_PROJECT") ?? - liveEnv("GCP_PROJECT") ?? - liveEnv("GCLOUD_PROJECT") ?? + envs["GOOGLE_VERTEX_PROJECT"] ?? + envs["GOOGLE_CLOUD_PROJECT"] ?? + envs["GCP_PROJECT"] ?? + envs["GCLOUD_PROJECT"] ?? project const liveLocation = - liveEnv("GOOGLE_VERTEX_LOCATION") ?? liveEnv("GOOGLE_CLOUD_LOCATION") ?? liveEnv("VERTEX_LOCATION") ?? location + envs["GOOGLE_VERTEX_LOCATION"] ?? envs["GOOGLE_CLOUD_LOCATION"] ?? envs["VERTEX_LOCATION"] ?? location const endpoint = liveLocation === "global" ? "aiplatform.googleapis.com" : `${liveLocation}-aiplatform.googleapis.com` return { @@ -779,12 +773,12 @@ function custom(dep: CustomDep): Record { async getModel(sdk: any, modelID: string) { return sdk.languageModel(modelID) }, - vars(_options) { + vars(_options, envs) { // Re-read live env at call time so late `CLOUDFLARE_ACCOUNT_ID` // rotation propagates through baseURL templating. Falls back to // init-time capture (env or auth metadata) when env is unset. return { - CLOUDFLARE_ACCOUNT_ID: liveEnv("CLOUDFLARE_ACCOUNT_ID") ?? accountId, + CLOUDFLARE_ACCOUNT_ID: envs["CLOUDFLARE_ACCOUNT_ID"] ?? 
accountId, } }, } @@ -1050,13 +1044,75 @@ export interface Interface { readonly defaultModel: () => Effect.Effect<{ providerID: ProviderID; modelID: ModelID }> } +interface BoundedAsyncCache { + get(key: string): V | undefined + getOrLoad(key: string, build: () => Promise): Promise +} + +// Capacity-bounded LRU + in-flight Promise dedup. Two concurrent callers with +// the same key share one build; rejections evict the in-flight slot so a +// transient init failure never poisons the cache. Insertion order in the +// underlying `Map` doubles as LRU order — `get` re-inserts the key to refresh +// recency. Eviction is O(1) (drop the iterator's first key when at capacity). +// +// Bounds protect long-running daemons against rotation-driven leaks: every +// distinct credential or templated baseURL produces a new `hashIdentity`, so +// without a cap the underlying maps would grow once per rotation forever. +function makeBoundedAsyncCache(capacity: number): BoundedAsyncCache { + const entries = new Map() + const inflight = new Map>() + + const set = (key: string, value: V) => { + if (entries.has(key)) entries.delete(key) + else if (entries.size >= capacity) { + const oldest = entries.keys().next().value + if (oldest !== undefined) entries.delete(oldest) + } + entries.set(key, value) + } + + const get = (key: string) => { + const v = entries.get(key) + if (v === undefined) return undefined + entries.delete(key) + entries.set(key, v) + return v + } + + return { + get, + getOrLoad(key, build) { + const cached = get(key) + if (cached !== undefined) return Promise.resolve(cached) + const pending = inflight.get(key) + if (pending) return pending + const promise = build().then( + (v) => { + set(key, v) + inflight.delete(key) + return v + }, + (err) => { + inflight.delete(key) + throw err + }, + ) + inflight.set(key, promise) + return promise + }, + } +} + +const SDK_CACHE_CAPACITY = 256 +const LANGUAGE_MODEL_CACHE_CAPACITY = 256 + interface State { - models: Map + models: 
BoundedAsyncCache // Init-time providers from auth/config/loader. Readers MUST go through // `currentProviders(...)` to apply the live env overlay on top. cachedProviders: Record catalog: Record - sdk: Map + sdk: BoundedAsyncCache modelLoaders: Record varsLoaders: Record // Env-eligible providers with models pre-filtered. Frozen at init. @@ -1264,14 +1320,14 @@ export const layer = Layer.effect( const database = mapValues(catalog, toPublicInfo) const providers: Record = {} as Record - const languages = new Map() + const languages = makeBoundedAsyncCache(LANGUAGE_MODEL_CACHE_CAPACITY) const modelLoaders: { [providerID: string]: CustomModelLoader } = {} const varsLoaders: { [providerID: string]: CustomVarsLoader } = {} - const sdk = new Map() + const sdk = makeBoundedAsyncCache(SDK_CACHE_CAPACITY) const discoveryLoaders: { [providerID: string]: CustomDiscoverModels } = {} @@ -1651,7 +1707,7 @@ export const layer = Layer.effect( const loader = s.varsLoaders[model.providerID] if (loader) { - const vars = loader(options) + const vars = loader(options, envs) for (const [key, value] of Object.entries(vars)) { const field = "${" + key + "}" url = url.replaceAll(field, value) @@ -1678,92 +1734,89 @@ export const layer = Layer.effect( npm: model.api.npm, options, }) - const existing = s.sdk.get(key) - if (existing) return existing - - const customFetch = options["fetch"] - const chunkTimeout = options["chunkTimeout"] - delete options["chunkTimeout"] - - options["fetch"] = async (input: any, init?: BunFetchRequestInit) => { - const fetchFn = customFetch ?? fetch - const opts = init ?? {} - const chunkAbortCtl = typeof chunkTimeout === "number" && chunkTimeout > 0 ? 
new AbortController() : undefined - const signals: AbortSignal[] = [] - - if (opts.signal) signals.push(opts.signal) - if (chunkAbortCtl) signals.push(chunkAbortCtl.signal) - if (options["timeout"] !== undefined && options["timeout"] !== null && options["timeout"] !== false) - signals.push(AbortSignal.timeout(options["timeout"])) - - const combined = signals.length === 0 ? null : signals.length === 1 ? signals[0] : AbortSignal.any(signals) - if (combined) opts.signal = combined - - // Strip openai itemId metadata following what codex does - if ( - (model.api.npm === "@ai-sdk/openai" || model.api.npm === "@ai-sdk/azure") && - opts.body && - opts.method === "POST" - ) { - const body = JSON.parse(opts.body as string) - const keepIds = body.store === true - if (!keepIds && Array.isArray(body.input)) { - for (const item of body.input) { - if ("id" in item) { - delete item.id + + return (await s.sdk.getOrLoad(key, async () => { + const customFetch = options["fetch"] + const chunkTimeout = options["chunkTimeout"] + delete options["chunkTimeout"] + + options["fetch"] = async (input: any, init?: BunFetchRequestInit) => { + const fetchFn = customFetch ?? fetch + const opts = init ?? {} + const chunkAbortCtl = + typeof chunkTimeout === "number" && chunkTimeout > 0 ? new AbortController() : undefined + const signals: AbortSignal[] = [] + + if (opts.signal) signals.push(opts.signal) + if (chunkAbortCtl) signals.push(chunkAbortCtl.signal) + if (options["timeout"] !== undefined && options["timeout"] !== null && options["timeout"] !== false) + signals.push(AbortSignal.timeout(options["timeout"])) + + const combined = signals.length === 0 ? null : signals.length === 1 ? 
signals[0] : AbortSignal.any(signals) + if (combined) opts.signal = combined + + // Strip openai itemId metadata following what codex does + if ( + (model.api.npm === "@ai-sdk/openai" || model.api.npm === "@ai-sdk/azure") && + opts.body && + opts.method === "POST" + ) { + const body = JSON.parse(opts.body as string) + const keepIds = body.store === true + if (!keepIds && Array.isArray(body.input)) { + for (const item of body.input) { + if ("id" in item) { + delete item.id + } } + opts.body = JSON.stringify(body) } - opts.body = JSON.stringify(body) } + + const res = await fetchFn(input, { + ...opts, + // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682 + timeout: false, + }) + + if (!chunkAbortCtl) return res + return wrapSSE(res, chunkTimeout, chunkAbortCtl) } - const res = await fetchFn(input, { - ...opts, - // @ts-ignore see here: https://github.com/oven-sh/bun/issues/16682 - timeout: false, - }) + const bundledLoader = BUNDLED_PROVIDERS[model.api.npm] + if (bundledLoader) { + log.info("using bundled provider", { + providerID: model.providerID, + pkg: model.api.npm, + }) + const factory = await bundledLoader() + return factory({ + name: model.providerID, + ...options, + }) + } - if (!chunkAbortCtl) return res - return wrapSSE(res, chunkTimeout, chunkAbortCtl) - } + let installedPath: string + if (!model.api.npm.startsWith("file://")) { + const item = await Npm.add(model.api.npm) + if (!item.entrypoint) throw new Error(`Package ${model.api.npm} has no import entrypoint`) + installedPath = item.entrypoint + } else { + log.info("loading local provider", { pkg: model.api.npm }) + installedPath = model.api.npm + } - const bundledLoader = BUNDLED_PROVIDERS[model.api.npm] - if (bundledLoader) { - log.info("using bundled provider", { - providerID: model.providerID, - pkg: model.api.npm, - }) - const factory = await bundledLoader() - const loaded = factory({ + // `installedPath` is a local entry path or an existing `file://` URL. 
Normalize + // only path inputs so Node on Windows accepts the dynamic import. + const importSpec = installedPath.startsWith("file://") ? installedPath : pathToFileURL(installedPath).href + const mod = await import(importSpec) + + const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!] + return fn({ name: model.providerID, ...options, }) - s.sdk.set(key, loaded) - return loaded as SDK - } - - let installedPath: string - if (!model.api.npm.startsWith("file://")) { - const item = await Npm.add(model.api.npm) - if (!item.entrypoint) throw new Error(`Package ${model.api.npm} has no import entrypoint`) - installedPath = item.entrypoint - } else { - log.info("loading local provider", { pkg: model.api.npm }) - installedPath = model.api.npm - } - - // `installedPath` is a local entry path or an existing `file://` URL. Normalize - // only path inputs so Node on Windows accepts the dynamic import. - const importSpec = installedPath.startsWith("file://") ? installedPath : pathToFileURL(installedPath).href - const mod = await import(importSpec) - - const fn = mod[Object.keys(mod).find((key) => key.startsWith("create"))!] - const loaded = fn({ - name: model.providerID, - ...options, - }) - s.sdk.set(key, loaded) - return loaded as SDK + })) as SDK } catch (e) { throw new InitError({ providerID: model.providerID, cause: e }) } @@ -1822,20 +1875,20 @@ export const layer = Layer.effect( key: provider.key, options: { ...provider.options, ...model.options }, }) - if (s.models.has(cacheKey)) return s.models.get(cacheKey)! + const cached = s.models.get(cacheKey) + if (cached !== undefined) return cached return yield* EffectPromise.refineRejection( - async () => { - const sdk = await resolveSDK(model, s, provider, envs) - const language = s.modelLoaders[model.providerID] - ? 
await s.modelLoaders[model.providerID](sdk, model.api.id, { - ...provider.options, - ...model.options, - }) - : sdk.languageModel(model.api.id) - s.models.set(cacheKey, language) - return language - }, + () => + s.models.getOrLoad(cacheKey, async () => { + const sdk = await resolveSDK(model, s, provider, envs) + return s.modelLoaders[model.providerID] + ? await s.modelLoaders[model.providerID](sdk, model.api.id, { + ...provider.options, + ...model.options, + }) + : sdk.languageModel(model.api.id) + }), (cause) => cause instanceof NoSuchModelError ? new ModelNotFoundError({ modelID: model.id, providerID: model.providerID, cause })