From e77898c1843c0dccd8ddb18afd92a5a43a36913c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 18 Feb 2026 10:47:03 +0000 Subject: [PATCH 1/5] Initial plan From 1956dbe057da579a0556a998f2a3a869b5a1031f Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 18 Feb 2026 10:51:34 +0000 Subject: [PATCH 2/5] Implement automatic cleanup of stale lock entries during sync Co-authored-by: fbosch <6979916+fbosch@users.noreply.github.com> --- src/commands/sync.ts | 12 +- tests/sync-lock-cleanup.test.js | 208 ++++++++++++++++++++++++++++++++ 2 files changed, 218 insertions(+), 2 deletions(-) create mode 100644 tests/sync-lock-cleanup.test.js diff --git a/src/commands/sync.ts b/src/commands/sync.ts index ea32338..b42db99 100644 --- a/src/commands/sync.ts +++ b/src/commands/sync.ts @@ -2,7 +2,7 @@ import { createHash } from "node:crypto"; import { access, mkdir, readFile } from "node:fs/promises"; import path from "node:path"; import pc from "picocolors"; -import type { DocsCacheLock } from "#cache/lock"; +import type { DocsCacheLock, DocsCacheLockSource } from "#cache/lock"; import { readLock, resolveLockPath, writeLock } from "#cache/lock"; import { MANIFEST_FILENAME } from "#cache/manifest"; import { computeManifestHash, materializeSource } from "#cache/materialize"; @@ -240,7 +240,15 @@ const buildLock = async ( ) => { const toolVersion = await loadToolVersion(); const now = new Date().toISOString(); - const sources = { ...(previous?.sources ?? 
{}) }; + const configSourceIds = new Set(plan.sources.map((source) => source.id)); + const sources: Record<string, DocsCacheLockSource> = {}; + if (previous?.sources) { + for (const [id, source] of Object.entries(previous.sources)) { + if (configSourceIds.has(id)) { + sources[id] = source; + } + } + } for (const result of plan.results) { const prior = sources[result.id]; sources[result.id] = buildLockSource(result, prior, now); diff --git a/tests/sync-lock-cleanup.test.js b/tests/sync-lock-cleanup.test.js new file mode 100644 index 0000000..c69a654 --- /dev/null +++ b/tests/sync-lock-cleanup.test.js @@ -0,0 +1,208 @@ +import assert from "node:assert/strict"; +import { access, mkdir, readFile, writeFile } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import path from "node:path"; +import { test } from "node:test"; + +import { runSync } from "../dist/api.mjs"; + +const exists = async (target) => { + try { + await access(target); + return true; + } catch { + return false; + } +}; + +test("sync removes lock entries for sources removed from config", async () => { + const tmpRoot = path.join( + tmpdir(), + `docs-cache-lock-cleanup-${Date.now().toString(36)}`, + ); + await mkdir(tmpRoot, { recursive: true }); + const cacheDir = path.join(tmpRoot, ".docs"); + const repoDir = path.join(tmpRoot, "repo"); + const configPath = path.join(tmpRoot, "docs.config.json"); + const lockPath = path.join(tmpRoot, "docs-lock.json"); + + await mkdir(repoDir, { recursive: true }); + await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8"); + + // Initial config with two sources + const config = { + $schema: + "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json", + sources: [ + { + id: "source-one", + repo: "https://example.com/repo.git", + include: ["**/*.md"], + }, + { + id: "source-two", + repo: "https://example.com/repo.git", + include: ["**/*.md"], + }, + ], + }; + await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); + + let resolveCallCount
= 0; + const resolveRemoteCommit = async ({ repo }) => { + resolveCallCount += 1; + return { + repo, + ref: "HEAD", + resolvedCommit: `commit-${resolveCallCount}`, + }; + }; + + const fetchSource = async () => ({ + repoDir, + cleanup: async () => undefined, + fromCache: false, + }); + + // First sync with both sources + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Verify lock contains both sources + assert.equal(await exists(lockPath), true); + const lockContent1 = await readFile(lockPath, "utf8"); + const lock1 = JSON.parse(lockContent1); + assert.ok(lock1.sources["source-one"]); + assert.ok(lock1.sources["source-two"]); + + // Update config to remove source-two + config.sources = [ + { + id: "source-one", + repo: "https://example.com/repo.git", + include: ["**/*.md"], + }, + ]; + await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); + + // Second sync with only source-one + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Verify lock only contains source-one + const lockContent2 = await readFile(lockPath, "utf8"); + const lock2 = JSON.parse(lockContent2); + assert.ok(lock2.sources["source-one"], "source-one should still be in lock"); + assert.equal( + lock2.sources["source-two"], + undefined, + "source-two should be removed from lock", + ); + assert.equal( + Object.keys(lock2.sources).length, + 1, + "lock should only have one source", + ); +}); + +test("sync preserves lock entries for sources still in config", async () => { + const tmpRoot = path.join( + tmpdir(), + `docs-cache-lock-preserve-${Date.now().toString(36)}`, + ); + await mkdir(tmpRoot, { recursive: true }); + const cacheDir = path.join(tmpRoot, ".docs"); + const repoDir = 
path.join(tmpRoot, "repo"); + const configPath = path.join(tmpRoot, "docs.config.json"); + const lockPath = path.join(tmpRoot, "docs-lock.json"); + + await mkdir(repoDir, { recursive: true }); + await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8"); + + const config = { + $schema: + "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json", + sources: [ + { + id: "source-one", + repo: "https://example.com/repo.git", + include: ["**/*.md"], + }, + ], + }; + await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); + + const resolveRemoteCommit = async ({ repo }) => ({ + repo, + ref: "HEAD", + resolvedCommit: "fixed-commit", + }); + + const fetchSource = async () => ({ + repoDir, + cleanup: async () => undefined, + fromCache: false, + }); + + // First sync + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Second sync with same config + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Verify source is still in lock + const lockContent2 = await readFile(lockPath, "utf8"); + const lock2 = JSON.parse(lockContent2); + assert.ok(lock2.sources["source-one"], "source-one should still be in lock"); + assert.equal(lock2.sources["source-one"].resolvedCommit, "fixed-commit"); +}); From d5a91949a244b13b00dd1a4bcadd9c4465bfa02c Mon Sep 17 00:00:00 2001 From: Frederik Bosch <6979916+fbosch@users.noreply.github.com> Date: Mon, 23 Feb 2026 12:29:18 +0100 Subject: [PATCH 3/5] Update src/commands/sync.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- src/commands/sync.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/commands/sync.ts b/src/commands/sync.ts index b42db99..018d368 100644 
--- a/src/commands/sync.ts +++ b/src/commands/sync.ts @@ -240,7 +240,9 @@ const buildLock = async ( ) => { const toolVersion = await loadToolVersion(); const now = new Date().toISOString(); - const configSourceIds = new Set(plan.sources.map((source) => source.id)); + const configSourceIds = new Set( + plan.config.sources.map((source) => source.id), + ); const sources: Record<string, DocsCacheLockSource> = {}; if (previous?.sources) { for (const [id, source] of Object.entries(previous.sources)) { From 8d872552403d4b297146778e900f8f97bc7ca155 Mon Sep 17 00:00:00 2001 From: Frederik Bosch <6979916+fbosch@users.noreply.github.com> Date: Mon, 23 Feb 2026 12:29:24 +0100 Subject: [PATCH 4/5] Update tests/sync-lock-cleanup.test.js Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tests/sync-lock-cleanup.test.js | 105 ++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) diff --git a/tests/sync-lock-cleanup.test.js b/tests/sync-lock-cleanup.test.js index c69a654..342d2e3 100644 --- a/tests/sync-lock-cleanup.test.js +++ b/tests/sync-lock-cleanup.test.js @@ -206,3 +206,108 @@ test("sync preserves lock entries for sources still in config", async () => { assert.ok(lock2.sources["source-one"], "source-one should still be in lock"); assert.equal(lock2.sources["source-one"].resolvedCommit, "fixed-commit"); }); + +test( + "sync preserves all lock entries when using sourceFilter for a subset of sources", + async () => { + const tmpRoot = path.join( + tmpdir(), + `docs-cache-lock-preserve-source-filter-${Date.now().toString(36)}`, + ); + await mkdir(tmpRoot, { recursive: true }); + const cacheDir = path.join(tmpRoot, ".docs"); + const repoDir = path.join(tmpRoot, "repo"); + const configPath = path.join(tmpRoot, "docs.config.json"); + const lockPath = path.join(tmpRoot, "docs-lock.json"); + + await mkdir(repoDir, { recursive: true }); + await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8"); + + const config = { + $schema: +
"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json", + sources: [ + { + id: "source-one", + repo: "https://example.com/source-one.git", + include: ["**/*.md"], + }, + { + id: "source-two", + repo: "https://example.com/source-two.git", + include: ["**/*.md"], + }, + ], + }; + await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); + + const resolveRemoteCommit = async ({ repo }) => ({ + repo, + ref: "HEAD", + resolvedCommit: repo.endsWith("source-one.git") + ? "commit-source-one" + : "commit-source-two", + }); + + const fetchSource = async () => ({ + repoDir, + cleanup: async () => undefined, + fromCache: false, + }); + + // Initial sync to populate lock for both sources + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Sync again, but only for source-one using sourceFilter + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + sourceFilter: ["source-one"], + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Verify both sources are still in lock with their respective commits + const lockContent = await readFile(lockPath, "utf8"); + const lock = JSON.parse(lockContent); + + assert.ok( + lock.sources["source-one"], + "source-one should still be in lock after filtered sync", + ); + assert.ok( + lock.sources["source-two"], + "source-two should still be in lock after filtered sync", + ); + + assert.equal( + lock.sources["source-one"].resolvedCommit, + "commit-source-one", + ); + assert.equal( + lock.sources["source-two"].resolvedCommit, + "commit-source-two", + ); + }, +); From 4c217529510df97a620f0cdef760a9626e97c8ba Mon Sep 17 00:00:00 2001 From: Frederik Bosch <6979916+fbosch@users.noreply.github.com> Date: Mon, 23 Feb 2026 12:31:00 +0100 Subject: [PATCH 5/5] chore: lint 
--- tests/sync-lock-cleanup.test.js | 179 +++++++++++++++----------------- 1 file changed, 85 insertions(+), 94 deletions(-) diff --git a/tests/sync-lock-cleanup.test.js b/tests/sync-lock-cleanup.test.js index 342d2e3..4d2cbef 100644 --- a/tests/sync-lock-cleanup.test.js +++ b/tests/sync-lock-cleanup.test.js @@ -207,107 +207,98 @@ test("sync preserves lock entries for sources still in config", async () => { assert.equal(lock2.sources["source-one"].resolvedCommit, "fixed-commit"); }); -test( - "sync preserves all lock entries when using sourceFilter for a subset of sources", - async () => { - const tmpRoot = path.join( - tmpdir(), - `docs-cache-lock-preserve-source-filter-${Date.now().toString(36)}`, - ); - await mkdir(tmpRoot, { recursive: true }); - const cacheDir = path.join(tmpRoot, ".docs"); - const repoDir = path.join(tmpRoot, "repo"); - const configPath = path.join(tmpRoot, "docs.config.json"); - const lockPath = path.join(tmpRoot, "docs-lock.json"); - - await mkdir(repoDir, { recursive: true }); - await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8"); - - const config = { - $schema: - "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json", - sources: [ - { - id: "source-one", - repo: "https://example.com/source-one.git", - include: ["**/*.md"], - }, - { - id: "source-two", - repo: "https://example.com/source-two.git", - include: ["**/*.md"], - }, - ], - }; - await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); - - const resolveRemoteCommit = async ({ repo }) => ({ - repo, - ref: "HEAD", - resolvedCommit: repo.endsWith("source-one.git") - ? 
"commit-source-one" - : "commit-source-two", - }); +test("sync preserves all lock entries when using sourceFilter for a subset of sources", async () => { + const tmpRoot = path.join( + tmpdir(), + `docs-cache-lock-preserve-source-filter-${Date.now().toString(36)}`, + ); + await mkdir(tmpRoot, { recursive: true }); + const cacheDir = path.join(tmpRoot, ".docs"); + const repoDir = path.join(tmpRoot, "repo"); + const configPath = path.join(tmpRoot, "docs.config.json"); + const lockPath = path.join(tmpRoot, "docs-lock.json"); - const fetchSource = async () => ({ - repoDir, - cleanup: async () => undefined, - fromCache: false, - }); + await mkdir(repoDir, { recursive: true }); + await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8"); - // Initial sync to populate lock for both sources - await runSync( + const config = { + $schema: + "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json", + sources: [ { - configPath, - cacheDirOverride: cacheDir, - json: false, - lockOnly: false, - offline: false, - failOnMiss: false, + id: "source-one", + repo: "https://example.com/source-one.git", + include: ["**/*.md"], }, { - resolveRemoteCommit, - fetchSource, + id: "source-two", + repo: "https://example.com/source-two.git", + include: ["**/*.md"], }, - ); + ], + }; + await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8"); - // Sync again, but only for source-one using sourceFilter - await runSync( - { - configPath, - cacheDirOverride: cacheDir, - json: false, - lockOnly: false, - offline: false, - failOnMiss: false, - sourceFilter: ["source-one"], - }, - { - resolveRemoteCommit, - fetchSource, - }, - ); + const resolveRemoteCommit = async ({ repo }) => ({ + repo, + ref: "HEAD", + resolvedCommit: repo.endsWith("source-one.git") + ? 
"commit-source-one" + : "commit-source-two", + }); + + const fetchSource = async () => ({ + repoDir, + cleanup: async () => undefined, + fromCache: false, + }); - // Verify both sources are still in lock with their respective commits - const lockContent = await readFile(lockPath, "utf8"); - const lock = JSON.parse(lockContent); + // Initial sync to populate lock for both sources + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); - assert.ok( - lock.sources["source-one"], - "source-one should still be in lock after filtered sync", - ); - assert.ok( - lock.sources["source-two"], - "source-two should still be in lock after filtered sync", - ); + // Sync again, but only for source-one using sourceFilter + await runSync( + { + configPath, + cacheDirOverride: cacheDir, + json: false, + lockOnly: false, + offline: false, + failOnMiss: false, + sourceFilter: ["source-one"], + }, + { + resolveRemoteCommit, + fetchSource, + }, + ); + + // Verify both sources are still in lock with their respective commits + const lockContent = await readFile(lockPath, "utf8"); + const lock = JSON.parse(lockContent); - assert.equal( - lock.sources["source-one"].resolvedCommit, - "commit-source-one", - ); - assert.equal( - lock.sources["source-two"].resolvedCommit, - "commit-source-two", - ); - }, -); + assert.ok( + lock.sources["source-one"], + "source-one should still be in lock after filtered sync", + ); + assert.ok( + lock.sources["source-two"], + "source-two should still be in lock after filtered sync", + ); + + assert.equal(lock.sources["source-one"].resolvedCommit, "commit-source-one"); + assert.equal(lock.sources["source-two"].resolvedCommit, "commit-source-two"); +});