diff --git a/src/commands/sync.ts b/src/commands/sync.ts
index ea32338..018d368 100644
--- a/src/commands/sync.ts
+++ b/src/commands/sync.ts
@@ -2,7 +2,7 @@ import { createHash } from "node:crypto";
 import { access, mkdir, readFile } from "node:fs/promises";
 import path from "node:path";
 import pc from "picocolors";
-import type { DocsCacheLock } from "#cache/lock";
+import type { DocsCacheLock, DocsCacheLockSource } from "#cache/lock";
 import { readLock, resolveLockPath, writeLock } from "#cache/lock";
 import { MANIFEST_FILENAME } from "#cache/manifest";
 import { computeManifestHash, materializeSource } from "#cache/materialize";
@@ -240,7 +240,17 @@ const buildLock = async (
 ) => {
   const toolVersion = await loadToolVersion();
   const now = new Date().toISOString();
-  const sources = { ...(previous?.sources ?? {}) };
+  const configSourceIds = new Set(
+    plan.config.sources.map((source) => source.id),
+  );
+  const sources: Record<string, DocsCacheLockSource> = {};
+  if (previous?.sources) {
+    for (const [id, source] of Object.entries(previous.sources)) {
+      if (configSourceIds.has(id)) {
+        sources[id] = source;
+      }
+    }
+  }
   for (const result of plan.results) {
     const prior = sources[result.id];
     sources[result.id] = buildLockSource(result, prior, now);
diff --git a/tests/sync-lock-cleanup.test.js b/tests/sync-lock-cleanup.test.js
new file mode 100644
index 0000000..4d2cbef
--- /dev/null
+++ b/tests/sync-lock-cleanup.test.js
@@ -0,0 +1,304 @@
+import assert from "node:assert/strict";
+import { access, mkdir, readFile, writeFile } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import path from "node:path";
+import { test } from "node:test";
+
+import { runSync } from "../dist/api.mjs";
+
+const exists = async (target) => {
+  try {
+    await access(target);
+    return true;
+  } catch {
+    return false;
+  }
+};
+
+test("sync removes lock entries for sources removed from config", async () => {
+  const tmpRoot = path.join(
+    tmpdir(),
+    `docs-cache-lock-cleanup-${Date.now().toString(36)}`,
+  );
+  await mkdir(tmpRoot, { recursive: true });
+  const cacheDir = path.join(tmpRoot, ".docs");
+  const repoDir = path.join(tmpRoot, "repo");
+  const configPath = path.join(tmpRoot, "docs.config.json");
+  const lockPath = path.join(tmpRoot, "docs-lock.json");
+
+  await mkdir(repoDir, { recursive: true });
+  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");
+
+  // Initial config with two sources
+  const config = {
+    $schema:
+      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
+    sources: [
+      {
+        id: "source-one",
+        repo: "https://example.com/repo.git",
+        include: ["**/*.md"],
+      },
+      {
+        id: "source-two",
+        repo: "https://example.com/repo.git",
+        include: ["**/*.md"],
+      },
+    ],
+  };
+  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
+
+  let resolveCallCount = 0;
+  const resolveRemoteCommit = async ({ repo }) => {
+    resolveCallCount += 1;
+    return {
+      repo,
+      ref: "HEAD",
+      resolvedCommit: `commit-${resolveCallCount}`,
+    };
+  };
+
+  const fetchSource = async () => ({
+    repoDir,
+    cleanup: async () => undefined,
+    fromCache: false,
+  });
+
+  // First sync with both sources
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Verify lock contains both sources
+  assert.equal(await exists(lockPath), true);
+  const lockContent1 = await readFile(lockPath, "utf8");
+  const lock1 = JSON.parse(lockContent1);
+  assert.ok(lock1.sources["source-one"]);
+  assert.ok(lock1.sources["source-two"]);
+
+  // Update config to remove source-two
+  config.sources = [
+    {
+      id: "source-one",
+      repo: "https://example.com/repo.git",
+      include: ["**/*.md"],
+    },
+  ];
+  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
+
+  // Second sync with only source-one
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Verify lock only contains source-one
+  const lockContent2 = await readFile(lockPath, "utf8");
+  const lock2 = JSON.parse(lockContent2);
+  assert.ok(lock2.sources["source-one"], "source-one should still be in lock");
+  assert.equal(
+    lock2.sources["source-two"],
+    undefined,
+    "source-two should be removed from lock",
+  );
+  assert.equal(
+    Object.keys(lock2.sources).length,
+    1,
+    "lock should only have one source",
+  );
+});
+
+test("sync preserves lock entries for sources still in config", async () => {
+  const tmpRoot = path.join(
+    tmpdir(),
+    `docs-cache-lock-preserve-${Date.now().toString(36)}`,
+  );
+  await mkdir(tmpRoot, { recursive: true });
+  const cacheDir = path.join(tmpRoot, ".docs");
+  const repoDir = path.join(tmpRoot, "repo");
+  const configPath = path.join(tmpRoot, "docs.config.json");
+  const lockPath = path.join(tmpRoot, "docs-lock.json");
+
+  await mkdir(repoDir, { recursive: true });
+  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");
+
+  const config = {
+    $schema:
+      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
+    sources: [
+      {
+        id: "source-one",
+        repo: "https://example.com/repo.git",
+        include: ["**/*.md"],
+      },
+    ],
+  };
+  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
+
+  const resolveRemoteCommit = async ({ repo }) => ({
+    repo,
+    ref: "HEAD",
+    resolvedCommit: "fixed-commit",
+  });
+
+  const fetchSource = async () => ({
+    repoDir,
+    cleanup: async () => undefined,
+    fromCache: false,
+  });
+
+  // First sync
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Second sync with same config
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Verify source is still in lock
+  const lockContent2 = await readFile(lockPath, "utf8");
+  const lock2 = JSON.parse(lockContent2);
+  assert.ok(lock2.sources["source-one"], "source-one should still be in lock");
+  assert.equal(lock2.sources["source-one"].resolvedCommit, "fixed-commit");
+});
+
+test("sync preserves all lock entries when using sourceFilter for a subset of sources", async () => {
+  const tmpRoot = path.join(
+    tmpdir(),
+    `docs-cache-lock-preserve-source-filter-${Date.now().toString(36)}`,
+  );
+  await mkdir(tmpRoot, { recursive: true });
+  const cacheDir = path.join(tmpRoot, ".docs");
+  const repoDir = path.join(tmpRoot, "repo");
+  const configPath = path.join(tmpRoot, "docs.config.json");
+  const lockPath = path.join(tmpRoot, "docs-lock.json");
+
+  await mkdir(repoDir, { recursive: true });
+  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");
+
+  const config = {
+    $schema:
+      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
+    sources: [
+      {
+        id: "source-one",
+        repo: "https://example.com/source-one.git",
+        include: ["**/*.md"],
+      },
+      {
+        id: "source-two",
+        repo: "https://example.com/source-two.git",
+        include: ["**/*.md"],
+      },
+    ],
+  };
+  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
+
+  const resolveRemoteCommit = async ({ repo }) => ({
+    repo,
+    ref: "HEAD",
+    resolvedCommit: repo.endsWith("source-one.git")
+      ? "commit-source-one"
+      : "commit-source-two",
+  });
+
+  const fetchSource = async () => ({
+    repoDir,
+    cleanup: async () => undefined,
+    fromCache: false,
+  });
+
+  // Initial sync to populate lock for both sources
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Sync again, but only for source-one using sourceFilter
+  await runSync(
+    {
+      configPath,
+      cacheDirOverride: cacheDir,
+      json: false,
+      lockOnly: false,
+      offline: false,
+      failOnMiss: false,
+      sourceFilter: ["source-one"],
+    },
+    {
+      resolveRemoteCommit,
+      fetchSource,
+    },
+  );
+
+  // Verify both sources are still in lock with their respective commits
+  const lockContent = await readFile(lockPath, "utf8");
+  const lock = JSON.parse(lockContent);
+
+  assert.ok(
+    lock.sources["source-one"],
+    "source-one should still be in lock after filtered sync",
+  );
+  assert.ok(
+    lock.sources["source-two"],
+    "source-two should still be in lock after filtered sync",
+  );
+
+  assert.equal(lock.sources["source-one"].resolvedCommit, "commit-source-one");
+  assert.equal(lock.sources["source-two"].resolvedCommit, "commit-source-two");
+});