Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions src/commands/sync.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { createHash } from "node:crypto";
import { access, mkdir, readFile } from "node:fs/promises";
import path from "node:path";
import pc from "picocolors";
import type { DocsCacheLock } from "#cache/lock";
import type { DocsCacheLock, DocsCacheLockSource } from "#cache/lock";
import { readLock, resolveLockPath, writeLock } from "#cache/lock";
import { MANIFEST_FILENAME } from "#cache/manifest";
import { computeManifestHash, materializeSource } from "#cache/materialize";
Expand Down Expand Up @@ -240,7 +240,17 @@ const buildLock = async (
) => {
const toolVersion = await loadToolVersion();
const now = new Date().toISOString();
const sources = { ...(previous?.sources ?? {}) };
const configSourceIds = new Set(
plan.config.sources.map((source) => source.id),
);
const sources: Record<string, DocsCacheLockSource> = {};
if (previous?.sources) {
for (const [id, source] of Object.entries(previous.sources)) {
if (configSourceIds.has(id)) {
sources[id] = source;
}
}
}
for (const result of plan.results) {
const prior = sources[result.id];
sources[result.id] = buildLockSource(result, prior, now);
Expand Down
304 changes: 304 additions & 0 deletions tests/sync-lock-cleanup.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,304 @@
import assert from "node:assert/strict";
import { access, mkdir, readFile, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import path from "node:path";
import { test } from "node:test";

import { runSync } from "../dist/api.mjs";

// Resolve to true when `target` is reachable on disk, false otherwise.
const exists = (target) =>
  access(target).then(
    () => true,
    () => false,
  );

test("sync removes lock entries for sources removed from config", async () => {
  // Scratch workspace unique to this test run.
  const workspace = path.join(
    tmpdir(),
    `docs-cache-lock-cleanup-${Date.now().toString(36)}`,
  );
  await mkdir(workspace, { recursive: true });
  const cacheDir = path.join(workspace, ".docs");
  const repoDir = path.join(workspace, "repo");
  const configPath = path.join(workspace, "docs.config.json");
  const lockPath = path.join(workspace, "docs-lock.json");

  await mkdir(repoDir, { recursive: true });
  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");

  // Build a config entry for one source id.
  const makeSource = (id) => ({
    id,
    repo: "https://example.com/repo.git",
    include: ["**/*.md"],
  });

  // Initial config with two sources.
  const config = {
    $schema:
      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
    sources: [makeSource("source-one"), makeSource("source-two")],
  };
  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");

  // Hand out a fresh commit id on every resolution.
  let commitCounter = 0;
  const resolveRemoteCommit = async ({ repo }) => {
    commitCounter += 1;
    return {
      repo,
      ref: "HEAD",
      resolvedCommit: `commit-${commitCounter}`,
    };
  };

  const fetchSource = async () => ({
    repoDir,
    cleanup: async () => undefined,
    fromCache: false,
  });

  const syncOptions = {
    configPath,
    cacheDirOverride: cacheDir,
    json: false,
    lockOnly: false,
    offline: false,
    failOnMiss: false,
  };
  const deps = { resolveRemoteCommit, fetchSource };

  // First sync: both sources are configured.
  await runSync({ ...syncOptions }, deps);

  // The lock should now track both sources.
  assert.equal(await exists(lockPath), true);
  const firstLockRaw = await readFile(lockPath, "utf8");
  const firstLock = JSON.parse(firstLockRaw);
  assert.ok(firstLock.sources["source-one"]);
  assert.ok(firstLock.sources["source-two"]);

  // Drop source-two from the config and write it back out.
  config.sources = [makeSource("source-one")];
  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");

  // Second sync: only source-one remains configured.
  await runSync({ ...syncOptions }, deps);

  // The stale source-two entry must have been pruned from the lock.
  const secondLockRaw = await readFile(lockPath, "utf8");
  const secondLock = JSON.parse(secondLockRaw);
  assert.ok(
    secondLock.sources["source-one"],
    "source-one should still be in lock",
  );
  assert.equal(
    secondLock.sources["source-two"],
    undefined,
    "source-two should be removed from lock",
  );
  assert.equal(
    Object.keys(secondLock.sources).length,
    1,
    "lock should only have one source",
  );
});

test("sync preserves lock entries for sources still in config", async () => {
  // Scratch workspace unique to this test run.
  const workspace = path.join(
    tmpdir(),
    `docs-cache-lock-preserve-${Date.now().toString(36)}`,
  );
  await mkdir(workspace, { recursive: true });
  const cacheDir = path.join(workspace, ".docs");
  const repoDir = path.join(workspace, "repo");
  const configPath = path.join(workspace, "docs.config.json");
  const lockPath = path.join(workspace, "docs-lock.json");

  await mkdir(repoDir, { recursive: true });
  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");

  const config = {
    $schema:
      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
    sources: [
      {
        id: "source-one",
        repo: "https://example.com/repo.git",
        include: ["**/*.md"],
      },
    ],
  };
  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");

  // Always resolve to the same commit so both passes agree.
  const resolveRemoteCommit = async ({ repo }) => ({
    repo,
    ref: "HEAD",
    resolvedCommit: "fixed-commit",
  });

  const fetchSource = async () => ({
    repoDir,
    cleanup: async () => undefined,
    fromCache: false,
  });

  const syncOptions = {
    configPath,
    cacheDirOverride: cacheDir,
    json: false,
    lockOnly: false,
    offline: false,
    failOnMiss: false,
  };
  const deps = { resolveRemoteCommit, fetchSource };

  // Run sync twice against an unchanged config.
  await runSync({ ...syncOptions }, deps);
  await runSync({ ...syncOptions }, deps);

  // The configured source must survive both passes with its commit intact.
  const rawLock = await readFile(lockPath, "utf8");
  const lock = JSON.parse(rawLock);
  assert.ok(lock.sources["source-one"], "source-one should still be in lock");
  assert.equal(lock.sources["source-one"].resolvedCommit, "fixed-commit");
});

test("sync preserves all lock entries when using sourceFilter for a subset of sources", async () => {
  // Scratch workspace unique to this test run.
  const workspace = path.join(
    tmpdir(),
    `docs-cache-lock-preserve-source-filter-${Date.now().toString(36)}`,
  );
  await mkdir(workspace, { recursive: true });
  const cacheDir = path.join(workspace, ".docs");
  const repoDir = path.join(workspace, "repo");
  const configPath = path.join(workspace, "docs.config.json");
  const lockPath = path.join(workspace, "docs-lock.json");

  await mkdir(repoDir, { recursive: true });
  await writeFile(path.join(repoDir, "a.md"), "alpha", "utf8");

  // Each source points at its own repository URL.
  const makeSource = (id) => ({
    id,
    repo: `https://example.com/${id}.git`,
    include: ["**/*.md"],
  });

  const config = {
    $schema:
      "https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",
    sources: [makeSource("source-one"), makeSource("source-two")],
  };
  await writeFile(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");

  // Resolve a per-repo commit so the two sources stay distinguishable.
  const resolveRemoteCommit = async ({ repo }) => {
    const resolvedCommit = repo.endsWith("source-one.git")
      ? "commit-source-one"
      : "commit-source-two";
    return { repo, ref: "HEAD", resolvedCommit };
  };

  const fetchSource = async () => ({
    repoDir,
    cleanup: async () => undefined,
    fromCache: false,
  });

  const syncOptions = {
    configPath,
    cacheDirOverride: cacheDir,
    json: false,
    lockOnly: false,
    offline: false,
    failOnMiss: false,
  };
  const deps = { resolveRemoteCommit, fetchSource };

  // Initial sync populates the lock for both sources.
  await runSync({ ...syncOptions }, deps);

  // Re-sync only source-one via sourceFilter.
  await runSync({ ...syncOptions, sourceFilter: ["source-one"] }, deps);

  // The unfiltered source must keep its lock entry and commit.
  const rawLock = await readFile(lockPath, "utf8");
  const lock = JSON.parse(rawLock);

  assert.ok(
    lock.sources["source-one"],
    "source-one should still be in lock after filtered sync",
  );
  assert.ok(
    lock.sources["source-two"],
    "source-two should still be in lock after filtered sync",
  );

  assert.equal(lock.sources["source-one"].resolvedCommit, "commit-source-one");
  assert.equal(lock.sources["source-two"].resolvedCommit, "commit-source-two");
});
Loading