diff --git a/bin/flatcover.js b/bin/flatcover.js index 5f8e096..c1e8d14 100755 --- a/bin/flatcover.js +++ b/bin/flatcover.js @@ -14,6 +14,8 @@ import { parseArgs } from 'node:util'; import { readFileSync } from 'node:fs'; +import { createReadStream } from 'node:fs'; +import { createInterface } from 'node:readline'; import { dirname, join } from 'node:path'; import { Pool, RetryAgent } from 'undici'; import { FlatlockSet } from '../src/set.js'; @@ -21,6 +23,7 @@ import { FlatlockSet } from '../src/set.js'; const { values, positionals } = parseArgs({ options: { workspace: { type: 'string', short: 'w' }, + list: { type: 'string', short: 'l' }, dev: { type: 'boolean', default: false }, peer: { type: 'boolean', default: true }, specs: { type: 'boolean', short: 's', default: false }, @@ -39,16 +42,28 @@ const { values, positionals } = parseArgs({ allowPositionals: true }); -if (values.help || positionals.length === 0) { +// Check if stdin input is requested via '-' positional argument (Unix convention) +const useStdin = positionals[0] === '-'; + +// Determine if we have a valid input source +const hasInputSource = positionals.length > 0 || values.list; + +if (values.help || !hasInputSource) { console.log(`flatcover - Check lockfile package coverage against a registry Usage: flatcover <lockfile> --cover - flatcover <lockfile> --cover --registry <url> + flatcover --list packages.json --cover + cat packages.ndjson | flatcover - --cover flatcover <lockfile> --cover --registry <url> --auth user:pass +Input sources (mutually exclusive): + <lockfile> Parse lockfile (package-lock.json, pnpm-lock.yaml, yarn.lock) + -l, --list <file> Read JSON array of {name, version} objects from file + - Read NDJSON {name, version} objects from stdin (one per line) + Options: - -w, --workspace <path> Workspace path within monorepo + -w, --workspace <path> Workspace path within monorepo (lockfile mode only) -s, --specs Include version (name@version or {name,version}) --json Output as JSON array --ndjson Output as newline-delimited JSON (streaming) @@ -68,14 +83,27 @@ 
Coverage options: Output formats (with --cover): (default) CSV: package,version,present + --full CSV: package,version,present,integrity,resolved --json [{"name":"...","version":"...","present":true}, ...] + --full --json Adds "integrity" and "resolved" fields to JSON --ndjson {"name":"...","version":"...","present":true} per line Examples: + # From lockfile flatcover package-lock.json --cover + flatcover package-lock.json --cover --full --json + + # From JSON list file + flatcover --list packages.json --cover --summary + echo '[{"name":"lodash","version":"4.17.21"}]' > pkgs.json && flatcover -l pkgs.json --cover + + # From stdin (NDJSON) - use '-' to read from stdin + echo '{"name":"lodash","version":"4.17.21"}' | flatcover - --cover + cat packages.ndjson | flatcover - --cover --json + + # With custom registry flatcover package-lock.json --cover --registry https://npm.pkg.github.com --token ghp_xxx - flatcover pnpm-lock.yaml --cover --auth admin:secret --ndjson - flatcover pnpm-lock.yaml -w packages/core --cover --summary`); + flatcover pnpm-lock.yaml --cover --auth admin:secret --ndjson`); process.exit(values.help ? 
0 : 1); } @@ -89,6 +117,19 @@ if (values.auth && values.token) { process.exit(1); } +// Validate mutually exclusive input sources +// Note: useStdin means positionals[0] === '-', so it's already counted in positionals.length +if (positionals.length > 0 && values.list) { + console.error('Error: Cannot use both lockfile/stdin and --list'); + process.exit(1); +} + +// --workspace only works with lockfile input (not stdin or --list) +if (values.workspace && (useStdin || values.list || !positionals.length)) { + console.error('Error: --workspace can only be used with lockfile input'); + process.exit(1); +} + // --full implies --specs if (values.full) { values.specs = true; @@ -102,6 +143,70 @@ if (values.cover) { const lockfilePath = positionals[0]; const concurrency = Math.max(1, Math.min(50, Number.parseInt(values.concurrency, 10) || 20)); +/** + * Read packages from a JSON list file + * @param {string} filePath - Path to JSON file containing [{name, version}, ...] + * @returns {Array<{ name: string, version: string }>} + */ +function readJsonList(filePath) { + const content = readFileSync(filePath, 'utf8'); + const data = JSON.parse(content); + + if (!Array.isArray(data)) { + throw new Error('--list file must contain a JSON array'); + } + + const packages = []; + for (const item of data) { + if (!item.name || !item.version) { + throw new Error('Each item in --list must have "name" and "version" fields'); + } + packages.push({ + name: item.name, + version: item.version, + integrity: item.integrity, + resolved: item.resolved + }); + } + + return packages; +} + +/** + * Read packages from stdin as NDJSON + * @returns {Promise<Array<{ name: string, version: string, integrity?: string, resolved?: string }>>} + */ +async function readStdinNdjson() { + const packages = []; + + const rl = createInterface({ + input: process.stdin, + crlfDelay: Infinity + }); + + for await (const line of rl) { + const trimmed = line.trim(); + if (!trimmed) continue; + + try { + const item = JSON.parse(trimmed); + if (!item.name || !item.version) { + throw new Error('Each 
line must have "name" and "version" fields'); + } + packages.push({ + name: item.name, + version: item.version, + integrity: item.integrity, + resolved: item.resolved + }); + } catch (err) { + throw new Error(`Invalid JSON on stdin: ${err.message}`); + } + } + + return packages; +} + /** * Encode package name for URL (handle scoped packages) * @param {string} name - Package name like @babel/core * @@ -161,21 +266,22 @@ function createClient(registryUrl, { auth, token }) { /** * Check coverage for all dependencies - * @param {Array<{ name: string, version: string }>} deps + * @param {Array<{ name: string, version: string, integrity?: string, resolved?: string }>} deps * @param {{ registry: string, auth?: string, token?: string, progress: boolean }} options - * @returns {AsyncGenerator<{ name: string, version: string, present: boolean, error?: string }>} + * @returns {AsyncGenerator<{ name: string, version: string, present: boolean, integrity?: string, resolved?: string, error?: string }>} */ async function* checkCoverage(deps, { registry, auth, token, progress }) { const { client, headers, baseUrl } = createClient(registry, { auth, token }); // Group by package name to avoid duplicate requests - /** @type {Map<string, Set<string>>} */ + // Store full dep info (including integrity/resolved) keyed by version + /** @type {Map<string, Map<string, { name: string, version: string, integrity?: string, resolved?: string }>>} */ const byPackage = new Map(); for (const dep of deps) { if (!byPackage.has(dep.name)) { - byPackage.set(dep.name, new Set()); + byPackage.set(dep.name, new Map()); } - byPackage.get(dep.name).add(dep.version); + byPackage.get(dep.name).set(dep.version, dep); } const packages = [...byPackage.entries()]; @@ -187,7 +293,7 @@ async function* checkCoverage(deps, { registry, auth, token, progress }) { const batch = packages.slice(i, i + concurrency); const results = await Promise.all( - batch.map(async ([name, versions]) => { + batch.map(async ([name, versionMap]) => { const encodedName = encodePackageName(name); const basePath = baseUrl.pathname.replace(/\/$/, ''); 
const path = `${basePath}/${encodedName}`; @@ -216,21 +322,29 @@ async function* checkCoverage(deps, { registry, auth, token, progress }) { packumentVersions = packument.versions || {}; } - // Check each version + // Check each version, preserving integrity/resolved from original dep const versionResults = []; - for (const version of versions) { + for (const [version, dep] of versionMap) { const present = packumentVersions ? !!packumentVersions[version] : false; - versionResults.push({ name, version, present }); + const result = { name, version, present }; + if (dep.integrity) result.integrity = dep.integrity; + if (dep.resolved) result.resolved = dep.resolved; + versionResults.push(result); } return versionResults; } catch (err) { // Return error for all versions of this package - return [...versions].map(version => ({ - name, - version, - present: false, - error: err.message - })); + return [...versionMap.values()].map(dep => { + const result = { + name: dep.name, + version: dep.version, + present: false, + error: err.message + }; + if (dep.integrity) result.integrity = dep.integrity; + if (dep.resolved) result.resolved = dep.resolved; + return result; + }); } }) ); @@ -300,10 +414,10 @@ function outputDeps(deps, { specs, json, ndjson, full }) { /** * Output coverage results - * @param {AsyncGenerator<{ name: string, version: string, present: boolean, error?: string }>} results - * @param {{ json: boolean, ndjson: boolean, summary: boolean }} options + * @param {AsyncGenerator<{ name: string, version: string, present: boolean, integrity?: string, resolved?: string, error?: string }>} results + * @param {{ json: boolean, ndjson: boolean, summary: boolean, full: boolean }} options */ -async function outputCoverage(results, { json, ndjson, summary }) { +async function outputCoverage(results, { json, ndjson, summary, full }) { const all = []; let presentCount = 0; let missingCount = 0; @@ -317,7 +431,10 @@ async function outputCoverage(results, { json, ndjson, 
summary }) { if (ndjson) { // Stream immediately - console.log(JSON.stringify({ name: result.name, version: result.version, present: result.present })); + const obj = { name: result.name, version: result.version, present: result.present }; + if (full && result.integrity) obj.integrity = result.integrity; + if (full && result.resolved) obj.resolved = result.resolved; + console.log(JSON.stringify(obj)); } else { all.push(result); } @@ -328,13 +445,25 @@ async function outputCoverage(results, { json, ndjson, summary }) { all.sort((a, b) => a.name.localeCompare(b.name) || a.version.localeCompare(b.version)); if (json) { - const data = all.map(r => ({ name: r.name, version: r.version, present: r.present })); + const data = all.map(r => { + const obj = { name: r.name, version: r.version, present: r.present }; + if (full && r.integrity) obj.integrity = r.integrity; + if (full && r.resolved) obj.resolved = r.resolved; + return obj; + }); console.log(JSON.stringify(data, null, 2)); } else { // CSV output - console.log('package,version,present'); - for (const r of all) { - console.log(`${r.name},${r.version},${r.present}`); + if (full) { + console.log('package,version,present,integrity,resolved'); + for (const r of all) { + console.log(`${r.name},${r.version},${r.present},${r.integrity || ''},${r.resolved || ''}`); + } + } else { + console.log('package,version,present'); + for (const r of all) { + console.log(`${r.name},${r.version},${r.present}`); + } } } } @@ -350,22 +479,41 @@ async function outputCoverage(results, { json, ndjson, summary }) { } try { - const lockfile = await FlatlockSet.fromPath(lockfilePath); let deps; - if (values.workspace) { - const repoDir = dirname(lockfilePath); - const workspacePkgPath = join(repoDir, values.workspace, 'package.json'); - const workspacePkg = JSON.parse(readFileSync(workspacePkgPath, 'utf8')); - - deps = await lockfile.dependenciesOf(workspacePkg, { - workspacePath: values.workspace, - repoDir, - dev: values.dev, - peer: 
values.peer - }); + // Determine input source and load dependencies + if (useStdin) { + // Read from stdin (NDJSON) + deps = await readStdinNdjson(); + if (deps.length === 0) { + console.error('Error: No packages read from stdin'); + process.exit(1); + } + } else if (values.list) { + // Read from JSON list file + deps = readJsonList(values.list); + if (deps.length === 0) { + console.error('Error: No packages found in --list file'); + process.exit(1); + } } else { - deps = lockfile; + // Read from lockfile (existing behavior) + const lockfile = await FlatlockSet.fromPath(lockfilePath); + + if (values.workspace) { + const repoDir = dirname(lockfilePath); + const workspacePkgPath = join(repoDir, values.workspace, 'package.json'); + const workspacePkg = JSON.parse(readFileSync(workspacePkgPath, 'utf8')); + + deps = await lockfile.dependenciesOf(workspacePkg, { + workspacePath: values.workspace, + repoDir, + dev: values.dev, + peer: values.peer + }); + } else { + deps = lockfile; + } } if (values.cover) { @@ -381,7 +529,8 @@ try { await outputCoverage(results, { json: values.json, ndjson: values.ndjson, - summary: values.summary + summary: values.summary, + full: values.full }); } else { // Standard flatlock mode diff --git a/test/flatcover.test.js b/test/flatcover.test.js new file mode 100644 index 0000000..1660abc --- /dev/null +++ b/test/flatcover.test.js @@ -0,0 +1,395 @@ +/** + * @fileoverview Tests for flatcover CLI functionality + * + * Tests cover: + * - --full flag with --cover mode (integrity/resolved fields) + * - --list option for JSON file input + * - stdin (-) input for NDJSON + */ + +import assert from 'node:assert/strict'; +import { execSync } from 'node:child_process'; +import { unlinkSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { dirname, join } from 'node:path'; +import { after, before, describe, test } from 'node:test'; +import { fileURLToPath } from 'node:url'; + +const __dirname = 
dirname(fileURLToPath(import.meta.url)); +const binPath = join(__dirname, '..', 'bin', 'flatcover.js'); +const lockfilePath = join(__dirname, '..', 'pnpm-lock.yaml'); + +/** + * Run flatcover CLI with given args and return stdout + * @param {string} args - CLI arguments + * @param {object} options - execSync options + * @returns {string} stdout output + */ +function runFlatcover(args, options = {}) { + return execSync(`node ${binPath} ${args}`, { + encoding: 'utf8', + timeout: 30000, + ...options + }); +} + +/** + * Run flatcover with lockfile input + * @param {string} args - CLI arguments (after lockfile) + * @returns {string} stdout output + */ +function runFlatcoverWithLockfile(args) { + return runFlatcover(`${lockfilePath} ${args}`); +} + +describe('flatcover --full --cover', () => { + describe('JSON output format', () => { + test('includes integrity field when --full --cover --json', () => { + const output = runFlatcoverWithLockfile('--full --cover --json'); + const data = JSON.parse(output); + + assert.ok(Array.isArray(data), 'Output should be JSON array'); + assert.ok(data.length > 0, 'Should have results'); + + // Find a result with integrity (most packages have it) + const withIntegrity = data.filter(r => r.integrity); + assert.ok(withIntegrity.length > 0, 'Should have results with integrity field'); + + // Verify structure of entries with integrity + for (const result of withIntegrity.slice(0, 5)) { + assert.ok(result.name, 'Should have name'); + assert.ok(result.version, 'Should have version'); + assert.ok(typeof result.present === 'boolean', 'Should have present boolean'); + assert.ok(result.integrity.startsWith('sha'), 'Integrity should be SHA hash'); + } + }); + + test('does NOT include integrity field without --full', () => { + const output = runFlatcoverWithLockfile('--cover --json'); + const data = JSON.parse(output); + + assert.ok(Array.isArray(data), 'Output should be JSON array'); + assert.ok(data.length > 0, 'Should have results'); + + // No 
result should have integrity without --full + const withIntegrity = data.filter(r => r.integrity); + assert.equal(withIntegrity.length, 0, 'Should NOT have integrity without --full'); + }); + }); + + describe('NDJSON output format', () => { + test('includes integrity field when --full --cover --ndjson', () => { + const output = runFlatcoverWithLockfile('--full --cover --ndjson'); + const lines = output.trim().split('\n'); + + assert.ok(lines.length > 0, 'Should have output lines'); + + // Parse first few lines + const results = lines.slice(0, 10).map(line => JSON.parse(line)); + + // Find results with integrity + const withIntegrity = results.filter(r => r.integrity); + assert.ok(withIntegrity.length > 0, 'Should have results with integrity field'); + + for (const result of withIntegrity) { + assert.ok(result.name, 'Should have name'); + assert.ok(result.version, 'Should have version'); + assert.ok(typeof result.present === 'boolean', 'Should have present boolean'); + assert.ok(result.integrity.startsWith('sha'), 'Integrity should be SHA hash'); + } + }); + + test('does NOT include integrity field without --full', () => { + const output = runFlatcoverWithLockfile('--cover --ndjson'); + const lines = output.trim().split('\n'); + const results = lines.slice(0, 10).map(line => JSON.parse(line)); + + const withIntegrity = results.filter(r => r.integrity); + assert.equal(withIntegrity.length, 0, 'Should NOT have integrity without --full'); + }); + }); + + describe('CSV output format', () => { + test('includes integrity,resolved columns when --full --cover', () => { + const output = runFlatcoverWithLockfile('--full --cover'); + const lines = output.trim().split('\n'); + + assert.ok(lines.length > 1, 'Should have header and data'); + + // Check header + const header = lines[0]; + assert.equal( + header, + 'package,version,present,integrity,resolved', + 'Header should include integrity,resolved columns' + ); + + // Check first data row has 5 columns + const dataRow = 
lines[1].split(','); + assert.equal(dataRow.length, 5, 'Data row should have 5 columns'); + }); + + test('does NOT include integrity,resolved columns without --full', () => { + const output = runFlatcoverWithLockfile('--cover'); + const lines = output.trim().split('\n'); + + assert.ok(lines.length > 1, 'Should have header and data'); + + // Check header + const header = lines[0]; + assert.equal( + header, + 'package,version,present', + 'Header should NOT include integrity,resolved columns' + ); + + // Check first data row has 3 columns + const dataRow = lines[1].split(','); + assert.equal(dataRow.length, 3, 'Data row should have 3 columns'); + }); + + test('CSV data row includes integrity value', () => { + const output = runFlatcoverWithLockfile('--full --cover'); + const lines = output.trim().split('\n'); + + // Find a row with integrity (non-empty 4th column) + const dataRows = lines.slice(1); + const rowWithIntegrity = dataRows.find(row => { + const cols = row.split(','); + return cols[3]?.startsWith('sha'); + }); + + assert.ok(rowWithIntegrity, 'Should have at least one row with integrity value'); + }); + }); +}); + +describe('flatcover --list (JSON file input)', () => { + const testListFile = join(tmpdir(), `flatcover-test-${Date.now()}.json`); + const testPackages = [ + { name: 'lodash', version: '4.17.21' }, + { name: 'express', version: '4.18.2' } + ]; + + before(() => { + writeFileSync(testListFile, JSON.stringify(testPackages)); + }); + + after(() => { + try { + unlinkSync(testListFile); + } catch { + // Ignore cleanup errors + } + }); + + test('reads packages from JSON list file', () => { + const output = runFlatcover(`--list ${testListFile} --cover --json`); + const data = JSON.parse(output); + + assert.ok(Array.isArray(data), 'Output should be JSON array'); + assert.equal(data.length, 2, 'Should have 2 results'); + + const names = data.map(r => r.name).sort(); + assert.deepEqual(names, ['express', 'lodash'], 'Should have correct packages'); + }); + + 
test('checks coverage with --list', () => { + const output = runFlatcover(`--list ${testListFile} --cover --json`); + const data = JSON.parse(output); + + // Both lodash and express should be present in npm registry + for (const result of data) { + assert.ok(result.name, 'Should have name'); + assert.ok(result.version, 'Should have version'); + assert.equal(result.present, true, `${result.name}@${result.version} should be present`); + } + }); + + test('outputs CSV format with --list', () => { + const output = runFlatcover(`--list ${testListFile} --cover`); + const lines = output.trim().split('\n'); + + assert.equal(lines[0], 'package,version,present', 'Should have CSV header'); + assert.equal(lines.length, 3, 'Should have header + 2 data rows'); + }); + + test('--list with --full includes integrity field (if provided)', () => { + // Create a list with integrity + const listWithIntegrity = [ + { + name: 'lodash', + version: '4.17.21', + integrity: 'sha512-test-integrity-hash' + } + ]; + const tempFile = join(tmpdir(), `flatcover-integrity-${Date.now()}.json`); + writeFileSync(tempFile, JSON.stringify(listWithIntegrity)); + + try { + const output = runFlatcover(`--list ${tempFile} --cover --full --json`); + const data = JSON.parse(output); + + assert.equal(data.length, 1, 'Should have 1 result'); + assert.equal( + data[0].integrity, + 'sha512-test-integrity-hash', + 'Should preserve integrity from input' + ); + } finally { + try { + unlinkSync(tempFile); + } catch { + // Ignore cleanup errors + } + } + }); + + test('rejects invalid JSON in --list file', () => { + const invalidFile = join(tmpdir(), `flatcover-invalid-${Date.now()}.json`); + writeFileSync(invalidFile, 'not valid json'); + + try { + assert.throws( + () => runFlatcover(`--list ${invalidFile} --cover`), + /Error/, + 'Should throw on invalid JSON' + ); + } finally { + try { + unlinkSync(invalidFile); + } catch { + // Ignore cleanup errors + } + } + }); + + test('rejects --list file without name/version 
fields', () => { + const invalidFile = join(tmpdir(), `flatcover-missing-fields-${Date.now()}.json`); + writeFileSync(invalidFile, JSON.stringify([{ foo: 'bar' }])); + + try { + assert.throws( + () => runFlatcover(`--list ${invalidFile} --cover`), + /name.*version|version.*name/i, + 'Should require name and version fields' + ); + } finally { + try { + unlinkSync(invalidFile); + } catch { + // Ignore cleanup errors + } + } + }); +}); + +describe('flatcover stdin input (- argument)', () => { + test('reads NDJSON from stdin', () => { + const ndjson = '{"name":"lodash","version":"4.17.21"}\n{"name":"express","version":"4.18.2"}'; + const output = runFlatcover('- --cover --json', { input: ndjson }); + const data = JSON.parse(output); + + assert.ok(Array.isArray(data), 'Output should be JSON array'); + assert.equal(data.length, 2, 'Should have 2 results'); + + const names = data.map(r => r.name).sort(); + assert.deepEqual(names, ['express', 'lodash'], 'Should have correct packages'); + }); + + test('checks coverage with stdin input', () => { + const ndjson = '{"name":"lodash","version":"4.17.21"}'; + const output = runFlatcover('- --cover --json', { input: ndjson }); + const data = JSON.parse(output); + + assert.equal(data.length, 1, 'Should have 1 result'); + assert.equal(data[0].name, 'lodash', 'Should have lodash'); + assert.equal(data[0].present, true, 'lodash should be present'); + }); + + test('outputs CSV format with stdin input', () => { + const ndjson = '{"name":"lodash","version":"4.17.21"}'; + const output = runFlatcover('- --cover', { input: ndjson }); + const lines = output.trim().split('\n'); + + assert.equal(lines[0], 'package,version,present', 'Should have CSV header'); + assert.equal(lines.length, 2, 'Should have header + 1 data row'); + }); + + test('stdin with --full preserves integrity field', () => { + const ndjson = '{"name":"lodash","version":"4.17.21","integrity":"sha512-test-hash"}'; + const output = runFlatcover('- --cover --full --json', { 
input: ndjson }); + const data = JSON.parse(output); + + assert.equal(data.length, 1, 'Should have 1 result'); + assert.equal(data[0].integrity, 'sha512-test-hash', 'Should preserve integrity'); + }); + + test('skips empty lines in stdin NDJSON', () => { + const ndjson = + '{"name":"lodash","version":"4.17.21"}\n\n{"name":"express","version":"4.18.2"}\n'; + const output = runFlatcover('- --cover --json', { input: ndjson }); + const data = JSON.parse(output); + + assert.equal(data.length, 2, 'Should have 2 results (empty lines skipped)'); + }); + + test('rejects invalid JSON on stdin', () => { + const invalidNdjson = 'not valid json'; + assert.throws( + () => runFlatcover('- --cover', { input: invalidNdjson }), + /Invalid JSON|Error/, + 'Should throw on invalid JSON' + ); + }); + + test('rejects stdin without name/version fields', () => { + const invalidNdjson = '{"foo":"bar"}'; + assert.throws( + () => runFlatcover('- --cover', { input: invalidNdjson }), + /name.*version|version.*name/i, + 'Should require name and version fields' + ); + }); +}); + +describe('flatcover input source validation', () => { + const testListFile = join(tmpdir(), `flatcover-validation-${Date.now()}.json`); + + before(() => { + writeFileSync(testListFile, JSON.stringify([{ name: 'lodash', version: '4.17.21' }])); + }); + + after(() => { + try { + unlinkSync(testListFile); + } catch { + // Ignore cleanup errors + } + }); + + test('rejects combining lockfile and --list', () => { + assert.throws( + () => runFlatcover(`${lockfilePath} --list ${testListFile} --cover`), + /Cannot use both|multiple input/i, + 'Should reject lockfile + --list' + ); + }); + + test('rejects --workspace with --list', () => { + assert.throws( + () => runFlatcover(`--list ${testListFile} --workspace packages/core --cover`), + /workspace.*lockfile/i, + 'Should reject --workspace with --list' + ); + }); + + test('rejects --workspace with stdin', () => { + const ndjson = '{"name":"lodash","version":"4.17.21"}'; + 
assert.throws( + () => runFlatcover('- --workspace packages/core --cover', { input: ndjson }), + /workspace.*lockfile/i, + 'Should reject --workspace with stdin' + ); + }); +});