diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..6460fdd --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,39 @@ +--- +name: Feature Request / Bug Fix +about: Common template for pr and bugfix +title: '[FEAT/BUG]' +--- + +## Type of Change + +- [ ] 🚀 New feature (non-breaking change which adds functionality) +- [ ] 🐛 Bug fix (non-breaking change which fixes an issue) +- [ ] ⚠️ Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] ⚡ Performance improvement +- [ ] 📝 Documentation update +- [ ] 🔧 Refactoring (no functional changes, no api changes) +- [ ] 🧪 Tests + +## Description + +[Please describe the background, possible causes, how to reproduce the issue (code snippets or repo links are appreciated), and any necessary solutions. For feature requests, explain the motivation and use case.] + +## Environment + +- **OS:** [e.g. macOS, Windows, Linux] +- **Node.js Version:** [e.g. v18.16.0] +- **hyper-fs Version:** [e.g. 0.0.1] + +## Related Issues: + +[List the issue numbers related to this issue, e.g. #123] + +## Benchmarks + +_(Benchmarks show that...)_ + +## Checklist + +- [ ] I have searched existing issues to ensure this is not a duplicate +- [ ] I have provided a minimal reproduction (for bugs) +- [ ] I have run `pnpm test` and passed all the test sample diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index f2a6f90..8ad7c18 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -152,7 +152,7 @@ jobs: - name: Install dependencies run: pnpm install - name: Download artifacts - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v4 with: name: bindings-${{ matrix.settings.target }} path: . @@ -208,7 +208,7 @@ jobs: pnpm config set supportedArchitectures.libc "[\"current\", \"musl\", \"gnu\"]" --json pnpm install - name: Download artifacts - uses: actions/download-artifact@v6 + uses: actions/download-artifact@v4 with: name: bindings-${{ matrix.target }} path: . @@ -252,7 +252,7 @@ jobs: - name: create npm dirs run: pnpm napi create-npm-dirs - name: Download all artifacts - uses: actions/download-artifact@v4 # v4 is standard, v6 is user specific maybe? keeping consistent with other edits + uses: actions/download-artifact@v4 with: path: artifacts - name: Move artifacts diff --git a/.gitignore b/.gitignore index 60b522b..e62a369 100644 --- a/.gitignore +++ b/.gitignore @@ -127,4 +127,11 @@ Cargo.lock !.yarn/releases !.yarn/sdks !.yarn/versions -/npm \ No newline at end of file +/npm + + +# vibe coding 🤓 +.cursor/ + +# 本地开发备忘,不进入版本控制 +TODO.md diff --git a/.husky/pre-commit b/.husky/pre-commit index fab6428..cb2c84d 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1 @@ -#!/bin/sh -. "$(dirname "$0")/_/husky.sh" - pnpm lint-staged diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..b63072b --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,499 @@ +# 贡献指南 + +欢迎参与 hyper-fs 开发!本文档将引导你从零开始搭建环境、理解项目架构、实现新 API、编写测试,直到提交一个完整的 PR。 + +## 目录 + +- [环境准备](#环境准备) +- [项目架构](#项目架构) +- [开发一个新 API 的完整流程](#开发一个新-api-的完整流程) +- [参考 Node.js 源码](#参考-nodejs-源码) +- [编写 Rust 实现](#编写-rust-实现) +- [性能优化:并行化](#性能优化并行化) +- [编写测试](#编写测试) +- [运行性能基准测试](#运行性能基准测试) +- [代码风格与提交规范](#代码风格与提交规范) +- [CI 流程](#ci-流程) + +--- + +## 环境准备 + +### 必备工具 + +| 工具 | 版本要求 | 用途 | +| ----------- | ------------------------- | ------------------ | +| **Node.js** | >= 20 | 运行测试和构建脚本 | +| **pnpm** | >= 9 | 包管理器 | +| **Rust** | stable (通过 rustup 安装) | 编译原生模块 | +| **rustup** | 最新 | Rust 工具链管理 | + +### 初始化步骤 + +```bash +# 1. 克隆仓库 +git clone +cd hyper-fs + +# 2. 确保 Rust 工具链就绪 +rustup default stable + +# 3. 安装 Node.js 依赖 +pnpm install + +# 4. 构建原生模块(debug 模式,用于开发) +pnpm build:debug + +# 5. 运行测试,确认环境正常 +pnpm test +``` + +> **注意**:始终使用 `package.json` 中定义的脚本命令,不要直接跑 `cargo build` 或 `napi build`。这是因为 napi-rs 需要特定的参数来生成正确的 `.node` 二进制和类型声明。 + +### 常用命令速查 + +```bash +pnpm build:debug # 开发构建(不优化,编译快) +pnpm build # 发布构建(开启 LTO,编译慢但产物更快) +pnpm test # 运行所有测试(AVA) +pnpm bench # 运行所有基准测试 +pnpm bench readdir # 只运行 readdir 的基准测试 +pnpm lint # 代码检查(oxlint) +pnpm format # 格式化所有代码(Prettier + cargo fmt + taplo) +``` + +--- + +## 项目架构 + +``` +hyper-fs/ +├── src/ # Rust 源码(核心实现) +│ ├── lib.rs # 模块注册入口 +│ ├── types.rs # 共享类型(Dirent, Stats) +│ ├── utils.rs # 工具函数(文件类型判断等) +│ ├── readdir.rs # readdir / readdirSync +│ ├── stat.rs # stat / lstat +│ ├── read_file.rs # readFile / readFileSync +│ ├── write_file.rs # writeFile / appendFile +│ ├── cp.rs # cp / cpSync(递归复制,支持并发) +│ └── ... # 每个 API 一个文件 +├── __test__/ # 测试文件(TypeScript, AVA 框架) +│ ├── readdir.spec.ts +│ ├── stat.spec.ts +│ └── ... +├── benchmark/ # 性能基准测试 +│ ├── bench.ts # 基准测试入口(自动发现并运行) +│ ├── readdir.ts # readdir 性能对比 +│ ├── glob.ts # glob 性能对比 +│ ├── stat.ts # stat / lstat 性能对比 +│ ├── read_file.ts # readFile 性能对比(多种文件大小) +│ ├── write_file.ts # writeFile / appendFile 性能对比 +│ ├── copy_file.ts # copyFile 性能对比 +│ ├── exists.ts # exists / access 性能对比 +│ ├── mkdir.ts # mkdir 性能对比 +│ ├── rm.ts # rm 性能对比(含并发) +│ └── cp.ts # cp 性能对比(含并发,树形/平铺目录) +├── reference/ # Node.js fs 模块源码参考 +│ ├── fs.js # Node.js 主 fs 模块 +│ └── internal/fs/ # Node.js 内部实现 +├── index.js # napi-rs 自动生成的 JS 加载器 +├── index.d.ts # napi-rs 自动生成的类型声明 +├── Cargo.toml # Rust 依赖配置 +└── package.json # Node.js 项目配置 +``` + +### 关键技术栈 + +- **napi-rs** — Rust ↔ Node.js 桥接层,通过宏自动生成 JS 绑定 +- **jwalk** — 并行目录遍历(用于 readdir recursive) +- **ignore** — glob 模式匹配 + .gitignore 支持 +- **rayon** — 数据并行处理(用于 rm concurrency) +- **AVA** — 测试框架(TypeScript, ESM) +- **mitata** — 微基准测试库 + +--- + +## 开发一个新 API 的完整流程 + +以实现 `symlink` 为例,完整走一遍流程。 + +### 第一步:参考 Node.js 源码 + +在 `reference/` 目录下查阅 Node.js 原始实现,理解: + +1. **函数签名**:参数类型、可选项、返回值 +2. **边界行为**:空路径怎么处理?不存在的文件报什么错?权限不足呢? +3. **错误格式**:Node.js 使用 `ENOENT: no such file or directory, symlink 'xxx' -> 'yyy'` 这样的格式 + +```bash +# 查看 Node.js 中 symlink 的实现 +# reference/fs.js 搜索 "function symlink" +# reference/internal/fs/promises.js 搜索 "async function symlink" +``` + +### 第二步:创建 Rust 源文件 + +在 `src/` 下创建 `symlink.rs`,遵循以下模式: + +```rust +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +// 1. 内部实现函数(不暴露给 JS) +fn symlink_impl(target: String, path: String) -> Result<()> { + // 实际逻辑... + // 错误格式模拟 Node.js: + // "ENOENT: no such file or directory, symlink 'target' -> 'path'" + Ok(()) +} + +// 2. 同步版本 +#[napi(js_name = "symlinkSync")] +pub fn symlink_sync(target: String, path: String) -> Result<()> { + symlink_impl(target, path) +} + +// 3. 异步版本(通过 AsyncTask 包装) +pub struct SymlinkTask { + pub target: String, + pub path: String, +} + +impl Task for SymlinkTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + symlink_impl(self.target.clone(), self.path.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "symlink")] +pub fn symlink(target: String, path: String) -> AsyncTask { + AsyncTask::new(SymlinkTask { target, path }) +} +``` + +### 代码模式要点 + +- **Options 结构体**:用 `#[napi(object)]` + `Option` 字段 +- **返回多态类型**:用 `Either`(如返回 `string[] | Dirent[]`) +- **错误前缀**:始终模拟 Node.js 格式(`ENOENT:`、`EACCES:`、`EEXIST:` 等) +- **平台差异**:用 `#[cfg(unix)]` / `#[cfg(not(unix))]` 处理 + +### 第三步:注册模块 + +编辑 `src/lib.rs`,按字母序添加: + +```rust +pub mod symlink; // 在 mod 声明区 +pub use symlink::*; // 在 use 声明区 +``` + +### 第四步:构建验证 + +```bash +pnpm build:debug +``` + +构建成功后 `index.d.ts` 会自动更新,新函数的类型声明会自动生成。 + +--- + +## 参考 Node.js 源码 + +`reference/` 目录包含从 Node.js 仓库复制的关键文件: + +| 文件 | 内容 | +| ----------------------------------- | ----------------------------------------------- | +| `reference/fs.js` | 所有 fs API 的回调/同步实现,是最重要的参考 | +| `reference/internal/fs/utils.js` | Stats 类构造、参数校验、错误处理、常量定义 | +| `reference/internal/fs/promises.js` | Promise 版本的实现(我们的 async 版本参考这个) | +| `reference/internal/fs/dir.js` | `opendir` / `Dir` 类实现 | +| `reference/internal/fs/watchers.js` | `watch` / `watchFile` 实现 | + +**使用方法**:实现任何 API 前,先在对应文件中搜索函数名,理解其完整行为——特别是边界情况和错误处理。 + +--- + +## 性能优化:并行化 + +hyper-fs 的核心优势是利用 Rust 的并行能力。以下是常用的并行化手段: + +### 1. jwalk — 并行目录遍历 + +用于 `readdir` 的递归模式: + +```rust +use jwalk::{Parallelism, WalkDir}; + +let walk = WalkDir::new(path) + .parallelism(Parallelism::RayonNewPool(concurrency)); +``` + +### 2. rayon — 数据并行 + +用于 `rm` 的并发删除: + +```rust +use rayon::prelude::*; + +entries.par_iter().try_for_each(|entry| { + remove_recursive(&entry.path(), opts) +})?; +``` + +### 3. ignore crate — 并行 glob + +用于 `glob` 的多线程匹配: + +```rust +use ignore::WalkBuilder; + +let mut builder = WalkBuilder::new(&cwd); +builder + .overrides(overrides) + .threads(concurrency); // 一行开启多线程 + +builder.build_parallel().run(/* ... */); +``` + +### 设计原则 + +- `concurrency` 选项默认值合理(通常 4 或 auto),用户可覆盖 +- 低文件数量时并行开销可能大于收益,需要 benchmark 验证 +- 使用 `Arc>>` 收集并行结果,注意锁粒度 + +--- + +## 编写测试 + +### 测试文件位置 + +每个 API 对应一个测试文件:`__test__/.spec.ts` + +### 测试框架 + +使用 [AVA](https://github.com/avajs/ava),TypeScript 通过 `@oxc-node/core` 编译。测试以 ESM 模式运行,**不能使用 `require()`**,必须使用 `import`。 + +### 测试结构模板 + +```typescript +import test from 'ava' +import { symlinkSync, symlink } from '../index.js' +import { existsSync, mkdirSync, readlinkSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +// 辅助函数:创建临时目录 +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-symlink-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +// ===== 同步版本测试 ===== + +test('symlinkSync: should create a symbolic link', (t) => { + // 测试正常功能 +}) + +test('symlinkSync: should throw on non-existent target', (t) => { + // 测试错误处理 + t.throws(() => symlinkSync('/no/such/path', dest), { message: /ENOENT/ }) +}) + +// ===== 异步版本测试 ===== + +test('symlink: async should create a symbolic link', async (t) => { + await symlink(target, dest) + t.true(existsSync(dest)) +}) + +// ===== 双跑对比测试(关键!)===== + +test('symlinkSync: should match node:fs behavior', (t) => { + const nodeResult = nodeFs.readlinkSync(link) + const hyperResult = readlinkSync(link) + t.is(hyperResult, nodeResult) +}) +``` + +### 三类必须覆盖的测试 + +#### 1. 功能测试 + +验证 API 在正常场景下行为正确,sync 和 async 各一组。 + +#### 2. 双跑对比测试 + +同时调用 `node:fs` 和 `hyper-fs`,对比返回值。这是确保行为一致性的关键: + +```typescript +import * as nodeFs from 'node:fs' +import { statSync } from '../index.js' + +test('statSync: should match node:fs stat values', (t) => { + const nodeStat = nodeFs.statSync('./package.json') + const hyperStat = statSync('./package.json') + + t.is(hyperStat.size, nodeStat.size) + t.is(hyperStat.mode, nodeStat.mode) + t.is(hyperStat.isFile(), nodeStat.isFile()) + t.is(hyperStat.isDirectory(), nodeStat.isDirectory()) +}) +``` + +#### 3. 错误处理测试 + +验证错误消息格式与 Node.js 一致(`ENOENT`、`EACCES`、`EEXIST` 等): + +```typescript +test('should throw ENOENT on missing file', (t) => { + t.throws(() => someSync('./no-such-file'), { message: /ENOENT/ }) +}) + +test('async should throw ENOENT on missing file', async (t) => { + await t.throwsAsync(async () => await someAsync('./no-such-file'), { message: /ENOENT/ }) +}) +``` + +### 运行测试 + +```bash +pnpm test # 运行全部测试 +npx ava __test__/stat.spec.ts # 只运行 stat 的测试 +``` + +--- + +## 运行性能基准测试 + +### 基准测试结构 + +基准测试位于 `benchmark/` 目录。纯读操作(stat、readFile、exists 等)使用 [mitata](https://github.com/evanwashere/mitata) 库获得精确的微基准数据;破坏性/有副作用的操作(writeFile、copyFile、mkdir、rm)使用手动迭代 + `process.hrtime` 测量,每次迭代前重新搭建测试数据。 + +### 已有的基准测试 + +| 文件 | 覆盖 API | 模式 | +| --------------- | ---------------------------------------------------------- | -------- | +| `readdir.ts` | readdir(names / withFileTypes / recursive / concurrency) | mitata | +| `glob.ts` | glob vs node-glob vs fast-glob | mitata | +| `stat.ts` | stat / lstat / batch stat | mitata | +| `read_file.ts` | readFile(11B / 64KB / 4MB, Buffer / utf8) | mitata | +| `exists.ts` | exists / access / batch exists | mitata | +| `write_file.ts` | writeFile / appendFile(多种大小) | 手动迭代 | +| `copy_file.ts` | copyFile(11B / 64KB / 4MB) | 手动迭代 | +| `mkdir.ts` | mkdir(单层 / recursive / 已存在) | 手动迭代 | +| `rm.ts` | rm(flat / deep / tree + concurrency) | 手动迭代 | + +### 运行方式 + +```bash +pnpm bench # 运行所有基准测试 +pnpm bench readdir # 只运行包含 "readdir" 的基准 +pnpm bench stat # 只运行 stat 基准 +pnpm bench read_file # 只运行 readFile 基准 +pnpm bench glob # 只运行 glob 基准 +``` + +### 编写新的基准测试 + +创建 `benchmark/.ts`,按以下模板: + +```typescript +import { run, bench, group } from 'mitata' +import * as fs from 'node:fs' +import { someSync } from '../index.js' + +// 对标 Node.js 原生实现 +group('Some API', () => { + bench('Node.js', () => fs.someSync(args)).baseline() + bench('Hyper-FS', () => someSync(args)) +}) + +// 如果有并发选项,做并发对比 +group('Hyper-FS Concurrency', () => { + bench('Default', () => someSync(args)).baseline() + bench('4 Threads', () => someSync(args, { concurrency: 4 })) + bench('8 Threads', () => someSync(args, { concurrency: 8 })) +}) + +await run({ colors: true }) +``` + +### 基准测试要点 + +- **必须用 release 构建**:`pnpm build` 而不是 `pnpm build:debug`,否则性能数据没有参考意义 +- **Baseline 标记**:用 `.baseline()` 标记 Node.js 原生实现作为基准线 +- **大数据集**:尽量用 `node_modules` 等真实大目录做测试素材 +- **预热**:mitata 自带预热机制,手动 bench 时记得先跑一次 warmup + +--- + +## 代码风格与提交规范 + +### Rust 代码 + +- 缩进:2 空格(配置在 `rustfmt.toml`) +- 格式化:`pnpm format:rs`(等价于 `cargo fmt`) +- Lint:`cargo clippy`(CI 中自动执行) +- `#![deny(clippy::all)]` 已在 `lib.rs` 中启用 + +### TypeScript / JavaScript + +- 格式化:`pnpm format:prettier` +- 规则:120 字符宽、无分号、单引号、尾逗号 +- Lint:`pnpm lint`(oxlint) + +### 提交流程 + +```bash +# 1. 创建分支 +git checkout -b feat/add-symlink + +# 2. 开发 + 测试 +pnpm build:debug +pnpm test + +# 3. 格式化 +pnpm format + +# 4. 提交(husky + lint-staged 会自动格式化暂存文件) +git add . +git commit -m "feat: add symlink/symlinkSync" + +# 5. 性能测试(PR 中附上结果) +pnpm build +pnpm bench +``` + +### PR Checklist + +- [ ] 在 `src/` 下创建了对应的 `.rs` 文件 +- [ ] 在 `src/lib.rs` 中注册了新模块 +- [ ] `pnpm build:debug` 编译通过,零 warning +- [ ] 在 `__test__/` 下编写了测试(功能 + 双跑对比 + 错误处理) +- [ ] `pnpm test` 全部通过 +- [ ] 更新了 `README.md` 和 `README.zh-CN.md` 的 Roadmap 状态 +- [ ] (如适用)在 `benchmark/` 下编写了性能测试并附上结果 + +--- + +## CI 流程 + +GitHub Actions 会在 push / PR 时自动执行: + +1. **Lint** — `oxlint` + `cargo fmt --check` + `cargo clippy` +2. **Build** — 跨平台编译(macOS x64/arm64, Windows x64, Linux x64) +3. **Test** — 在 macOS / Windows / Linux 上运行测试(Node 20 & 22) +4. **Publish** — 版本 tag 触发自动发布到 npm + +本地开发只需关注 `pnpm build:debug` + `pnpm test`,CI 会处理跨平台验证。 diff --git a/Cargo.toml b/Cargo.toml index 8d235ca..f7bc9f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] -authors = ["LongYinan "] +authors = [] edition = "2021" name = "hyper_fs" version = "0.1.0" @@ -9,14 +9,19 @@ version = "0.1.0" crate-type = ["cdylib"] [dependencies] +chrono = { version = "0.4", features = ["clock"] } +ignore = "0.4.25" jwalk = "0.8.1" -napi = "3.0.0" +napi = { version = "3.0.0", features = ["chrono_date"] } napi-derive = "3.4" rayon = "1.11.0" remove_dir_all = "1.0.0" serde = "1.0.228" walkdir = "2.5.0" +[target.'cfg(unix)'.dependencies] +libc = "0.2" + [build-dependencies] napi-build = "2" diff --git a/PR_TEMPLATE.md b/PR_TEMPLATE.md deleted file mode 100644 index 8741969..0000000 --- a/PR_TEMPLATE.md +++ /dev/null @@ -1,24 +0,0 @@ -## Description - -This PR optimizes/implements.... - -## Changes - -- **Feature 1**: - - Desc1... - - Desc2... - -- **Feature 2**: - - Desc1... - - Desc2... - -## Benchmarks - -_(Benchmarks show that...)_ - -## Checklist - -- [ ] Code compiles and passes linter (`cargo check`, `npm run lint`) -- [ ] Added/Updated tests in `__test__/xxx.spec.ts` -- [ ] Verified performance improvements with `benchmark/xxx.ts` -- [ ] Updated type definitions diff --git a/README.md b/README.md index 009cf62..eaf23bc 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ A high-performance, drop-in replacement for Node.js fs module, powered by Rust.

-## Installation (⚠️ Not Ready Yet) +## Installation ```bash npm install hyper-fs @@ -35,7 +35,7 @@ We are rewriting `fs` APIs one by one. ```ts path: string; // ✅ options?: { - encoding?: string; // ❌ + encoding?: string; // 🚧 ('utf8' default; 'buffer' not supported) withFileTypes?: boolean; // ✅ recursive?: boolean; // ✅ concurrency?: number; // ✨ @@ -53,19 +53,79 @@ We are rewriting `fs` APIs one by one. ### `readFile` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + flag?: string; // ✅ (r, r+, w+, a+, etc.) + }; + ``` +- **Return Type**: `string | Buffer` ### `writeFile` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string; // ✅ + data: string | Buffer; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + mode?: number; // ✅ + flag?: string; // ✅ (w, wx, a, ax) + }; + ``` + +### `appendFile` + +- **Node.js Arguments**: + ```ts + path: string; // ✅ + data: string | Buffer; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + mode?: number; // ✅ + flag?: string; // ✅ + }; + ``` ### `copyFile` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + src: string; // ✅ + dest: string; // ✅ + mode?: number; // ✅ (COPYFILE_EXCL) + ``` + +### `cp` + +- **Node.js Arguments** (Node 16.7+): + ```ts + src: string; // ✅ + dest: string; // ✅ + options?: { + recursive?: boolean; // ✅ + force?: boolean; // ✅ (default: true) + errorOnExist?: boolean; // ✅ + preserveTimestamps?: boolean; // ✅ + dereference?: boolean; // ✅ + verbatimSymlinks?: boolean; // ✅ + concurrency?: number; // ✨ + }; + ``` ### `mkdir` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string; // ✅ + options?: { + recursive?: boolean; // ✅ + mode?: number; // ✅ + }; + ``` +- **Return Type**: `string | undefined` (first created path when recursive) ### `rm` @@ -74,28 +134,39 @@ We are rewriting `fs` APIs one by one. path: string; // ✅ options?: { force?: boolean; // ✅ - maxRetries?: number; // ❌ + maxRetries?: number; // ✅ recursive?: boolean; // ✅ - retryDelay?: number; // ❌ + retryDelay?: number; // ✅ (default: 100ms) concurrency?: number; // ✨ }; ``` -- **Return Type**: - ```ts - void - ``` ### `rmdir` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` ### `stat` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` +- **Return Type**: `Stats` + - Numeric fields: `dev`, `mode`, `nlink`, `uid`, `gid`, `rdev`, `blksize`, `ino`, `size`, `blocks`, `atimeMs`, `mtimeMs`, `ctimeMs`, `birthtimeMs` + - **Date fields**: `atime`, `mtime`, `ctime`, `birthtime` → `Date` objects ✅ + - Methods: `isFile()`, `isDirectory()`, `isSymbolicLink()`, ... +- **Error distinction**: `ENOENT` vs `EACCES` ✅ ### `lstat` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` +- **Return Type**: `Stats` ### `fstat` @@ -103,7 +174,19 @@ We are rewriting `fs` APIs one by one. ### `access` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string; // ✅ + mode?: number; // ✅ (F_OK, R_OK, W_OK, X_OK) + ``` + +### `exists` + +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` +- **Return Type**: `boolean` ### `open` @@ -119,39 +202,108 @@ We are rewriting `fs` APIs one by one. ### `unlink` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` ### `rename` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + oldPath: string // ✅ + newPath: string // ✅ + ``` ### `readlink` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` +- **Return Type**: `string` ### `realpath` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + ``` +- **Return Type**: `string` ### `chmod` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + mode: number // ✅ + ``` ### `chown` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + uid: number // ✅ + gid: number // ✅ + ``` ### `utimes` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string // ✅ + atime: number // ✅ + mtime: number // ✅ + ``` ### `truncate` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + path: string; // ✅ + len?: number; // ✅ + ``` -### `appendFile` +### `glob` -- **Status**: ❌ +- **Node.js Arguments**: + ```ts + pattern: string; // ✅ + options?: { + cwd?: string; // ✅ + withFileTypes?: boolean; // ✅ + exclude?: string[]; // ✅ + concurrency?: number; // ✨ + gitIgnore?: boolean; // ✨ + }; + ``` + +### `symlink` + +- **Node.js Arguments**: + ```ts + target: string // ✅ + path: string // ✅ + type?: 'file' | 'dir' | 'junction' // ✅ (Windows only, ignored on Unix) + ``` + +### `link` + +- **Node.js Arguments**: + ```ts + existingPath: string // ✅ + newPath: string // ✅ + ``` + +### `mkdtemp` + +- **Node.js Arguments**: + ```ts + prefix: string // ✅ + ``` +- **Return Type**: `string` +- Uses OS-level random source (`/dev/urandom` on Unix, `BCryptGenRandom` on Windows) with up to 10 retries ✅ ### `watch` @@ -160,9 +312,9 @@ We are rewriting `fs` APIs one by one. ## Usage ```ts -import { readdir } from 'hyper-fs' +import { readdir, stat, readFile, writeFile, mkdir, rm } from 'hyper-fs' -// Basic usage +// Read directory const files = await readdir('./src') // Recursive with file types @@ -170,8 +322,97 @@ const entries = await readdir('./src', { recursive: true, withFileTypes: true, }) + +// Read / write files +const content = await readFile('./package.json', { encoding: 'utf8' }) +await writeFile('./output.txt', 'hello world') + +// File stats +const s = await stat('./package.json') +console.log(s.size, s.isFile()) + +// Create directory +await mkdir('./new-dir', { recursive: true }) + +// Remove +await rm('./temp', { recursive: true, force: true }) ``` +## Benchmarks + +> Tested on Apple Silicon (arm64), Node.js 24.0.2, release build with LTO. +> Run `pnpm build && pnpm bench` to reproduce. + +### Where Hyper-FS Shines + +These are the scenarios where Rust's parallelism and zero-copy I/O make a real difference: + +| Scenario | Node.js | Hyper-FS | Speedup | +| ------------------------------------------------ | --------- | -------- | --------- | +| `readdir` recursive (node_modules, ~30k entries) | 281 ms | 23 ms | **12x** | +| `glob` recursive (`**/*.rs`) | 25 ms | 1.46 ms | **17x** | +| `glob` recursive vs fast-glob | 102 ms | 1.46 ms | **70x** | +| `copyFile` 4 MB | 4.67 ms | 0.09 ms | **50x** | +| `readFile` 4 MB utf8 | 1.86 ms | 0.92 ms | **2x** | +| `readFile` 64 KB utf8 | 42 µs | 18 µs | **2.4x** | +| `rm` 2000 files (4 threads) | 92 ms | 53 ms | **1.75x** | +| `access` R_OK (directory) | 4.18 µs | 1.55 µs | **2.7x** | +| `cp` 500-file flat dir (4 threads) | 86.45 ms | 32.88 ms | **2.6x** | +| `cp` tree dir ~363 nodes (4 threads) | 108.73 ms | 46.88 ms | **2.3x** | + +### On Par with Node.js + +Single-file operations have a ~0.3 µs napi bridge overhead, making them roughly equivalent: + +| Scenario | Node.js | Hyper-FS | Ratio | +| -------------------------- | ------- | -------- | ----- | +| `stat` (single file) | 1.45 µs | 1.77 µs | 1.2x | +| `readFile` small (Buffer) | 8.86 µs | 9.46 µs | 1.1x | +| `writeFile` small (string) | 74 µs | 66 µs | 0.9x | +| `writeFile` small (Buffer) | 115 µs | 103 µs | 0.9x | +| `appendFile` | 30 µs | 27 µs | 0.9x | + +### Where Node.js Wins + +Lightweight built-in calls where napi overhead is proportionally large: + +| Scenario | Node.js | Hyper-FS | Note | +| ---------------------------- | ------- | -------- | --------------------------------- | +| `existsSync` (existing file) | 444 ns | 1.34 µs | Node.js internal fast path | +| `accessSync` F_OK | 456 ns | 1.46 µs | Same — napi overhead dominates | +| `writeFile` 4 MB string | 2.93 ms | 5.69 ms | Large string crossing napi bridge | + +### Parallelism + +Hyper-FS uses multi-threaded parallelism for operations that traverse the filesystem: + +| API | Library | `concurrency` option | Default | +| --------------------- | ------------------------------------------------------------------------- | -------------------- | ------- | +| `readdir` (recursive) | [jwalk](https://github.com/Byron/jwalk) | ✅ | auto | +| `glob` | [ignore](https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore) | ✅ | 4 | +| `rm` (recursive) | [rayon](https://github.com/rayon-rs/rayon) | ✅ | 1 | +| `cp` (recursive) | [rayon](https://github.com/rayon-rs/rayon) | ✅ | 1 | + +Single-file operations (`stat`, `readFile`, `writeFile`, `chmod`, etc.) are atomic syscalls — parallelism does not apply. + +### Key Takeaway + +**Hyper-FS excels at recursive / batch filesystem operations** (readdir, glob, rm, cp) where Rust's parallel walkers deliver 2–70x speedups. For single-file operations it performs on par with Node.js. The napi bridge adds a fixed ~0.3 µs overhead per call, which only matters for sub-microsecond operations like `existsSync`. + +**`cp` benchmark detail** (Apple Silicon, release build): + +| Scenario | Node.js | Hyper-FS 1T | Hyper-FS 4T | Hyper-FS 8T | +| ----------------------------------------- | --------- | ----------- | ----------- | ----------- | +| Flat dir (500 files) | 86.45 ms | 61.56 ms | 32.88 ms | 36.67 ms | +| Tree dir (breadth=4, depth=3, ~84 nodes) | 23.80 ms | 16.94 ms | 10.62 ms | 9.76 ms | +| Tree dir (breadth=3, depth=5, ~363 nodes) | 108.73 ms | 75.39 ms | 46.88 ms | 46.18 ms | + +Optimal concurrency for `cp` is **4 threads** on Apple Silicon — beyond that, I/O bandwidth becomes the bottleneck and diminishing returns set in. + +## Contributing + +See [CONTRIBUTING.md](./CONTRIBUTING.md) for the complete development guide — from environment setup, referencing Node.js source, writing Rust implementations, to testing and benchmarking. + ## License MIT diff --git a/README.zh-CN.md b/README.zh-CN.md new file mode 100644 index 0000000..932d400 --- /dev/null +++ b/README.zh-CN.md @@ -0,0 +1,420 @@ +# Hyper-FS + +[English](./README.md) | 中文 + +

+ Written in Rust + NPM Version + License +

+ +

+ 由 Rust 驱动的高性能 Node.js fs 模块「即插即用」替代品。 +

+ +## 安装 + +```bash +npm install hyper-fs +# or +pnpm add hyper-fs +``` + +## 状态与路线图 + +我们正在逐个重写 `fs` 的 API。 + +> **图例** +> +> - ✅:完全支持 +> - 🚧:部分支持 / 开发中 +> - ✨:hyper-fs 的新增能力 +> - ❌:暂未支持 + +### `readdir` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + options?: { + encoding?: string; // 🚧(默认 'utf8';'buffer' 暂不支持) + withFileTypes?: boolean; // ✅ + recursive?: boolean; // ✅ + concurrency?: number; // ✨ + }; + ``` +- **返回类型**: + ```ts + string[] + | { + name: string, // ✅ + parentPath: string, // ✅ + isDir: boolean // ✅ + }[] + ``` + +### `readFile` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + flag?: string; // ✅ (r, r+, w+, a+ 等) + }; + ``` +- **返回类型**:`string | Buffer` + +### `writeFile` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + data: string | Buffer; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + mode?: number; // ✅ + flag?: string; // ✅ (w, wx, a, ax) + }; + ``` + +### `appendFile` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + data: string | Buffer; // ✅ + options?: { + encoding?: string; // ✅ (utf8, ascii, latin1, base64, base64url, hex) + mode?: number; // ✅ + flag?: string; // ✅ + }; + ``` + +### `copyFile` + +- **Node.js 参数**: + ```ts + src: string; // ✅ + dest: string; // ✅ + mode?: number; // ✅ (COPYFILE_EXCL) + ``` + +### `cp` + +- **Node.js 参数**(Node 16.7+): + ```ts + src: string; // ✅ + dest: string; // ✅ + options?: { + recursive?: boolean; // ✅ + force?: boolean; // ✅(默认 true) + errorOnExist?: boolean; // ✅ + preserveTimestamps?: boolean; // ✅ + dereference?: boolean; // ✅ + verbatimSymlinks?: boolean; // ✅ + concurrency?: number; // ✨ + }; + ``` + +### `mkdir` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + options?: { + recursive?: boolean; // ✅ + mode?: number; // ✅ + }; + ``` +- **返回类型**:`string | undefined`(recursive 模式下返回首个创建的路径) + +### `rm` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + options?: { + force?: boolean; // ✅ + maxRetries?: number; // ✅ + retryDelay?: number; // ✅(默认 100ms) + recursive?: boolean; // ✅ + concurrency?: number; // ✨ + }; + ``` + +### `rmdir` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` + +### `stat` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` +- **返回类型**:`Stats` + - 数值字段:`dev`, `mode`, `nlink`, `uid`, `gid`, `rdev`, `blksize`, `ino`, `size`, `blocks`, `atimeMs`, `mtimeMs`, `ctimeMs`, `birthtimeMs` + - **Date 字段**:`atime`, `mtime`, `ctime`, `birthtime` → `Date` 对象 ✅ + - 方法:`isFile()`, `isDirectory()`, `isSymbolicLink()`, ... +- **错误区分**:`ENOENT` vs `EACCES` ✅ + +### `lstat` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` +- **返回类型**:`Stats` + +### `fstat` + +- **状态**:❌ + +### `access` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + mode?: number; // ✅ (F_OK, R_OK, W_OK, X_OK) + ``` + +### `exists` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` +- **返回类型**:`boolean` + +### `open` + +- **状态**:❌ + +### `opendir` + +- **状态**:❌ + +### `close` + +- **状态**:❌ + +### `unlink` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` + +### `rename` + +- **Node.js 参数**: + ```ts + oldPath: string // ✅ + newPath: string // ✅ + ``` + +### `readlink` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` +- **返回类型**:`string` + +### `realpath` + +- **Node.js 参数**: + ```ts + path: string // ✅ + ``` +- **返回类型**:`string` + +### `chmod` + +- **Node.js 参数**: + ```ts + path: string // ✅ + mode: number // ✅ + ``` + +### `chown` + +- **Node.js 参数**: + ```ts + path: string // ✅ + uid: number // ✅ + gid: number // ✅ + ``` + +### `utimes` + +- **Node.js 参数**: + ```ts + path: string // ✅ + atime: number // ✅ + mtime: number // ✅ + ``` + +### `truncate` + +- **Node.js 参数**: + ```ts + path: string; // ✅ + len?: number; // ✅ + ``` + +### `glob` + +- **Node.js 参数**: + ```ts + pattern: string; // ✅ + options?: { + cwd?: string; // ✅ + withFileTypes?: boolean; // ✅ + exclude?: string[]; // ✅ + concurrency?: number; // ✨ + gitIgnore?: boolean; // ✨ + }; + ``` + +### `symlink` + +- **Node.js 参数**: + ```ts + target: string // ✅ + path: string // ✅ + type?: 'file' | 'dir' | 'junction' // ✅(仅 Windows 有效,Unix 忽略) + ``` + +### `link` + +- **Node.js 参数**: + ```ts + existingPath: string // ✅ + newPath: string // ✅ + ``` + +### `mkdtemp` + +- **Node.js 参数**: + ```ts + prefix: string // ✅ + ``` +- **返回类型**:`string` +- 使用系统随机源(Unix: `/dev/urandom`,Windows: `BCryptGenRandom`),最多重试 10 次 ✅ + +### `watch` + +- **状态**:❌ + +## 用法 + +```ts +import { readdir, stat, readFile, writeFile, mkdir, rm } from 'hyper-fs' + +// 读取目录 +const files = await readdir('./src') + +// 递归 + 返回文件类型 +const entries = await readdir('./src', { + recursive: true, + withFileTypes: true, +}) + +// 读写文件 +const content = await readFile('./package.json', { encoding: 'utf8' }) +await writeFile('./output.txt', 'hello world') + +// 文件信息 +const s = await stat('./package.json') +console.log(s.size, s.isFile()) + +// 创建目录 +await mkdir('./new-dir', { recursive: true }) + +// 删除 +await rm('./temp', { recursive: true, force: true }) +``` + +## 性能基准 + +> 测试环境:Apple Silicon (arm64),Node.js 24.0.2,release 构建(开启 LTO)。 +> 运行 `pnpm build && pnpm bench` 可复现。 + +### Hyper-FS 显著更快的场景 + +这些场景中 Rust 的并行遍历和零拷贝 I/O 发挥了真正优势: + +| 场景 | Node.js | Hyper-FS | 加速比 | +| ------------------------------------------- | --------- | -------- | --------- | +| `readdir` 递归(node_modules,约 3 万条目) | 281 ms | 23 ms | **12x** | +| `glob` 递归(`**/*.rs`) | 25 ms | 1.46 ms | **17x** | +| `glob` 递归 vs fast-glob | 102 ms | 1.46 ms | **70x** | +| `copyFile` 4 MB | 4.67 ms | 0.09 ms | **50x** | +| `readFile` 4 MB utf8 | 1.86 ms | 0.92 ms | **2x** | +| `readFile` 64 KB utf8 | 42 µs | 18 µs | **2.4x** | +| `rm` 2000 个文件(4 线程) | 92 ms | 53 ms | **1.75x** | +| `access` R_OK(目录) | 4.18 µs | 1.55 µs | **2.7x** | +| `cp` 500 文件平铺目录(4 线程) | 86.45 ms | 32.88 ms | **2.6x** | +| `cp` 树形目录 ~363 节点(4 线程) | 108.73 ms | 46.88 ms | **2.3x** | + +### 与 Node.js 持平的场景 + +单文件操作有约 0.3 µs 的 napi 桥接开销,整体表现基本一致: + +| 场景 | Node.js | Hyper-FS | 比率 | +| ---------------------------- | ------- | -------- | ---- | +| `stat`(单文件) | 1.45 µs | 1.77 µs | 1.2x | +| `readFile` 小文件(Buffer) | 8.86 µs | 9.46 µs | 1.1x | +| `writeFile` 小文件(string) | 74 µs | 66 µs | 0.9x | +| `writeFile` 小文件(Buffer) | 115 µs | 103 µs | 0.9x | +| `appendFile` | 30 µs | 27 µs | 0.9x | + +### Node.js 更快的场景 + +极轻量级的内置调用,napi 开销占比较大: + +| 场景 | Node.js | Hyper-FS | 说明 | +| -------------------------- | ------- | -------- | ------------------------ | +| `existsSync`(已存在文件) | 444 ns | 1.34 µs | Node.js 内部有 fast path | +| `accessSync` F_OK | 456 ns | 1.46 µs | 同上——napi 开销占主导 | +| `writeFile` 4 MB string | 2.93 ms | 5.69 ms | 大字符串跨 napi 桥传输 | + +### 并行支持 + +Hyper-FS 在文件系统遍历类操作中使用多线程并行: + +| API | 并行库 | `concurrency` 选项 | 默认值 | +| ----------------- | ------------------------------------------------------------------------- | ------------------ | ------ | +| `readdir`(递归) | [jwalk](https://github.com/Byron/jwalk) | ✅ | auto | +| `glob` | [ignore](https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore) | ✅ | 4 | +| `rm`(递归) | [rayon](https://github.com/rayon-rs/rayon) | ✅ | 1 | +| `cp`(递归) | [rayon](https://github.com/rayon-rs/rayon) | ✅ | 1 | + +单文件操作(`stat`、`readFile`、`writeFile`、`chmod` 等)是原子系统调用,不适用并行化。 + +### 核心结论 + +**Hyper-FS 在递归/批量文件系统操作上表现卓越**(readdir、glob、rm、cp),Rust 的并行遍历器带来 2–70 倍加速。单文件操作与 Node.js 基本持平。napi 桥接带来固定约 0.3 µs 的每次调用开销,仅在亚微秒级操作(如 `existsSync`)中有感知。 + +**`cp` 基准详情**(Apple Silicon,release 构建): + +| 场景 | Node.js | Hyper-FS 1 线程 | Hyper-FS 4 线程 | Hyper-FS 8 线程 | +| ------------------------------------- | --------- | --------------- | --------------- | --------------- | +| 平铺目录(500 文件) | 86.45 ms | 61.56 ms | 32.88 ms | 36.67 ms | +| 树形目录(宽度=4,深度=3,~84 节点) | 23.80 ms | 16.94 ms | 10.62 ms | 9.76 ms | +| 树形目录(宽度=3,深度=5,~363 节点) | 108.73 ms | 75.39 ms | 46.88 ms | 46.18 ms | + +`cp` 的最优并发数在 Apple Silicon 上为 **4 线程**——超过后受 I/O 带宽限制,收益趋于平稳。 + +## 贡献 + +参阅 [CONTRIBUTING.md](./CONTRIBUTING.md) — 完整的开发指南,涵盖环境搭建、参考 Node.js 源码、编写 Rust 实现、测试与性能基准。 + +## 许可证 + +MIT diff --git a/__test__/access.spec.ts b/__test__/access.spec.ts new file mode 100644 index 0000000..8543ca8 --- /dev/null +++ b/__test__/access.spec.ts @@ -0,0 +1,118 @@ +import test from 'ava' +import * as nodeFs from 'node:fs' +import { accessSync, access } from '../index.js' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +const F_OK = 0 +const R_OK = 4 +const W_OK = 2 +const X_OK = 1 + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-access-${Date.now()}-${Math.random().toString(36).slice(2)}`) + nodeFs.mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + nodeFs.writeFileSync(file, 'test') + return file +} + +test('accessSync: should succeed for existing file (F_OK)', (t) => { + t.notThrows(() => accessSync('./package.json')) +}) + +test('accessSync: should succeed with explicit F_OK', (t) => { + t.notThrows(() => accessSync('./package.json', F_OK)) +}) + +test('accessSync: should succeed with R_OK', (t) => { + t.notThrows(() => accessSync('./package.json', R_OK)) +}) + +test('accessSync: should succeed with W_OK', (t) => { + t.notThrows(() => accessSync('./package.json', W_OK)) +}) + +test('accessSync: should throw on non-existent file', (t) => { + t.throws(() => accessSync('./no-such-file'), { message: /ENOENT/ }) +}) + +test('access: async should succeed for existing file', async (t) => { + await t.notThrowsAsync(async () => await access('./package.json')) +}) + +test('access: async should throw on non-existent file', async (t) => { + await t.throwsAsync(async () => await access('./no-such-file'), { message: /ENOENT/ }) +}) + +// ===== dual-run comparison ===== + +test('dual-run: accessSync should behave same as node:fs for existing file', (t) => { + let nodeErr: Error | null = null + let hyperErr: Error | null = null + + try { + nodeFs.accessSync('./package.json', R_OK) + } catch (e) { + nodeErr = e as Error + } + try { + accessSync('./package.json', R_OK) + } catch (e) { + hyperErr = e as Error + } + + t.is(hyperErr, nodeErr) +}) + +test('dual-run: accessSync should both throw for non-existent file', (t) => { + const target = './no-such-file-access-dual-' + Date.now() + let nodeThrew = false + let hyperThrew = false + + try { + nodeFs.accessSync(target) + } catch { + nodeThrew = true + } + try { + accessSync(target) + } catch { + hyperThrew = true + } + + t.is(hyperThrew, nodeThrew) +}) + +test('accessSync: X_OK should succeed for executable file', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping X_OK test on Windows') + return + } + const file = tmpFile('exec.sh') + nodeFs.chmodSync(file, 0o755) + t.notThrows(() => accessSync(file, X_OK)) +}) + +test('accessSync: should throw ENOENT (not EACCES) for missing file', (t) => { + const target = '/tmp/no-such-file-access-' + Date.now() + t.throws(() => accessSync(target), { message: /ENOENT/ }) +}) + +test('dual-run: accessSync ENOENT error message starts with ENOENT like node:fs', (t) => { + const target = '/tmp/no-such-file-access-dual-' + Date.now() + let nodeMsg = '' + let hyperMsg = '' + try { + nodeFs.accessSync(target) + } catch (e) { + nodeMsg = (e as Error).message + } + try { + accessSync(target) + } catch (e) { + hyperMsg = (e as Error).message + } + t.true(nodeMsg.startsWith('ENOENT')) + t.true(hyperMsg.startsWith('ENOENT')) +}) diff --git a/__test__/chmod.spec.ts b/__test__/chmod.spec.ts new file mode 100644 index 0000000..07a9e34 --- /dev/null +++ b/__test__/chmod.spec.ts @@ -0,0 +1,62 @@ +import test from 'ava' +import { chmodSync, chmod, statSync } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, mkdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-chmod-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + writeFileSync(file, 'test') + return file +} + +test('chmodSync: should change file permissions', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chmod test on Windows') + return + } + const file = tmpFile('chmod.txt') + chmodSync(file, 0o644) + const s = statSync(file) + t.is(s.mode & 0o777, 0o644) + + chmodSync(file, 0o755) + const s2 = statSync(file) + t.is(s2.mode & 0o777, 0o755) +}) + +test('chmodSync: should throw on non-existent file', (t) => { + t.throws(() => chmodSync('/tmp/no-such-file-' + Date.now(), 0o644), { message: /ENOENT/ }) +}) + +test('chmod: async should change permissions', async (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chmod test on Windows') + return + } + const file = tmpFile('async-chmod.txt') + await chmod(file, 0o600) + const s = statSync(file) + t.is(s.mode & 0o777, 0o600) +}) + +// ===== dual-run comparison ===== + +test('dual-run: chmodSync should produce same mode as node:fs', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chmod test on Windows') + return + } + const nodeFile = tmpFile('node-chmod.txt') + const hyperFile = tmpFile('hyper-chmod.txt') + + nodeFs.chmodSync(nodeFile, 0o755) + chmodSync(hyperFile, 0o755) + + const nodeStat = nodeFs.statSync(nodeFile) + const hyperStat = statSync(hyperFile) + t.is(hyperStat.mode & 0o777, nodeStat.mode & 0o777) +}) diff --git a/__test__/chown.spec.ts b/__test__/chown.spec.ts new file mode 100644 index 0000000..0433ee7 --- /dev/null +++ b/__test__/chown.spec.ts @@ -0,0 +1,59 @@ +import test from 'ava' +import { chownSync, chown, statSync } from '../index.js' +import { writeFileSync, mkdirSync, statSync as nodeStatSync, chownSync as nodeChownSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-chown-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + writeFileSync(file, 'test') + return file +} + +test('chownSync: should not throw on valid file with current uid/gid', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chown test on Windows') + return + } + const file = tmpFile('chown.txt') + const s = nodeStatSync(file) + t.notThrows(() => chownSync(file, s.uid, s.gid)) +}) + +test('chownSync: should match node:fs behavior', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chown test on Windows') + return + } + const file = tmpFile('compare.txt') + const s = nodeStatSync(file) + + nodeChownSync(file, s.uid, s.gid) + const nodeStat = nodeStatSync(file) + + chownSync(file, s.uid, s.gid) + const hyperStat = statSync(file) + + t.is(hyperStat.uid, nodeStat.uid) + t.is(hyperStat.gid, nodeStat.gid) +}) + +test('chownSync: should throw on non-existent file', (t) => { + t.throws(() => chownSync('/tmp/no-such-file-' + Date.now(), 0, 0), { message: /ENOENT/ }) +}) + +test('chown: async should not throw on valid file', async (t) => { + if (process.platform === 'win32') { + t.pass('Skipping chown test on Windows') + return + } + const file = tmpFile('async-chown.txt') + const s = nodeStatSync(file) + await t.notThrowsAsync(async () => await chown(file, s.uid, s.gid)) +}) + +test('chown: async should throw on non-existent file', async (t) => { + await t.throwsAsync(async () => await chown('/tmp/no-such-file-' + Date.now(), 0, 0), { message: /ENOENT/ }) +}) diff --git a/__test__/copy_file.spec.ts b/__test__/copy_file.spec.ts new file mode 100644 index 0000000..7a78043 --- /dev/null +++ b/__test__/copy_file.spec.ts @@ -0,0 +1,72 @@ +import test from 'ava' +import { copyFileSync, copyFile } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, readFileSync, existsSync, mkdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-copy-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +test('copyFileSync: should copy a file', (t) => { + const dir = tmpDir() + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'original') + + copyFileSync(src, dest) + + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'original') +}) + +test('copyFileSync: should throw on non-existent source', (t) => { + const dir = tmpDir() + t.throws(() => copyFileSync(join(dir, 'nope'), join(dir, 'dest')), { message: /ENOENT/ }) +}) + +test('copyFileSync: COPYFILE_EXCL should throw if dest exists', (t) => { + const dir = tmpDir() + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'data') + writeFileSync(dest, 'existing') + + t.throws(() => copyFileSync(src, dest, 1), { message: /EEXIST/ }) +}) + +test('copyFile: async should copy a file', async (t) => { + const dir = tmpDir() + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'hello') + + await copyFile(src, dest) + + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'hello') +}) + +// ===== dual-run comparison ===== + +test('dual-run: copyFileSync should produce identical file as node:fs', (t) => { + const dir = tmpDir() + const src = join(dir, 'src-dual.txt') + const nodeDest = join(dir, 'node-dest.txt') + const hyperDest = join(dir, 'hyper-dest.txt') + writeFileSync(src, 'dual-run copy test 你好') + + nodeFs.copyFileSync(src, nodeDest) + copyFileSync(src, hyperDest) + + const nodeContent = readFileSync(nodeDest, 'utf8') + const hyperContent = readFileSync(hyperDest, 'utf8') + t.is(hyperContent, nodeContent) + + const nodeStat = nodeFs.statSync(nodeDest) + const hyperStat = nodeFs.statSync(hyperDest) + t.is(hyperStat.size, nodeStat.size) +}) diff --git a/__test__/cp.spec.ts b/__test__/cp.spec.ts new file mode 100644 index 0000000..9245d88 --- /dev/null +++ b/__test__/cp.spec.ts @@ -0,0 +1,192 @@ +import test from 'ava' +import { cpSync, cp } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, readFileSync, existsSync, mkdirSync, symlinkSync, readdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-cp-${Date.now()}-${Math.random().toString(36).slice(2)}-${name}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +// ===== cpSync: file ===== + +test('cpSync: should copy a single file', (t) => { + const dir = tmpDir('file') + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'hello cp') + + cpSync(src, dest) + + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'hello cp') +}) + +test('cpSync: should overwrite existing file by default (force=true)', (t) => { + const dir = tmpDir('overwrite') + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'new') + writeFileSync(dest, 'old') + + cpSync(src, dest) + + t.is(readFileSync(dest, 'utf8'), 'new') +}) + +test('cpSync: errorOnExist should throw when dest exists', (t) => { + const dir = tmpDir('err-exist') + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'data') + writeFileSync(dest, 'existing') + + t.throws(() => cpSync(src, dest, { errorOnExist: true, force: false }), { message: /EEXIST/ }) +}) + +test('cpSync: force=false should not overwrite', (t) => { + const dir = tmpDir('no-force') + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'new') + writeFileSync(dest, 'old') + + cpSync(src, dest, { force: false }) + + t.is(readFileSync(dest, 'utf8'), 'old') +}) + +test('cpSync: should throw ENOENT on non-existent source', (t) => { + const dir = tmpDir('noent') + t.throws(() => cpSync(join(dir, 'nope'), join(dir, 'dest')), { message: /ENOENT/ }) +}) + +// ===== cpSync: directory ===== + +test('cpSync: should throw on directory without recursive', (t) => { + const dir = tmpDir('dir-no-rec') + const src = join(dir, 'srcdir') + mkdirSync(src) + writeFileSync(join(src, 'f.txt'), 'data') + + t.throws(() => cpSync(src, join(dir, 'destdir')), { message: /recursive/ }) +}) + +test('cpSync: recursive should copy directory tree', (t) => { + const dir = tmpDir('recursive') + const src = join(dir, 'src') + mkdirSync(join(src, 'sub'), { recursive: true }) + writeFileSync(join(src, 'a.txt'), 'aaa') + writeFileSync(join(src, 'sub', 'b.txt'), 'bbb') + + const dest = join(dir, 'dest') + cpSync(src, dest, { recursive: true }) + + t.true(existsSync(join(dest, 'a.txt'))) + t.true(existsSync(join(dest, 'sub', 'b.txt'))) + t.is(readFileSync(join(dest, 'a.txt'), 'utf8'), 'aaa') + t.is(readFileSync(join(dest, 'sub', 'b.txt'), 'utf8'), 'bbb') +}) + +test('cpSync: recursive should handle deeply nested dirs', (t) => { + const dir = tmpDir('deep') + const src = join(dir, 'src') + mkdirSync(join(src, 'a', 'b', 'c'), { recursive: true }) + writeFileSync(join(src, 'a', 'b', 'c', 'deep.txt'), 'deep') + + const dest = join(dir, 'dest') + cpSync(src, dest, { recursive: true }) + + t.is(readFileSync(join(dest, 'a', 'b', 'c', 'deep.txt'), 'utf8'), 'deep') +}) + +// ===== cpSync: preserveTimestamps ===== + +test('cpSync: preserveTimestamps should keep mtime', (t) => { + if (process.platform === 'win32') { + t.pass('Skipping timestamp test on Windows') + return + } + const dir = tmpDir('timestamps') + const src = join(dir, 'src.txt') + writeFileSync(src, 'ts test') + const pastTime = new Date('2020-01-01T00:00:00Z') + nodeFs.utimesSync(src, pastTime, pastTime) + + const dest = join(dir, 'dest.txt') + cpSync(src, dest, { preserveTimestamps: true }) + + const srcStat = nodeFs.statSync(src) + const destStat = nodeFs.statSync(dest) + t.true(Math.abs(srcStat.mtimeMs - destStat.mtimeMs) < 1000) +}) + +// ===== async cp ===== + +test('cp: async should copy a file', async (t) => { + const dir = tmpDir('async') + const src = join(dir, 'src.txt') + const dest = join(dir, 'dest.txt') + writeFileSync(src, 'async cp') + + await cp(src, dest) + + t.is(readFileSync(dest, 'utf8'), 'async cp') +}) + +test('cp: async recursive should copy directory', async (t) => { + const dir = tmpDir('async-rec') + const src = join(dir, 'src') + mkdirSync(join(src, 'sub'), { recursive: true }) + writeFileSync(join(src, 'f.txt'), 'file') + writeFileSync(join(src, 'sub', 'g.txt'), 'sub-file') + + const dest = join(dir, 'dest') + await cp(src, dest, { recursive: true }) + + t.is(readFileSync(join(dest, 'f.txt'), 'utf8'), 'file') + t.is(readFileSync(join(dest, 'sub', 'g.txt'), 'utf8'), 'sub-file') +}) + +// ===== dual-run comparison ===== + +test('dual-run: cpSync file should produce same result as node:fs', (t) => { + const dir = tmpDir('dual-file') + const src = join(dir, 'src.txt') + writeFileSync(src, 'dual cp test 你好') + + const nodeDest = join(dir, 'node-dest.txt') + const hyperDest = join(dir, 'hyper-dest.txt') + + nodeFs.cpSync(src, nodeDest) + cpSync(src, hyperDest) + + t.is(readFileSync(hyperDest, 'utf8'), readFileSync(nodeDest, 'utf8')) +}) + +test('dual-run: cpSync recursive should produce same tree as node:fs', (t) => { + const dir = tmpDir('dual-tree') + const src = join(dir, 'src') + mkdirSync(join(src, 'sub'), { recursive: true }) + writeFileSync(join(src, 'root.txt'), 'root') + writeFileSync(join(src, 'sub', 'child.txt'), 'child') + + const nodeDest = join(dir, 'node-dest') + const hyperDest = join(dir, 'hyper-dest') + + nodeFs.cpSync(src, nodeDest, { recursive: true }) + cpSync(src, hyperDest, { recursive: true }) + + const nodeFiles = readdirSync(nodeDest, { recursive: true }) as string[] + const hyperFiles = readdirSync(hyperDest, { recursive: true }) as string[] + t.deepEqual(hyperFiles.sort(), nodeFiles.sort()) + + t.is(readFileSync(join(hyperDest, 'root.txt'), 'utf8'), readFileSync(join(nodeDest, 'root.txt'), 'utf8')) + t.is( + readFileSync(join(hyperDest, 'sub', 'child.txt'), 'utf8'), + readFileSync(join(nodeDest, 'sub', 'child.txt'), 'utf8'), + ) +}) diff --git a/__test__/encoding.spec.ts b/__test__/encoding.spec.ts new file mode 100644 index 0000000..dd9e117 --- /dev/null +++ b/__test__/encoding.spec.ts @@ -0,0 +1,120 @@ +import test from 'ava' +import { readFileSync, writeFileSync } from '../index.js' +import * as nodeFs from 'node:fs' +import { mkdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string, content?: string | Buffer): string { + const dir = join(tmpdir(), `hyper-fs-test-enc-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + if (content !== undefined) { + nodeFs.writeFileSync(file, content) + } + return file +} + +// ===== readFile encoding ===== + +test('readFile: utf8 encoding', (t) => { + const file = tmpFile('utf8.txt', '你好世界 hello') + const result = readFileSync(file, { encoding: 'utf8' }) + t.is(result, '你好世界 hello') +}) + +test('readFile: utf-8 encoding (alias)', (t) => { + const file = tmpFile('utf8-alias.txt', 'test') + const result = readFileSync(file, { encoding: 'utf-8' }) + t.is(result, 'test') +}) + +test('readFile: ascii encoding', (t) => { + const file = tmpFile('ascii.txt', Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f])) + const result = readFileSync(file, { encoding: 'ascii' }) as string + t.is(result, 'Hello') +}) + +test('readFile: latin1 encoding', (t) => { + const file = tmpFile('latin1.txt', Buffer.from([0xe9, 0xe8, 0xea])) + const result = readFileSync(file, { encoding: 'latin1' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'latin1' }) + t.is(result, nodeResult) +}) + +test('readFile: hex encoding', (t) => { + const file = tmpFile('hex.txt', Buffer.from([0xde, 0xad, 0xbe, 0xef])) + const result = readFileSync(file, { encoding: 'hex' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'hex' }) + t.is(result, nodeResult) +}) + +test('readFile: base64 encoding', (t) => { + const file = tmpFile('base64.txt', 'Hello World') + const result = readFileSync(file, { encoding: 'base64' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'base64' }) + t.is(result, nodeResult) +}) + +test('readFile: base64url encoding', (t) => { + const file = tmpFile('base64url.txt', Buffer.from([0xfb, 0xff, 0xfe])) + const result = readFileSync(file, { encoding: 'base64url' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'base64url' }) + t.is(result, nodeResult) +}) + +test('readFile: unknown encoding should throw', (t) => { + const file = tmpFile('unknown.txt', 'test') + t.throws(() => readFileSync(file, { encoding: 'unknown' }), { message: /Unknown encoding/ }) +}) + +test('readFile: no encoding returns Buffer', (t) => { + const file = tmpFile('buf.txt', 'buffer test') + const result = readFileSync(file) + t.true(Buffer.isBuffer(result)) +}) + +// ===== writeFile encoding ===== + +test('writeFile: hex encoding', (t) => { + const file = tmpFile('write-hex.bin') + writeFileSync(file, 'deadbeef', { encoding: 'hex' }) + const content = nodeFs.readFileSync(file) + t.deepEqual(content, Buffer.from([0xde, 0xad, 0xbe, 0xef])) +}) + +test('writeFile: base64 encoding', (t) => { + const file = tmpFile('write-base64.txt') + writeFileSync(file, 'SGVsbG8=', { encoding: 'base64' }) + const content = nodeFs.readFileSync(file, 'utf8') + t.is(content, 'Hello') +}) + +test('writeFile: latin1 encoding', (t) => { + const file = tmpFile('write-latin1.txt') + const nodeFile = tmpFile('write-latin1-node.txt') + + writeFileSync(file, '\xe9\xe8\xea', { encoding: 'latin1' }) + nodeFs.writeFileSync(nodeFile, '\xe9\xe8\xea', { encoding: 'latin1' }) + + const hyperBuf = nodeFs.readFileSync(file) + const nodeBuf = nodeFs.readFileSync(nodeFile) + t.deepEqual(hyperBuf, nodeBuf) +}) + +// ===== dual-run encoding comparison ===== + +test('dual-run: readFile base64 should match node:fs', (t) => { + const data = 'The quick brown fox jumps over the lazy dog' + const file = tmpFile('dual-b64.txt', data) + const hyperResult = readFileSync(file, { encoding: 'base64' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'base64' }) + t.is(hyperResult, nodeResult) +}) + +test('dual-run: readFile hex should match node:fs', (t) => { + const file = tmpFile('dual-hex.txt', Buffer.from([0x00, 0x11, 0x22, 0x33, 0xff])) + const hyperResult = readFileSync(file, { encoding: 'hex' }) as string + const nodeResult = nodeFs.readFileSync(file, { encoding: 'hex' }) + t.is(hyperResult, nodeResult) +}) diff --git a/__test__/exists.spec.ts b/__test__/exists.spec.ts new file mode 100644 index 0000000..715b21b --- /dev/null +++ b/__test__/exists.spec.ts @@ -0,0 +1,34 @@ +import test from 'ava' +import * as nodeFs from 'node:fs' +import { existsSync, exists } from '../index.js' + +test('existsSync: should return true for existing file', (t) => { + t.true(existsSync('./package.json')) +}) + +test('existsSync: should return true for existing directory', (t) => { + t.true(existsSync('./src')) +}) + +test('existsSync: should return false for non-existent path', (t) => { + t.false(existsSync('./no-such-file')) +}) + +test('exists: async should return true for existing file', async (t) => { + const result = await exists('./package.json') + t.true(result) +}) + +test('exists: async should return false for non-existent file', async (t) => { + const result = await exists('./no-such-file') + t.false(result) +}) + +// ===== dual-run comparison ===== + +test('dual-run: existsSync should match node:fs.existsSync', (t) => { + const paths = ['./package.json', './src', './no-such-file', './node_modules', './Cargo.toml'] + for (const p of paths) { + t.is(existsSync(p), nodeFs.existsSync(p), `existsSync mismatch for "${p}"`) + } +}) diff --git a/__test__/glob.spec.ts b/__test__/glob.spec.ts new file mode 100644 index 0000000..1051d8d --- /dev/null +++ b/__test__/glob.spec.ts @@ -0,0 +1,185 @@ +import test from 'ava' +import { globSync, glob } from '../index.js' +import * as nodeFs from 'node:fs' +import { join } from 'path' +import { tmpdir } from 'node:os' + +const CWD = process.cwd() + +// 构造包含文件和子目录的临时目录,用于验证目录匹配行为 +function makeDirFixture(): string { + const base = join(tmpdir(), `hyper-glob-test-${Date.now()}-${Math.random().toString(36).slice(2)}`) + nodeFs.mkdirSync(join(base, 'src/sub'), { recursive: true }) + nodeFs.writeFileSync(join(base, 'src/a.ts'), '') + nodeFs.writeFileSync(join(base, 'src/b.ts'), '') + nodeFs.writeFileSync(join(base, 'src/sub/c.ts'), '') + nodeFs.mkdirSync(join(base, 'dist'), { recursive: true }) + nodeFs.writeFileSync(join(base, 'dist/out.js'), '') + return base +} + +test('globSync: should find files in current directory', (t) => { + const files = globSync('*.json', { cwd: CWD }) + t.true(files.length > 0) + t.true(files.some((f) => f.endsWith('package.json'))) +}) + +test('globSync: should match files in subdirectories', (t) => { + const files = globSync('src/*.rs', { cwd: CWD }) + t.true(files.length > 0) + t.true(files.some((f) => f.endsWith('lib.rs'))) +}) + +test('globSync: should return Dirent objects when withFileTypes is true', (t) => { + const files = globSync('src/*.rs', { cwd: CWD, withFileTypes: true }) + t.true(files.length > 0) + + const first = files[0] + if (typeof first === 'object') { + t.is(typeof first.isFile, 'function') + t.true(first.isFile()) + t.is(typeof first.name, 'string') + t.true(first.name.endsWith('.rs')) + t.is(typeof first.parentPath, 'string') + } else { + t.fail('Should return objects') + } +}) + +test('globSync: should support exclude option', (t) => { + // Should match multiple .rs files normally + const allFiles = globSync('src/*.rs', { cwd: CWD }) + t.true(allFiles.some((f) => f.endsWith('lib.rs'))) + + // Exclude lib.rs + const filteredFiles = globSync('src/*.rs', { cwd: CWD, exclude: ['lib.rs'] }) + t.true(filteredFiles.length > 0) + t.false( + filteredFiles.some((f) => f.endsWith('lib.rs')), + 'Should exclude lib.rs', + ) + t.true(filteredFiles.length < allFiles.length) +}) + +test('globSync: should respect gitIgnore (default: true)', (t) => { + // When gitIgnore: true (default), files in .gitignore are excluded. + // When gitIgnore: false, they are included. + const ignoredFiles = globSync('target/**/*.d', { cwd: CWD }) + const includedFiles = globSync('target/**/*.d', { cwd: CWD, gitIgnore: false }) + + if (includedFiles.length > 0) { + t.true(ignoredFiles.length < includedFiles.length, 'Should find fewer files when respecting gitignore') + } else { + t.pass('Target directory empty or not present, skipping gitIgnore comparison') + } +}) + +test('globSync: concurrency option should not crash', (t) => { + const files = globSync('src/**/*.rs', { cwd: CWD, concurrency: 2 }) + t.true(files.length > 0) +}) + +test('async: should work basically', async (t) => { + const files = await glob('*.json', { cwd: CWD }) + t.true(files.length > 0) + t.true(files.some((f) => f.endsWith('package.json'))) +}) + +test('async: withFileTypes', async (t) => { + const files = await glob('src/*.rs', { cwd: CWD, withFileTypes: true }) + t.true(files.length > 0) + const first = files[0] + t.is(typeof first, 'object') + t.true(first.isFile()) +}) + +test('async: should return empty array for no matches', async (t) => { + const files = await glob('non_existent_*.xyz', { cwd: CWD }) + t.true(Array.isArray(files)) + t.is(files.length, 0) +}) + +test('async: recursive match', async (t) => { + const files = await glob('**/*.rs', { cwd: CWD }) + t.true(files.length > 0) + t.true(files.some((f) => f.includes('src/lib.rs'))) +}) + +// ===== 目录匹配行为(对齐 Node.js fs.globSync)===== + +test('globSync: "src/*" should include subdirectories matching the pattern', (t) => { + const base = makeDirFixture() + // Node.js: fs.globSync('src/*') 返回 src/ 下的文件 AND 子目录 + const results = globSync('src/*', { cwd: base }) + const names = results.map((r) => r.replace(/\\/g, '/')) + t.true(names.includes('src/a.ts'), 'should include files') + t.true(names.includes('src/b.ts'), 'should include files') + t.true(names.includes('src/sub'), 'should include directories matching the pattern') + // 不应包含 dist/ 下的内容(不匹配 src/*) + t.false(names.some((n) => n.startsWith('dist'))) +}) + +test('globSync: "**/*.ts" should NOT include directories (dirs lack .ts extension)', (t) => { + const base = makeDirFixture() + const results = globSync('**/*.ts', { cwd: base }) + const names = results.map((r) => r.replace(/\\/g, '/')) + // 所有结果应以 .ts 结尾(目录不应被包含) + t.true(names.length > 0) + t.true( + names.every((n) => n.endsWith('.ts')), + `non-.ts entry found: ${names.join(', ')}`, + ) +}) + +test('globSync: "**" should include both files and directories recursively', (t) => { + const base = makeDirFixture() + const results = globSync('**', { cwd: base }) + const names = results.map((r) => r.replace(/\\/g, '/')) + // 应包含目录 src、src/sub、dist + t.true(names.includes('src'), 'should include top-level directories') + t.true(names.includes('src/sub'), 'should include nested directories') + // 也应包含文件 + t.true(names.some((n) => n.endsWith('.ts'))) +}) + +test('globSync: dir-matching result should have isDirectory()=true with withFileTypes', (t) => { + const base = makeDirFixture() + const results = globSync('src/*', { cwd: base, withFileTypes: true }) + t.true(results.length > 0) + const subDir = results.find((r) => typeof r === 'object' && r.name === 'sub') + t.truthy(subDir, 'should include sub directory as Dirent') + if (subDir && typeof subDir === 'object') { + t.true(subDir.isDirectory()) + t.false(subDir.isFile()) + } +}) + +test('dual-run: globSync "src/*" should match node:fs.globSync behavior for directories', (t) => { + const base = makeDirFixture() + // node:fs.globSync 自 v22.0.0 起稳定,对齐其目录匹配行为 + const nodeResults: string[] = [] + try { + // @ts-ignore - globSync 在旧版 Node 可能不存在 + const nodeGlob = nodeFs.globSync as ((p: string, o: object) => string[]) | undefined + if (typeof nodeGlob === 'function') { + nodeResults.push(...nodeGlob('src/*', { cwd: base })) + } + } catch { + // 旧版 Node.js 不支持 fs.globSync,跳过对比 + t.pass('node:fs.globSync not available, skipping dual-run comparison') + return + } + const hyperResults = globSync('src/*', { cwd: base }).map((r) => r.replace(/\\/g, '/')) + const nodeNorm = nodeResults.map((r) => r.replace(/\\/g, '/')) + // 检查 node.js 返回的目录条目我们也有 + const nodeDirs = nodeNorm.filter((n) => { + try { + return nodeFs.statSync(join(base, n)).isDirectory() + } catch { + return false + } + }) + for (const d of nodeDirs) { + t.true(hyperResults.includes(d), `should include directory '${d}' as node:fs does`) + } +}) diff --git a/__test__/link.spec.ts b/__test__/link.spec.ts new file mode 100644 index 0000000..23e8c73 --- /dev/null +++ b/__test__/link.spec.ts @@ -0,0 +1,84 @@ +import test from 'ava' +import { linkSync, link, statSync } from '../index.js' +import { writeFileSync, mkdirSync, existsSync, readFileSync, statSync as nodeStatSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-link-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +// ===== sync ===== + +test('linkSync: should create a hard link', (t) => { + const dir = tmpDir() + const src = join(dir, 'source.txt') + const dest = join(dir, 'hardlink.txt') + writeFileSync(src, 'hello') + + linkSync(src, dest) + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'hello') +}) + +test('linkSync: hard link should share the same inode', (t) => { + const dir = tmpDir() + const src = join(dir, 'source2.txt') + const dest = join(dir, 'hardlink2.txt') + writeFileSync(src, 'hello') + + linkSync(src, dest) + const srcStat = statSync(src) + const destStat = statSync(dest) + t.is(srcStat.ino, destStat.ino) + t.is(srcStat.nlink, 2) +}) + +test('linkSync: should throw ENOENT for non-existent source', (t) => { + const dir = tmpDir() + t.throws(() => linkSync(join(dir, 'nope.txt'), join(dir, 'link.txt')), { message: /ENOENT/ }) +}) + +test('linkSync: should throw EEXIST if dest already exists', (t) => { + const dir = tmpDir() + const src = join(dir, 'src3.txt') + const dest = join(dir, 'dest3.txt') + writeFileSync(src, 'hello') + writeFileSync(dest, 'existing') + + t.throws(() => linkSync(src, dest), { message: /EEXIST/ }) +}) + +test('linkSync: should match node:fs behavior (same inode)', (t) => { + const dir = tmpDir() + const src = join(dir, 'compare.txt') + const dest = join(dir, 'compare-link.txt') + writeFileSync(src, 'hello') + + linkSync(src, dest) + const nodeStat = nodeStatSync(dest) + const hyperStat = statSync(dest) + t.is(hyperStat.ino, nodeStat.ino) +}) + +// ===== async ===== + +test('link: async should create a hard link', async (t) => { + const dir = tmpDir() + const src = join(dir, 'async-src.txt') + const dest = join(dir, 'async-link.txt') + writeFileSync(src, 'async hello') + + await link(src, dest) + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'async hello') +}) + +test('link: async should throw ENOENT for non-existent source', async (t) => { + const dir = tmpDir() + await t.throwsAsync(async () => await link(join(dir, 'nope.txt'), join(dir, 'link.txt')), { + message: /ENOENT/, + }) +}) diff --git a/__test__/mkdir.spec.ts b/__test__/mkdir.spec.ts new file mode 100644 index 0000000..b819e6c --- /dev/null +++ b/__test__/mkdir.spec.ts @@ -0,0 +1,93 @@ +import test from 'ava' +import { mkdirSync, mkdir, rmdirSync } from '../index.js' +import * as nodeFs from 'node:fs' +import { existsSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpPath(name: string): string { + return join(tmpdir(), `hyper-fs-test-mkdir-${Date.now()}-${name}`) +} + +test('mkdirSync: should create a directory', (t) => { + const dir = tmpPath('basic') + mkdirSync(dir) + t.true(existsSync(dir)) + rmdirSync(dir) +}) + +test('mkdirSync: should throw on existing directory without recursive', (t) => { + const dir = tmpPath('existing') + mkdirSync(dir) + t.throws(() => mkdirSync(dir), { message: /EEXIST/ }) + rmdirSync(dir) +}) + +test('mkdirSync: recursive should create nested dirs', (t) => { + const dir = tmpPath('recursive') + const nested = join(dir, 'a', 'b', 'c') + mkdirSync(nested, { recursive: true }) + t.true(existsSync(nested)) + // cleanup + rmdirSync(join(dir, 'a', 'b', 'c')) + rmdirSync(join(dir, 'a', 'b')) + rmdirSync(join(dir, 'a')) + rmdirSync(dir) +}) + +test('mkdirSync: recursive should not throw if dir already exists', (t) => { + const dir = tmpPath('recursive-exists') + mkdirSync(dir) + t.notThrows(() => mkdirSync(dir, { recursive: true })) + rmdirSync(dir) +}) + +test('mkdir: async should create a directory', async (t) => { + const dir = tmpPath('async') + await mkdir(dir) + t.true(existsSync(dir)) + rmdirSync(dir) +}) + +test('mkdir: async recursive', async (t) => { + const dir = tmpPath('async-recursive') + const nested = join(dir, 'x', 'y') + await mkdir(nested, { recursive: true }) + t.true(existsSync(nested)) + rmdirSync(join(dir, 'x', 'y')) + rmdirSync(join(dir, 'x')) + rmdirSync(dir) +}) + +// ===== dual-run comparison ===== + +test('dual-run: mkdirSync recursive should create same structure as node:fs', (t) => { + const nodeDir = tmpPath('node-recursive') + const hyperDir = tmpPath('hyper-recursive') + + nodeFs.mkdirSync(join(nodeDir, 'a', 'b'), { recursive: true }) + mkdirSync(join(hyperDir, 'a', 'b'), { recursive: true }) + + t.is(existsSync(join(nodeDir, 'a', 'b')), existsSync(join(hyperDir, 'a', 'b'))) + t.is(existsSync(join(nodeDir, 'a')), existsSync(join(hyperDir, 'a'))) + + nodeFs.rmSync(nodeDir, { recursive: true }) + nodeFs.rmSync(hyperDir, { recursive: true }) +}) + +test('dual-run: mkdirSync should return first created path like node:fs', (t) => { + const nodeDir = tmpPath('node-return') + const hyperDir = tmpPath('hyper-return') + + const nodeResult = nodeFs.mkdirSync(join(nodeDir, 'a', 'b'), { recursive: true }) + const hyperResult = mkdirSync(join(hyperDir, 'a', 'b'), { recursive: true }) + + t.is(typeof hyperResult, typeof nodeResult) + if (nodeResult !== undefined && hyperResult !== undefined) { + t.true(nodeResult.endsWith('node-return')) + t.true(hyperResult.endsWith('hyper-return')) + } + + nodeFs.rmSync(nodeDir, { recursive: true }) + nodeFs.rmSync(hyperDir, { recursive: true }) +}) diff --git a/__test__/mkdtemp.spec.ts b/__test__/mkdtemp.spec.ts new file mode 100644 index 0000000..494e8ff --- /dev/null +++ b/__test__/mkdtemp.spec.ts @@ -0,0 +1,62 @@ +import test from 'ava' +import { mkdtempSync, mkdtemp } from '../index.js' +import * as nodeFs from 'node:fs' +import { existsSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +const prefix = join(tmpdir(), 'hyper-fs-test-mkdtemp-') + +// ===== sync ===== + +test('mkdtempSync: should create a temp directory and return its path', (t) => { + const dir = mkdtempSync(prefix) + t.true(typeof dir === 'string') + t.true(dir.startsWith(prefix)) + t.true(existsSync(dir)) +}) + +test('mkdtempSync: should create unique directories on each call', (t) => { + const dir1 = mkdtempSync(prefix) + const dir2 = mkdtempSync(prefix) + t.not(dir1, dir2) + t.true(existsSync(dir1)) + t.true(existsSync(dir2)) +}) + +test('mkdtempSync: should throw ENOENT for non-existent parent', (t) => { + t.throws(() => mkdtempSync('/tmp/no-such-parent-dir-999/prefix-'), { message: /ENOENT/ }) +}) + +// ===== async ===== + +test('mkdtemp: async should create a temp directory', async (t) => { + const dir = (await mkdtemp(prefix)) as string + t.true(typeof dir === 'string') + t.true(dir.startsWith(prefix)) + t.true(existsSync(dir)) +}) + +test('mkdtemp: async should throw ENOENT for non-existent parent', async (t) => { + await t.throwsAsync(async () => await mkdtemp('/tmp/no-such-parent-dir-999/prefix-'), { + message: /ENOENT/, + }) +}) + +// ===== dual-run comparison ===== + +test('dual-run: mkdtempSync should behave like node:fs.mkdtempSync', (t) => { + const nodeDir = nodeFs.mkdtempSync(prefix) + const hyperDir = mkdtempSync(prefix) + + t.true(typeof nodeDir === 'string') + t.true(typeof hyperDir === 'string') + t.true(nodeDir.startsWith(prefix)) + t.true(hyperDir.startsWith(prefix)) + t.not(nodeDir, hyperDir) + t.true(existsSync(nodeDir)) + t.true(existsSync(hyperDir)) + + t.true(nodeFs.statSync(nodeDir).isDirectory()) + t.true(nodeFs.statSync(hyperDir).isDirectory()) +}) diff --git a/__test__/read_file.spec.ts b/__test__/read_file.spec.ts new file mode 100644 index 0000000..5deeeba --- /dev/null +++ b/__test__/read_file.spec.ts @@ -0,0 +1,50 @@ +import test from 'ava' +import { readFileSync, readFile, writeFileSync } from '../index.js' +import * as nodeFs from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +test('readFileSync: should read file as Buffer by default', (t) => { + const result = readFileSync('./package.json') + t.true(Buffer.isBuffer(result)) + t.true((result as Buffer).length > 0) +}) + +test('readFileSync: should read file as string with encoding=utf8', (t) => { + const result = readFileSync('./package.json', { encoding: 'utf8' }) + t.is(typeof result, 'string') + t.true((result as string).includes('hyper-fs')) +}) + +test('readFileSync: should match node:fs readFileSync', (t) => { + const nodeResult = nodeFs.readFileSync('./package.json', 'utf8') + const hyperResult = readFileSync('./package.json', { encoding: 'utf8' }) + t.is(hyperResult, nodeResult) +}) + +test('readFileSync: should throw on non-existent file', (t) => { + t.throws(() => readFileSync('./no-such-file'), { message: /ENOENT/ }) +}) + +test('readFile: async should read file', async (t) => { + const result = await readFile('./package.json', { encoding: 'utf8' }) + t.is(typeof result, 'string') + t.true((result as string).includes('hyper-fs')) +}) + +test('readFile: async should throw on non-existent file', async (t) => { + await t.throwsAsync(async () => await readFile('./no-such-file'), { message: /ENOENT/ }) +}) + +test('dual-run: readFileSync Buffer should match node:fs byte-for-byte', (t) => { + const nodeResult = nodeFs.readFileSync('./package.json') + const hyperResult = readFileSync('./package.json') as Buffer + t.true(Buffer.isBuffer(hyperResult)) + t.deepEqual(hyperResult, nodeResult) +}) + +test('dual-run: readFileSync utf8 string should match node:fs', (t) => { + const nodeResult = nodeFs.readFileSync('./package.json', 'utf8') + const hyperResult = readFileSync('./package.json', { encoding: 'utf8' }) as string + t.is(hyperResult, nodeResult) +}) diff --git a/__test__/readdir.spec.ts b/__test__/readdir.spec.ts index b4206a2..b181434 100644 --- a/__test__/readdir.spec.ts +++ b/__test__/readdir.spec.ts @@ -1,4 +1,5 @@ import test from 'ava' +import * as nodeFs from 'node:fs' import { readdirSync, readdir } from '../index.js' test('sync: should list files in current directory (strings by default)', (t) => { @@ -25,7 +26,9 @@ test('sync: should return Dirent objects when withFileTypes is true', (t) => { const first = files[0] if (typeof first === 'object') { t.is(typeof first.name, 'string') - t.is(typeof first.isDir, 'boolean') + // Dirent in Node.js (and our implementation) uses methods, not properties for type checking + t.is(typeof first.isDirectory, 'function') + t.is(typeof first.isFile, 'function') } else { t.fail('Should return objects when withFileTypes is true') } @@ -34,12 +37,14 @@ test('sync: should return Dirent objects when withFileTypes is true', (t) => { t.truthy(packageJson, 'Result should contain package.json') if (typeof packageJson !== 'string' && packageJson) { - t.is(packageJson.isDir, false) + t.is(packageJson.isFile(), true) + t.is(packageJson.isDirectory(), false) } const srcDir = files.find((f) => typeof f !== 'string' && f.name === 'src') if (srcDir && typeof srcDir !== 'string') { - t.is(srcDir.isDir, true, 'src should be identified as a directory') + t.is(srcDir.isDirectory(), true, 'src should be identified as a directory') + t.is(srcDir.isFile(), false) } }) @@ -87,3 +92,37 @@ test('error: should throw on non-existent directory', async (t) => { t.throws(() => readdirSync('./path/to/nowhere')) await t.throwsAsync(async () => await readdir('./path/to/nowhere')) }) + +// ===== dual-run comparison ===== + +test('dual-run: readdirSync names should match node:fs', (t) => { + const nodeResult = nodeFs.readdirSync('.').sort() + const hyperResult = (readdirSync('.') as string[]).sort() + t.deepEqual(hyperResult, nodeResult) +}) + +test('dual-run: readdirSync withFileTypes entry names should match node:fs', (t) => { + const nodeEntries = nodeFs.readdirSync('.', { withFileTypes: true }) + const hyperEntries = readdirSync('.', { withFileTypes: true }) + + const nodeNames = nodeEntries.map((e) => e.name).sort() + const hyperNames = (hyperEntries as { name: string }[]).map((e) => e.name).sort() + t.deepEqual(hyperNames, nodeNames) +}) + +test('dual-run: readdirSync withFileTypes isFile/isDirectory should match', (t) => { + const nodeEntries = nodeFs.readdirSync('.', { withFileTypes: true }) + const hyperEntries = readdirSync('.', { withFileTypes: true }) as { + name: string + isFile: () => boolean + isDirectory: () => boolean + }[] + + for (const nodeEntry of nodeEntries) { + const hyperEntry = hyperEntries.find((e) => e.name === nodeEntry.name) + if (hyperEntry) { + t.is(hyperEntry.isFile(), nodeEntry.isFile(), `isFile mismatch for ${nodeEntry.name}`) + t.is(hyperEntry.isDirectory(), nodeEntry.isDirectory(), `isDirectory mismatch for ${nodeEntry.name}`) + } + } +}) diff --git a/__test__/readlink.spec.ts b/__test__/readlink.spec.ts new file mode 100644 index 0000000..1e38863 --- /dev/null +++ b/__test__/readlink.spec.ts @@ -0,0 +1,60 @@ +import test from 'ava' +import { readlinkSync, readlink } from '../index.js' +import { writeFileSync, mkdirSync, symlinkSync, readlinkSync as nodeReadlinkSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-readlink-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +test('readlinkSync: should read symbolic link target', (t) => { + const dir = tmpDir() + const target = join(dir, 'target.txt') + const link = join(dir, 'link.txt') + writeFileSync(target, 'hello') + symlinkSync(target, link) + + const result = readlinkSync(link) + t.is(result, target) +}) + +test('readlinkSync: should match node:fs readlinkSync', (t) => { + const dir = tmpDir() + const target = join(dir, 'target2.txt') + const link = join(dir, 'link2.txt') + writeFileSync(target, 'hello') + symlinkSync(target, link) + + const nodeResult = nodeReadlinkSync(link, 'utf8') + const hyperResult = readlinkSync(link) + t.is(hyperResult, nodeResult) +}) + +test('readlinkSync: should throw ENOENT on non-existent path', (t) => { + t.throws(() => readlinkSync('/tmp/no-such-link-' + Date.now()), { message: /ENOENT/ }) +}) + +test('readlinkSync: should throw EINVAL on non-symlink', (t) => { + const dir = tmpDir() + const file = join(dir, 'regular.txt') + writeFileSync(file, 'not a symlink') + t.throws(() => readlinkSync(file), { message: /EINVAL/ }) +}) + +test('readlink: async should read symbolic link', async (t) => { + const dir = tmpDir() + const target = join(dir, 'target3.txt') + const link = join(dir, 'link3.txt') + writeFileSync(target, 'hello') + symlinkSync(target, link) + + const result = await readlink(link) + t.is(result, target) +}) + +test('readlink: async should throw on non-existent path', async (t) => { + await t.throwsAsync(async () => await readlink('/tmp/no-such-link-' + Date.now()), { message: /ENOENT/ }) +}) diff --git a/__test__/realpath.spec.ts b/__test__/realpath.spec.ts new file mode 100644 index 0000000..c73f4e2 --- /dev/null +++ b/__test__/realpath.spec.ts @@ -0,0 +1,70 @@ +import test from 'ava' +import { realpathSync, realpath } from '../index.js' +import * as nodeFs from 'node:fs' +import * as path from 'node:path' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-realpath-${Date.now()}-${Math.random().toString(36).slice(2)}`) + nodeFs.mkdirSync(dir, { recursive: true }) + return dir +} + +test('realpathSync: should resolve to absolute path', (t) => { + const result = realpathSync('.') + t.true(path.isAbsolute(result)) +}) + +test('realpathSync: should match node:fs realpathSync', (t) => { + const nodeResult = nodeFs.realpathSync('.') + const hyperResult = realpathSync('.') + t.is(hyperResult, nodeResult) +}) + +test('realpathSync: should throw on non-existent path', (t) => { + t.throws(() => realpathSync('./no-such-path'), { message: /ENOENT/ }) +}) + +test('realpath: async should resolve path', async (t) => { + const result = await realpath('.') + t.true(path.isAbsolute(result)) +}) + +test('realpath: async should throw on non-existent path', async (t) => { + await t.throwsAsync(async () => await realpath('./no-such-path'), { message: /ENOENT/ }) +}) + +test('dual-run: realpathSync should resolve symlink to real path', (t) => { + const dir = tmpDir() + const target = join(dir, 'real-target.txt') + const link = join(dir, 'link.txt') + nodeFs.writeFileSync(target, 'hello') + nodeFs.symlinkSync(target, link) + + const nodeResult = nodeFs.realpathSync(link) + const hyperResult = realpathSync(link) + // Compare against node:fs (not raw `target`): on macOS /tmp is a symlink to /private/tmp, + // so realpath resolves through it. + t.is(hyperResult, nodeResult) + // The resolved path should end with the target filename + t.true(hyperResult.endsWith('real-target.txt')) +}) + +test('dual-run: realpathSync should resolve relative path same as node:fs', (t) => { + const nodeResult = nodeFs.realpathSync('src') + const hyperResult = realpathSync('src') + t.is(hyperResult, nodeResult) +}) + +test('realpath: async dual-run should resolve symlink same as node:fs', async (t) => { + const dir = tmpDir() + const target = join(dir, 'async-target.txt') + const link = join(dir, 'async-link.txt') + nodeFs.writeFileSync(target, 'hello') + nodeFs.symlinkSync(target, link) + + const nodeResult = nodeFs.realpathSync(link) + const hyperResult = await realpath(link) + t.is(hyperResult, nodeResult) +}) diff --git a/__test__/rename.spec.ts b/__test__/rename.spec.ts new file mode 100644 index 0000000..4167146 --- /dev/null +++ b/__test__/rename.spec.ts @@ -0,0 +1,61 @@ +import test from 'ava' +import { renameSync, rename } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, existsSync, mkdirSync, readFileSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-rename-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +test('renameSync: should rename a file', (t) => { + const dir = tmpDir() + const src = join(dir, 'old.txt') + const dest = join(dir, 'new.txt') + writeFileSync(src, 'content') + + renameSync(src, dest) + + t.false(existsSync(src)) + t.true(existsSync(dest)) + t.is(readFileSync(dest, 'utf8'), 'content') +}) + +test('renameSync: should throw on non-existent source', (t) => { + const dir = tmpDir() + t.throws(() => renameSync(join(dir, 'nope'), join(dir, 'other')), { message: /ENOENT/ }) +}) + +test('rename: async should rename a file', async (t) => { + const dir = tmpDir() + const src = join(dir, 'a.txt') + const dest = join(dir, 'b.txt') + writeFileSync(src, 'data') + + await rename(src, dest) + + t.false(existsSync(src)) + t.true(existsSync(dest)) +}) + +// ===== dual-run comparison ===== + +test('dual-run: renameSync should produce same result as node:fs', (t) => { + const dir = tmpDir() + const nodeSrc = join(dir, 'node-src.txt') + const nodeDest = join(dir, 'node-dest.txt') + const hyperSrc = join(dir, 'hyper-src.txt') + const hyperDest = join(dir, 'hyper-dest.txt') + writeFileSync(nodeSrc, 'rename-test') + writeFileSync(hyperSrc, 'rename-test') + + nodeFs.renameSync(nodeSrc, nodeDest) + renameSync(hyperSrc, hyperDest) + + t.is(existsSync(hyperSrc), existsSync(nodeSrc)) + t.is(existsSync(hyperDest), existsSync(nodeDest)) + t.is(readFileSync(hyperDest, 'utf8'), readFileSync(nodeDest, 'utf8')) +}) diff --git a/__test__/rm.spec.ts b/__test__/rm.spec.ts index 04d3aef..9bbee49 100644 --- a/__test__/rm.spec.ts +++ b/__test__/rm.spec.ts @@ -1,6 +1,6 @@ import test from 'ava' import { rmSync, rm } from '../index.js' -import { mkdirSync, writeFileSync, existsSync } from 'node:fs' +import { mkdirSync, writeFileSync, existsSync, rmSync as nodeRmSync } from 'node:fs' import { join } from 'node:path' import { tmpdir } from 'node:os' @@ -215,3 +215,68 @@ test('async: should remove deep nested directory with concurrency', async (t) => await rm(testDir, { recursive: true, concurrency: 4 }) t.false(existsSync(testDir)) }) + +// ===== dual-run comparison ===== + +test('dual-run: rmSync file should behave same as node:fs', (t) => { + const dir = createTempDir() + const nodeFile = join(dir, 'node.txt') + const hyperFile = join(dir, 'hyper.txt') + writeFileSync(nodeFile, 'a') + writeFileSync(hyperFile, 'a') + + t.is(existsSync(hyperFile), existsSync(nodeFile)) + + nodeRmSync(nodeFile) + rmSync(hyperFile) + + t.is(existsSync(hyperFile), existsSync(nodeFile)) +}) + +test('dual-run: rmSync recursive should behave same as node:fs', (t) => { + const dir1 = createTempDir() + const dir2 = createTempDir() + const nodeDir = join(dir1, 'sub') + const hyperDir = join(dir2, 'sub') + + mkdirSync(nodeDir, { recursive: true }) + mkdirSync(hyperDir, { recursive: true }) + writeFileSync(join(nodeDir, 'f.txt'), 'x') + writeFileSync(join(hyperDir, 'f.txt'), 'x') + + nodeRmSync(nodeDir, { recursive: true, force: true }) + rmSync(hyperDir, { recursive: true, force: true }) + + t.is(existsSync(hyperDir), existsSync(nodeDir)) +}) + +// ===== maxRetries / retryDelay ===== + +test('sync: maxRetries should eventually succeed on transient failure', (t) => { + const tempDir = createTempDir() + const testFile = join(tempDir, 'retry.txt') + writeFileSync(testFile, 'retry test') + + rmSync(testFile, { maxRetries: 3, retryDelay: 10 }) + t.false(existsSync(testFile)) +}) + +test('sync: maxRetries with recursive should work', (t) => { + const tempDir = createTempDir() + const testDir = join(tempDir, 'retry-dir') + mkdirSync(testDir, { recursive: true }) + writeFileSync(join(testDir, 'f.txt'), 'data') + + rmSync(testDir, { recursive: true, maxRetries: 2, retryDelay: 50 }) + t.false(existsSync(testDir)) +}) + +test('async: maxRetries with recursive should work', async (t) => { + const tempDir = createTempDir() + const testDir = join(tempDir, 'retry-async') + mkdirSync(testDir, { recursive: true }) + writeFileSync(join(testDir, 'f.txt'), 'data') + + await rm(testDir, { recursive: true, maxRetries: 2, retryDelay: 50 }) + t.false(existsSync(testDir)) +}) diff --git a/__test__/rmdir.spec.ts b/__test__/rmdir.spec.ts new file mode 100644 index 0000000..0b3172c --- /dev/null +++ b/__test__/rmdir.spec.ts @@ -0,0 +1,57 @@ +import test from 'ava' +import { rmdirSync, rmdir, mkdirSync } from '../index.js' +import { existsSync, writeFileSync, mkdirSync as nodeMkdirSync, rmdirSync as nodeRmdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-rmdir-${Date.now()}-${name}`) + nodeMkdirSync(dir, { recursive: true }) + return dir +} + +test('rmdirSync: should remove an empty directory', (t) => { + const dir = tmpDir('empty') + const target = join(dir, 'sub') + mkdirSync(target) + t.true(existsSync(target)) + rmdirSync(target) + t.false(existsSync(target)) +}) + +test('rmdirSync: should throw on non-empty directory', (t) => { + const dir = tmpDir('notempty') + const target = join(dir, 'sub') + mkdirSync(target) + writeFileSync(join(target, 'file.txt'), 'data') + t.throws(() => rmdirSync(target), { message: /ENOTEMPTY/ }) +}) + +test('rmdirSync: should throw on non-existent path', (t) => { + t.throws(() => rmdirSync('/tmp/hyper-fs-no-such-dir-' + Date.now()), { message: /ENOENT/ }) +}) + +test('rmdir: async should remove empty directory', async (t) => { + const dir = tmpDir('async') + const target = join(dir, 'sub') + mkdirSync(target) + await rmdir(target) + t.false(existsSync(target)) +}) + +// ===== dual-run comparison ===== + +test('dual-run: rmdirSync should leave same state as node:fs.rmdirSync', (t) => { + const dir1 = tmpDir('node-rmdir') + const dir2 = tmpDir('hyper-rmdir') + const nodeTarget = join(dir1, 'sub') + const hyperTarget = join(dir2, 'sub') + + nodeMkdirSync(nodeTarget) + mkdirSync(hyperTarget) + + nodeRmdirSync(nodeTarget) + rmdirSync(hyperTarget) + + t.is(existsSync(hyperTarget), existsSync(nodeTarget)) +}) diff --git a/__test__/stat.spec.ts b/__test__/stat.spec.ts new file mode 100644 index 0000000..7208843 --- /dev/null +++ b/__test__/stat.spec.ts @@ -0,0 +1,160 @@ +import test from 'ava' +import { statSync, stat, lstatSync, lstat } from '../index.js' +import * as nodeFs from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-stat-${Date.now()}-${Math.random().toString(36).slice(2)}`) + nodeFs.mkdirSync(dir, { recursive: true }) + return dir +} + +test('statSync: should return stats for a file', (t) => { + const s = statSync('./package.json') + t.is(typeof s.size, 'number') + t.true(s.size > 0) + t.true(s.isFile()) + t.false(s.isDirectory()) +}) + +test('statSync: should return stats for a directory', (t) => { + const s = statSync('./src') + t.true(s.isDirectory()) + t.false(s.isFile()) +}) + +test('statSync: should match node:fs stat values', (t) => { + const nodeStat = nodeFs.statSync('./package.json') + const hyperStat = statSync('./package.json') + + t.is(hyperStat.size, nodeStat.size) + t.is(hyperStat.isFile(), nodeStat.isFile()) + t.is(hyperStat.isDirectory(), nodeStat.isDirectory()) + t.is(hyperStat.isSymbolicLink(), nodeStat.isSymbolicLink()) + t.is(hyperStat.mode, nodeStat.mode) + t.is(hyperStat.uid, nodeStat.uid) + t.is(hyperStat.gid, nodeStat.gid) + t.is(hyperStat.nlink, nodeStat.nlink) +}) + +test('statSync: should throw on non-existent path', (t) => { + t.throws(() => statSync('./no-such-file'), { message: /ENOENT/ }) +}) + +test('stat: async should return stats', async (t) => { + const s = await stat('./package.json') + t.true(s.isFile()) + t.true(s.size > 0) +}) + +test('stat: async should throw on non-existent path', async (t) => { + await t.throwsAsync(async () => await stat('./no-such-file'), { message: /ENOENT/ }) +}) + +test('lstatSync: should return stats without following symlinks', (t) => { + const s = lstatSync('./package.json') + t.true(s.isFile()) +}) + +test('lstat: async should work', async (t) => { + const s = await lstat('./package.json') + t.true(s.isFile()) +}) + +test('statSync: atimeMs/mtimeMs/ctimeMs/birthtimeMs should be numbers', (t) => { + const s = statSync('./package.json') + t.is(typeof s.atimeMs, 'number') + t.is(typeof s.mtimeMs, 'number') + t.is(typeof s.ctimeMs, 'number') + t.is(typeof s.birthtimeMs, 'number') + t.true(s.mtimeMs > 0) +}) + +test('statSync: atime/mtime/ctime/birthtime should be Date objects', (t) => { + const s = statSync('./package.json') + t.true(s.atime instanceof Date) + t.true(s.mtime instanceof Date) + t.true(s.ctime instanceof Date) + t.true(s.birthtime instanceof Date) + t.true(s.mtime.getTime() > 0) +}) + +test('statSync: atime.getTime() should be close to atimeMs', (t) => { + const s = statSync('./package.json') + t.true(Math.abs(s.atime.getTime() - s.atimeMs) < 1000) +}) + +test('statSync: should match node:fs atime/mtime Date values', (t) => { + const nodeStat = nodeFs.statSync('./package.json') + const hyperStat = statSync('./package.json') + t.is(hyperStat.mtime.getTime(), nodeStat.mtime.getTime()) +}) + +test('lstatSync: dual-run — symlink should report isSymbolicLink()', (t) => { + const dir = tmpDir() + const target = join(dir, 'target.txt') + const link = join(dir, 'link.txt') + nodeFs.writeFileSync(target, 'hello') + nodeFs.symlinkSync(target, link) + + const nodeLstat = nodeFs.lstatSync(link) + const hyperLstat = lstatSync(link) + + t.is(hyperLstat.isSymbolicLink(), nodeLstat.isSymbolicLink()) + t.true(hyperLstat.isSymbolicLink()) + t.is(hyperLstat.isFile(), nodeLstat.isFile()) + t.false(hyperLstat.isFile()) +}) + +test('statSync: atime Date should be correct for pre-epoch (negative ms) timestamp', (t) => { + // 使用 node:fs utimesSync 设置一个 Unix 纪元前的时间戳(负毫秒值) + const dir = tmpDir() + const file = join(dir, 'pre-epoch.txt') + nodeFs.writeFileSync(file, 'x') + // -500 ms = 1969-12-31T23:59:59.500Z + const preEpochSecs = -0.5 + nodeFs.utimesSync(file, preEpochSecs, preEpochSecs) + + const hyperStat = statSync(file) + const nodeStat = nodeFs.statSync(file) + + // 验证 ms 值符号正确(负值) + t.true(hyperStat.mtimeMs < 0, 'mtimeMs should be negative for pre-epoch timestamps') + // 验证转换后的 Date 和 node:fs 一致 + t.is(hyperStat.mtime.getTime(), nodeStat.mtime.getTime()) +}) + +test('statSync: mtime Date should have correct sub-second precision', (t) => { + const dir = tmpDir() + const file = join(dir, 'subsec.txt') + nodeFs.writeFileSync(file, 'x') + // 设置带小数秒的时间戳(500ms 精度) + const ts = 1_700_000_000.5 // 带 0.5 秒小数部分 + nodeFs.utimesSync(file, ts, ts) + + const hyperStat = statSync(file) + const nodeStat = nodeFs.statSync(file) + + // Date 毫秒值应和 node:fs 一致(精度 1ms 内) + t.true( + Math.abs(hyperStat.mtime.getTime() - nodeStat.mtime.getTime()) < 2, + `mtime mismatch: hyper=${hyperStat.mtime.getTime()} node=${nodeStat.mtime.getTime()}`, + ) +}) + +test('statSync: dual-run — stat follows symlink (shows target not link)', (t) => { + const dir = tmpDir() + const target = join(dir, 'target.txt') + const link = join(dir, 'link.txt') + nodeFs.writeFileSync(target, 'hello') + nodeFs.symlinkSync(target, link) + + const nodeStat = nodeFs.statSync(link) + const hyperStat = statSync(link) + + t.is(hyperStat.isFile(), nodeStat.isFile()) + t.true(hyperStat.isFile()) + t.is(hyperStat.isSymbolicLink(), nodeStat.isSymbolicLink()) + t.false(hyperStat.isSymbolicLink()) +}) diff --git a/__test__/symlink.spec.ts b/__test__/symlink.spec.ts new file mode 100644 index 0000000..b4576ae --- /dev/null +++ b/__test__/symlink.spec.ts @@ -0,0 +1,99 @@ +import test from 'ava' +import { symlinkSync, symlink, readlinkSync, statSync, lstatSync } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, mkdirSync, existsSync, readlinkSync as nodeReadlinkSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpDir(): string { + const dir = join(tmpdir(), `hyper-fs-test-symlink-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return dir +} + +// ===== sync ===== + +test('symlinkSync: should create a symbolic link to a file', (t) => { + const dir = tmpDir() + const target = join(dir, 'target.txt') + const link = join(dir, 'link.txt') + writeFileSync(target, 'hello') + + symlinkSync(target, link) + t.true(existsSync(link)) + + const resolved = readlinkSync(link) + t.is(resolved, target) +}) + +test('symlinkSync: should create a symbolic link to a directory', (t) => { + const dir = tmpDir() + const targetDir = join(dir, 'subdir') + mkdirSync(targetDir) + const link = join(dir, 'link-dir') + + symlinkSync(targetDir, link) + t.true(lstatSync(link).isSymbolicLink()) + t.true(statSync(link).isDirectory()) +}) + +test('symlinkSync: should match node:fs readlink result', (t) => { + const dir = tmpDir() + const target = join(dir, 'target2.txt') + const link = join(dir, 'link2.txt') + writeFileSync(target, 'hello') + + symlinkSync(target, link) + const nodeResult = nodeReadlinkSync(link, 'utf8') + const hyperResult = readlinkSync(link) + t.is(hyperResult, nodeResult) +}) + +test('symlinkSync: should throw EEXIST if link path already exists', (t) => { + const dir = tmpDir() + const target = join(dir, 'target3.txt') + const link = join(dir, 'link3.txt') + writeFileSync(target, 'hello') + writeFileSync(link, 'existing') + + t.throws(() => symlinkSync(target, link), { message: /EEXIST/ }) +}) + +// ===== dual-run ===== + +test('dual-run: symlinkSync result should match node:fs.symlinkSync', (t) => { + const dir = tmpDir() + const target = join(dir, 'dual-target.txt') + const hyperLink = join(dir, 'dual-hyper-link.txt') + const nodeLink = join(dir, 'dual-node-link.txt') + nodeFs.writeFileSync(target, 'hello') + + nodeFs.symlinkSync(target, nodeLink) + symlinkSync(target, hyperLink) + + // Both should be symlinks pointing to the same target + const nodeReadlink = nodeReadlinkSync(nodeLink, 'utf8') + const hyperReadlink = nodeReadlinkSync(hyperLink, 'utf8') + t.is(hyperReadlink, nodeReadlink) + + // Both should resolve to the same file + const nodeLstat = nodeFs.lstatSync(nodeLink) + const hyperLstat = nodeFs.lstatSync(hyperLink) + t.is(hyperLstat.isSymbolicLink(), nodeLstat.isSymbolicLink()) + t.true(hyperLstat.isSymbolicLink()) +}) + +// ===== async ===== + +test('symlink: async should create a symbolic link', async (t) => { + const dir = tmpDir() + const target = join(dir, 'async-target.txt') + const link = join(dir, 'async-link.txt') + writeFileSync(target, 'hello') + + await symlink(target, link) + t.true(existsSync(link)) + + const resolved = readlinkSync(link) + t.is(resolved, target) +}) diff --git a/__test__/truncate.spec.ts b/__test__/truncate.spec.ts new file mode 100644 index 0000000..1b41473 --- /dev/null +++ b/__test__/truncate.spec.ts @@ -0,0 +1,54 @@ +import test from 'ava' +import { truncateSync, truncate } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, readFileSync, mkdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-truncate-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + writeFileSync(file, 'hello world 12345') + return file +} + +test('truncateSync: should truncate to 0 by default', (t) => { + const file = tmpFile('zero.txt') + truncateSync(file) + const content = readFileSync(file, 'utf8') + t.is(content, '') +}) + +test('truncateSync: should truncate to specific length', (t) => { + const file = tmpFile('len.txt') + truncateSync(file, 5) + const content = readFileSync(file, 'utf8') + t.is(content, 'hello') +}) + +test('truncateSync: should throw on non-existent file', (t) => { + t.throws(() => truncateSync('/tmp/no-such-file-' + Date.now()), { message: /ENOENT/ }) +}) + +test('truncate: async should truncate', async (t) => { + const file = tmpFile('async.txt') + await truncate(file, 5) + const content = readFileSync(file, 'utf8') + t.is(content, 'hello') +}) + +// ===== dual-run comparison ===== + +test('dual-run: truncateSync should produce same result as node:fs', (t) => { + const nodeFile = tmpFile('node-trunc.txt') + const hyperFile = tmpFile('hyper-trunc.txt') + + nodeFs.truncateSync(nodeFile, 5) + truncateSync(hyperFile, 5) + + const nodeContent = readFileSync(nodeFile, 'utf8') + const hyperContent = readFileSync(hyperFile, 'utf8') + t.is(hyperContent, nodeContent) + t.is(hyperContent.length, nodeContent.length) +}) diff --git a/__test__/unlink.spec.ts b/__test__/unlink.spec.ts new file mode 100644 index 0000000..092dc23 --- /dev/null +++ b/__test__/unlink.spec.ts @@ -0,0 +1,60 @@ +import test from 'ava' +import { unlinkSync, unlink } from '../index.js' +import * as nodeFs from 'node:fs' +import { writeFileSync, existsSync, mkdirSync, rmdirSync as nodeRmdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function createTempFile(): string { + const dir = join(tmpdir(), `hyper-fs-test-unlink-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, 'test.txt') + writeFileSync(file, 'hello') + return file +} + +test('unlinkSync: should remove a file', (t) => { + const file = createTempFile() + t.true(existsSync(file)) + unlinkSync(file) + t.false(existsSync(file)) +}) + +test('unlinkSync: should throw on non-existent file', (t) => { + t.throws(() => unlinkSync('/tmp/hyper-fs-no-such-file-' + Date.now()), { message: /ENOENT/ }) +}) + +test('unlinkSync: should throw on directory', (t) => { + const dir = join(tmpdir(), `hyper-fs-test-unlink-dir-${Date.now()}`) + mkdirSync(dir) + t.throws(() => unlinkSync(dir), { message: /EPERM/ }) + nodeRmdirSync(dir) +}) + +test('unlink: async should remove a file', async (t) => { + const file = createTempFile() + t.true(existsSync(file)) + await unlink(file) + t.false(existsSync(file)) +}) + +test('unlink: async should throw on non-existent file', async (t) => { + await t.throwsAsync(async () => await unlink('/tmp/hyper-fs-no-such-' + Date.now()), { message: /ENOENT/ }) +}) + +// ===== dual-run comparison ===== + +test('dual-run: unlinkSync should produce same result as node:fs', (t) => { + const dir = join(tmpdir(), `hyper-fs-test-unlink-dual-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + + const nodeFile = join(dir, 'node.txt') + const hyperFile = join(dir, 'hyper.txt') + writeFileSync(nodeFile, 'x') + writeFileSync(hyperFile, 'x') + + nodeFs.unlinkSync(nodeFile) + unlinkSync(hyperFile) + + t.is(existsSync(hyperFile), existsSync(nodeFile)) +}) diff --git a/__test__/utimes.spec.ts b/__test__/utimes.spec.ts new file mode 100644 index 0000000..ddab832 --- /dev/null +++ b/__test__/utimes.spec.ts @@ -0,0 +1,91 @@ +import test from 'ava' +import { utimesSync, utimes, statSync } from '../index.js' +import { writeFileSync, mkdirSync, statSync as nodeStatSync, utimesSync as nodeUtimesSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-utimes-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + const file = join(dir, name) + writeFileSync(file, 'test') + return file +} + +test('utimesSync: should update atime and mtime', (t) => { + const file = tmpFile('utimes.txt') + const atime = 1000 + const mtime = 2000 + + utimesSync(file, atime, mtime) + const s = statSync(file) + + t.is(Math.floor(s.atimeMs / 1000), atime) + t.is(Math.floor(s.mtimeMs / 1000), mtime) +}) + +test('utimesSync: should match node:fs behavior', (t) => { + const file1 = tmpFile('node-utimes.txt') + const file2 = tmpFile('hyper-utimes.txt') + const atime = 1500000000 + const mtime = 1600000000 + + nodeUtimesSync(file1, atime, mtime) + utimesSync(file2, atime, mtime) + + const nodeStat = nodeStatSync(file1) + const hyperStat = statSync(file2) + + t.is(Math.floor(hyperStat.atimeMs / 1000), Math.floor(nodeStat.atimeMs / 1000)) + t.is(Math.floor(hyperStat.mtimeMs / 1000), Math.floor(nodeStat.mtimeMs / 1000)) +}) + +test('utimesSync: should throw on non-existent file', (t) => { + t.throws(() => utimesSync('/tmp/no-such-file-' + Date.now(), 1000, 2000), { message: /ENOENT/ }) +}) + +test('utimes: async should update times', async (t) => { + const file = tmpFile('async-utimes.txt') + const atime = 1000 + const mtime = 2000 + + await utimes(file, atime, mtime) + const s = statSync(file) + + t.is(Math.floor(s.atimeMs / 1000), atime) + t.is(Math.floor(s.mtimeMs / 1000), mtime) +}) + +test('utimes: async should throw on non-existent file', async (t) => { + await t.throwsAsync(async () => await utimes('/tmp/no-such-file-' + Date.now(), 1000, 2000), { + message: /ENOENT/, + }) +}) + +test('utimesSync: should accept Date objects as atime/mtime', (t) => { + const file = tmpFile('utimes-date.txt') + const atime = new Date('2020-01-01T00:00:00Z') + const mtime = new Date('2021-06-15T12:00:00Z') + + utimesSync(file, atime.getTime() / 1000, mtime.getTime() / 1000) + const s = statSync(file) + + t.is(Math.floor(s.atimeMs / 1000), Math.floor(atime.getTime() / 1000)) + t.is(Math.floor(s.mtimeMs / 1000), Math.floor(mtime.getTime() / 1000)) +}) + +test('dual-run: utimesSync Date values should match node:fs', (t) => { + const file1 = tmpFile('node-utimes-date.txt') + const file2 = tmpFile('hyper-utimes-date.txt') + const atimeSecs = 1577836800 // 2020-01-01 + const mtimeSecs = 1623758400 // 2021-06-15 + + nodeUtimesSync(file1, atimeSecs, mtimeSecs) + utimesSync(file2, atimeSecs, mtimeSecs) + + const nodeStat = nodeStatSync(file1) + const hyperStat = statSync(file2) + + t.is(hyperStat.mtime.getTime(), nodeStat.mtime.getTime()) + t.is(hyperStat.atime.getTime(), nodeStat.atime.getTime()) +}) diff --git a/__test__/write_file.spec.ts b/__test__/write_file.spec.ts new file mode 100644 index 0000000..723f9b4 --- /dev/null +++ b/__test__/write_file.spec.ts @@ -0,0 +1,146 @@ +import test from 'ava' +import { writeFileSync, writeFile, readFileSync, appendFileSync, appendFile } from '../index.js' +import * as nodeFs from 'node:fs' +import { mkdirSync } from 'node:fs' +import { join } from 'node:path' +import { tmpdir } from 'node:os' + +function tmpFile(name: string): string { + const dir = join(tmpdir(), `hyper-fs-test-write-${Date.now()}-${Math.random().toString(36).slice(2)}`) + mkdirSync(dir, { recursive: true }) + return join(dir, name) +} + +test('writeFileSync: should write string to file', (t) => { + const file = tmpFile('str.txt') + writeFileSync(file, 'hello world') + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'hello world') +}) + +test('writeFileSync: should write Buffer to file', (t) => { + const file = tmpFile('buf.txt') + writeFileSync(file, Buffer.from('buffer data')) + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'buffer data') +}) + +test('writeFileSync: should overwrite existing file', (t) => { + const file = tmpFile('overwrite.txt') + writeFileSync(file, 'first') + writeFileSync(file, 'second') + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'second') +}) + +test('writeFile: async should write file', async (t) => { + const file = tmpFile('async.txt') + await writeFile(file, 'async content') + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'async content') +}) + +test('appendFileSync: should append to file', (t) => { + const file = tmpFile('append.txt') + writeFileSync(file, 'start') + appendFileSync(file, '-end') + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'start-end') +}) + +test('appendFile: async should append', async (t) => { + const file = tmpFile('append-async.txt') + writeFileSync(file, 'a') + await appendFile(file, 'b') + const content = readFileSync(file, { encoding: 'utf8' }) + t.is(content, 'ab') +}) + +// ===== encoding option tests (async) ===== + +test('writeFile: async hex encoding should write decoded binary bytes', async (t) => { + const file = tmpFile('async-hex.bin') + // "deadbeef" 以 hex encoding 写入,应得到 4 个字节 0xde 0xad 0xbe 0xef + await writeFile(file, 'deadbeef', { encoding: 'hex' }) + const buf = nodeFs.readFileSync(file) + t.deepEqual([...buf], [0xde, 0xad, 0xbe, 0xef]) +}) + +test('writeFile: async base64 encoding should write decoded bytes', async (t) => { + const file = tmpFile('async-b64.bin') + const original = Buffer.from([0x01, 0x02, 0x03, 0xff]) + const b64 = original.toString('base64') + await writeFile(file, b64, { encoding: 'base64' }) + const buf = nodeFs.readFileSync(file) + t.deepEqual([...buf], [...original]) +}) + +test('writeFile: async encoding result matches node:fs writeFile', async (t) => { + const nodeFile = tmpFile('node-hex.bin') + const hyperFile = tmpFile('hyper-hex.bin') + nodeFs.writeFileSync(nodeFile, 'cafebabe', { encoding: 'hex' }) + await writeFile(hyperFile, 'cafebabe', { encoding: 'hex' }) + t.deepEqual([...nodeFs.readFileSync(hyperFile)], [...nodeFs.readFileSync(nodeFile)]) +}) + +test('appendFile: async hex encoding should append decoded binary bytes', async (t) => { + const file = tmpFile('async-append-hex.bin') + nodeFs.writeFileSync(file, Buffer.from([0x01])) + await appendFile(file, 'ff00', { encoding: 'hex' }) + const buf = nodeFs.readFileSync(file) + t.deepEqual([...buf], [0x01, 0xff, 0x00]) +}) + +test('appendFile: async encoding result matches node:fs appendFile', async (t) => { + const nodeFile = tmpFile('node-append-b64.bin') + const hyperFile = tmpFile('hyper-append-b64.bin') + const b64 = Buffer.from('hello').toString('base64') + nodeFs.writeFileSync(nodeFile, '') + nodeFs.writeFileSync(hyperFile, '') + nodeFs.appendFileSync(nodeFile, b64, { encoding: 'base64' }) + await appendFile(hyperFile, b64, { encoding: 'base64' }) + t.deepEqual([...nodeFs.readFileSync(hyperFile)], [...nodeFs.readFileSync(nodeFile)]) +}) + +// ===== dual-run comparison ===== + +test('dual-run: writeFileSync should produce same file content as node:fs', (t) => { + const nodeFile = tmpFile('node-write.txt') + const hyperFile = tmpFile('hyper-write.txt') + const data = 'hello dual-run test 你好世界' + + nodeFs.writeFileSync(nodeFile, data) + writeFileSync(hyperFile, data) + + const nodeContent = nodeFs.readFileSync(nodeFile, 'utf8') + const hyperContent = nodeFs.readFileSync(hyperFile, 'utf8') + t.is(hyperContent, nodeContent) +}) + +test('dual-run: writeFileSync Buffer should produce same content as node:fs', (t) => { + const nodeFile = tmpFile('node-buf.txt') + const hyperFile = tmpFile('hyper-buf.txt') + const data = Buffer.from([0x00, 0x01, 0x02, 0xff, 0xfe]) + + nodeFs.writeFileSync(nodeFile, data) + writeFileSync(hyperFile, data) + + const nodeContent = nodeFs.readFileSync(nodeFile) + const hyperContent = nodeFs.readFileSync(hyperFile) + t.deepEqual(hyperContent, nodeContent) +}) + +test('dual-run: appendFileSync should produce same result as node:fs', (t) => { + const nodeFile = tmpFile('node-append.txt') + const hyperFile = tmpFile('hyper-append.txt') + + nodeFs.writeFileSync(nodeFile, 'base') + writeFileSync(hyperFile, 'base') + + nodeFs.appendFileSync(nodeFile, '-appended') + appendFileSync(hyperFile, '-appended') + + const nodeContent = nodeFs.readFileSync(nodeFile, 'utf8') + const hyperContent = nodeFs.readFileSync(hyperFile, 'utf8') + t.is(hyperContent, nodeContent) +}) diff --git a/benchmark/copy_file.ts b/benchmark/copy_file.ts new file mode 100644 index 0000000..302c29f --- /dev/null +++ b/benchmark/copy_file.ts @@ -0,0 +1,73 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as os from 'node:os' +import { copyFileSync } from '../index.js' + +const tmpDir = path.join(os.tmpdir(), `hyper-fs-bench-copyfile-${Date.now()}`) +fs.mkdirSync(tmpDir, { recursive: true }) + +// Prepare source files +const smallSrc = path.join(tmpDir, 'small-src.txt') +const mediumSrc = path.join(tmpDir, 'medium-src.txt') +const largeSrc = path.join(tmpDir, 'large-src.txt') + +fs.writeFileSync(smallSrc, 'hello world') +fs.writeFileSync(mediumSrc, 'x'.repeat(64 * 1024)) // 64KB +fs.writeFileSync(largeSrc, 'x'.repeat(4 * 1024 * 1024)) // 4MB + +const iterations = 50 + +function benchmark(name: string, fn: () => void): number { + fn() + const times: number[] = [] + for (let i = 0; i < iterations; i++) { + const start = process.hrtime.bigint() + fn() + const end = process.hrtime.bigint() + times.push(Number(end - start) / 1_000_000) + } + return times.reduce((a, b) => a + b, 0) / times.length +} + +function runGroup(groupName: string, impls: { name: string; fn: () => void }[]) { + console.log(`\n${groupName}`) + const results: { name: string; time: number }[] = [] + + for (const impl of impls) { + results.push({ name: impl.name, time: benchmark(impl.name, impl.fn) }) + } + + const baseline = results[0] + for (const res of results) { + const isBaseline = res === baseline + const ratio = res.time / baseline.time + const diffStr = isBaseline ? '(baseline)' : `${ratio.toFixed(2)}x ${ratio > 1 ? '(slower)' : '(faster)'}` + console.log(` ${res.name.padEnd(25)} ${res.time.toFixed(3)} ms ${diffStr}`) + } +} + +console.log('Benchmarking copyFile with various file sizes') + +let counter = 0 +const getDest = () => path.join(tmpDir, `dest-${counter++}.txt`) + +// 1. Small file copy +runGroup('copyFile (small 11B)', [ + { name: 'Node.js', fn: () => fs.copyFileSync(smallSrc, getDest()) }, + { name: 'Hyper-FS', fn: () => copyFileSync(smallSrc, getDest()) }, +]) + +// 2. Medium file copy +runGroup('copyFile (64KB)', [ + { name: 'Node.js', fn: () => fs.copyFileSync(mediumSrc, getDest()) }, + { name: 'Hyper-FS', fn: () => copyFileSync(mediumSrc, getDest()) }, +]) + +// 3. Large file copy +runGroup('copyFile (4MB)', [ + { name: 'Node.js', fn: () => fs.copyFileSync(largeSrc, getDest()) }, + { name: 'Hyper-FS', fn: () => copyFileSync(largeSrc, getDest()) }, +]) + +// Cleanup +fs.rmSync(tmpDir, { recursive: true, force: true }) diff --git a/benchmark/cp.ts b/benchmark/cp.ts new file mode 100644 index 0000000..2d78564 --- /dev/null +++ b/benchmark/cp.ts @@ -0,0 +1,121 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as os from 'node:os' +import { cpSync as hyperCpSync } from '../index.js' + +const tmpDir = os.tmpdir() +const baseDir = path.join(tmpDir, 'hyper-fs-bench-cp') +const srcBase = path.join(tmpDir, 'hyper-fs-bench-cp-src') + +if (fs.existsSync(baseDir)) fs.rmSync(baseDir, { recursive: true, force: true }) +if (fs.existsSync(srcBase)) fs.rmSync(srcBase, { recursive: true, force: true }) +fs.mkdirSync(baseDir, { recursive: true }) +fs.mkdirSync(srcBase, { recursive: true }) + +// ────────────────────────────────── +// Source tree builders +// ────────────────────────────────── + +function createFlatDir(dir: string, count: number) { + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }) + for (let i = 0; i < count; i++) { + fs.writeFileSync(path.join(dir, `file-${i}.txt`), 'content') + } +} + +function createTreeDir(dir: string, breadth: number, depth: number) { + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }) + function build(current: string, level: number) { + if (level >= depth) return + for (let i = 0; i < breadth; i++) { + const sub = path.join(current, `d${level}-${i}`) + fs.mkdirSync(sub) + fs.writeFileSync(path.join(sub, 'file.txt'), 'hello world') + build(sub, level + 1) + } + } + build(dir, 0) +} + +// Pre-build all source trees once (copy is non-destructive to src, only dest changes) +const flatSrc = path.join(srcBase, 'flat-500') +const treeSrc = path.join(srcBase, 'tree-3x5') +const treeSmallSrc = path.join(srcBase, 'tree-4x3') + +createFlatDir(flatSrc, 500) // 500 files flat +createTreeDir(treeSrc, 3, 5) // 3-breadth × 5-depth ≈ 363 nodes +createTreeDir(treeSmallSrc, 4, 3) // 4-breadth × 3-depth ≈ 84 nodes + +// ────────────────────────────────── +// Benchmark utilities +// ────────────────────────────────── + +const implementations = [ + { + name: 'Node.js', + fn: (src: string, dest: string) => fs.cpSync(src, dest, { recursive: true }), + }, + { + name: 'Hyper-FS (1 thread)', + fn: (src: string, dest: string) => hyperCpSync(src, dest, { recursive: true, concurrency: 1 }), + }, + { + name: 'Hyper-FS (4 threads)', + fn: (src: string, dest: string) => hyperCpSync(src, dest, { recursive: true, concurrency: 4 }), + }, + { + name: 'Hyper-FS (8 threads)', + fn: (src: string, dest: string) => hyperCpSync(src, dest, { recursive: true, concurrency: 8 }), + }, +] + +let destCounter = 0 +function nextDest(prefix: string): string { + return path.join(baseDir, `${prefix}-${destCounter++}`) +} + +function runGroup(groupName: string, src: string) { + console.log(`\n${groupName}`) + const iterations = 12 + const results: { name: string; time: number }[] = [] + + for (const impl of implementations) { + // Warmup + const warmupDest = nextDest('warmup') + impl.fn(src, warmupDest) + fs.rmSync(warmupDest, { recursive: true, force: true }) + + const times: number[] = [] + for (let i = 0; i < iterations; i++) { + const dest = nextDest(impl.name.replace(/[^a-zA-Z0-9]/g, '-')) + const start = process.hrtime.bigint() + impl.fn(src, dest) + const end = process.hrtime.bigint() + times.push(Number(end - start) / 1_000_000) + fs.rmSync(dest, { recursive: true, force: true }) + } + + const sorted = [...times].sort((a, b) => a - b) + const trimmed = sorted.slice(1, -1) // drop best and worst + const avg = trimmed.reduce((a, b) => a + b, 0) / trimmed.length + results.push({ name: impl.name, time: avg }) + } + + const baseline = results[0] + for (const res of results) { + const ratio = res.time / baseline.time + const diffStr = res === baseline ? '(baseline)' : `${ratio.toFixed(2)}x ${ratio > 1 ? '(slower)' : '(faster)'}` + console.log(` ${res.name.padEnd(28)} ${res.time.toFixed(2)} ms ${diffStr}`) + } +} + +console.log('Benchmarking cp recursive (destructive dest — manual iterations)') +console.log('Source: pre-built, Dest: created & removed each iteration') + +runGroup('Flat dir (500 files)', flatSrc) +runGroup('Tree dir (breadth=4, depth=3, ~84 nodes)', treeSmallSrc) +runGroup('Tree dir (breadth=3, depth=5, ~363 nodes)', treeSrc) + +// Cleanup +if (fs.existsSync(baseDir)) fs.rmSync(baseDir, { recursive: true, force: true }) +if (fs.existsSync(srcBase)) fs.rmSync(srcBase, { recursive: true, force: true }) diff --git a/benchmark/exists.ts b/benchmark/exists.ts new file mode 100644 index 0000000..42b20a3 --- /dev/null +++ b/benchmark/exists.ts @@ -0,0 +1,48 @@ +import { run, bench, group } from 'mitata' +import * as fs from 'node:fs' +import { existsSync, accessSync } from '../index.js' + +const existingFile = 'package.json' +const existingDir = 'node_modules' +const nonExistent = '/tmp/hyper-fs-bench-nonexistent-path-12345' + +console.log('Benchmarking exists / access') + +// 1. existsSync — existing file +group('exists (existing file)', () => { + bench('Node.js', () => fs.existsSync(existingFile)).baseline() + bench('Hyper-FS', () => existsSync(existingFile)) +}) + +// 2. existsSync — non-existent path +group('exists (non-existent)', () => { + bench('Node.js', () => fs.existsSync(nonExistent)).baseline() + bench('Hyper-FS', () => existsSync(nonExistent)) +}) + +// 3. accessSync — existing file (F_OK) +group('access (existing file, F_OK)', () => { + bench('Node.js', () => fs.accessSync(existingFile, fs.constants.F_OK)).baseline() + bench('Hyper-FS', () => accessSync(existingFile)) +}) + +// 4. accessSync — existing dir (R_OK) +group('access (existing dir, R_OK)', () => { + bench('Node.js', () => fs.accessSync(existingDir, fs.constants.R_OK)).baseline() + bench('Hyper-FS', () => accessSync(existingDir, fs.constants.R_OK)) +}) + +// 5. Batch exists — check many files rapidly +const files = fs.readdirSync('src').map((f) => `src/${f}`) +console.log(`Batch exists target: ${files.length} files in src/`) + +group(`exists batch (${files.length} files)`, () => { + bench('Node.js', () => { + for (const f of files) fs.existsSync(f) + }).baseline() + bench('Hyper-FS', () => { + for (const f of files) existsSync(f) + }) +}) + +await run({ colors: true }) diff --git a/benchmark/glob.ts b/benchmark/glob.ts new file mode 100644 index 0000000..9e3d55c --- /dev/null +++ b/benchmark/glob.ts @@ -0,0 +1,44 @@ +import { run, bench, group } from 'mitata' +import { globSync as hyperGlobSync } from '../index.js' +import { globSync as nodeGlobSync } from 'glob' +import fastGlob from 'fast-glob' + +const cwd = process.cwd() + +// Patterns to test +const patternSimple = 'src/*.rs' +const patternRecursive = '**/*.rs' +const patternDeep = 'node_modules/**/*.json' + +console.log(`Benchmarking glob in: ${cwd}`) + +// 1. Simple Flat Glob +group('Glob (Simple: src/*.rs)', () => { + bench('node-glob', () => nodeGlobSync(patternSimple, { cwd })) + bench('fast-glob', () => fastGlob.sync(patternSimple, { cwd })) + bench('hyper-fs', () => hyperGlobSync(patternSimple, { cwd })).baseline() +}) + +// 2. Recursive Glob +group('Glob (Recursive: **/*.rs)', () => { + bench('node-glob', () => nodeGlobSync(patternRecursive, { cwd })) + bench('fast-glob', () => fastGlob.sync(patternRecursive, { cwd })) + bench('hyper-fs', () => hyperGlobSync(patternRecursive, { cwd })).baseline() +}) + +// 3. Deep Recursive (if node_modules exists) +// This is a stress test +group('Glob (Deep: node_modules/**/*.json)', () => { + // Only run if node_modules exists to avoid empty result bias + const hasNodeModules = fastGlob.sync('node_modules').length > 0 + if (hasNodeModules) { + bench('node-glob', () => nodeGlobSync(patternDeep, { cwd })) + bench('fast-glob', () => fastGlob.sync(patternDeep, { cwd })) + bench('hyper-fs', () => hyperGlobSync(patternDeep, { cwd })).baseline() + bench('hyper-fs (8 threads)', () => hyperGlobSync(patternDeep, { cwd, concurrency: 8 })) + } +}) + +await run({ + colors: true, +}) diff --git a/benchmark/mkdir.ts b/benchmark/mkdir.ts new file mode 100644 index 0000000..8db23b5 --- /dev/null +++ b/benchmark/mkdir.ts @@ -0,0 +1,96 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as os from 'node:os' +import { mkdirSync } from '../index.js' + +const tmpDir = path.join(os.tmpdir(), `hyper-fs-bench-mkdir-${Date.now()}`) +fs.mkdirSync(tmpDir, { recursive: true }) + +const iterations = 100 + +function benchmark(name: string, fn: () => void): number { + fn() + const times: number[] = [] + for (let i = 0; i < iterations; i++) { + const start = process.hrtime.bigint() + fn() + const end = process.hrtime.bigint() + times.push(Number(end - start) / 1_000_000) + } + return times.reduce((a, b) => a + b, 0) / times.length +} + +function runGroup(groupName: string, impls: { name: string; fn: () => void }[]) { + console.log(`\n${groupName}`) + const results: { name: string; time: number }[] = [] + + for (const impl of impls) { + results.push({ name: impl.name, time: benchmark(impl.name, impl.fn) }) + } + + const baseline = results[0] + for (const res of results) { + const isBaseline = res === baseline + const ratio = res.time / baseline.time + const diffStr = isBaseline ? '(baseline)' : `${ratio.toFixed(2)}x ${ratio > 1 ? '(slower)' : '(faster)'}` + console.log(` ${res.name.padEnd(25)} ${res.time.toFixed(3)} ms ${diffStr}`) + } +} + +console.log('Benchmarking mkdir') + +let counter = 0 + +// 1. Single directory creation +runGroup('mkdir (single dir)', [ + { + name: 'Node.js', + fn: () => { + const dir = path.join(tmpDir, `node-single-${counter++}`) + fs.mkdirSync(dir) + }, + }, + { + name: 'Hyper-FS', + fn: () => { + const dir = path.join(tmpDir, `hyper-single-${counter++}`) + mkdirSync(dir) + }, + }, +]) + +// 2. Recursive mkdir (deep path) +runGroup('mkdir (recursive, depth=5)', [ + { + name: 'Node.js', + fn: () => { + const dir = path.join(tmpDir, `node-deep-${counter++}`, 'a', 'b', 'c', 'd') + fs.mkdirSync(dir, { recursive: true }) + }, + }, + { + name: 'Hyper-FS', + fn: () => { + const dir = path.join(tmpDir, `hyper-deep-${counter++}`, 'a', 'b', 'c', 'd') + mkdirSync(dir, { recursive: true }) + }, + }, +]) + +// 3. Recursive mkdir on already-existing path (no-op scenario) +const existingDir = path.join(tmpDir, 'already-exists') +fs.mkdirSync(existingDir, { recursive: true }) + +runGroup('mkdir (recursive, already exists)', [ + { + name: 'Node.js', + fn: () => fs.mkdirSync(existingDir, { recursive: true }), + }, + { + name: 'Hyper-FS', + fn: () => mkdirSync(existingDir, { recursive: true }), + }, +]) + +// Cleanup +fs.rmSync(tmpDir, { recursive: true, force: true }) diff --git a/benchmark/read_file.ts b/benchmark/read_file.ts new file mode 100644 index 0000000..ab1fc57 --- /dev/null +++ b/benchmark/read_file.ts @@ -0,0 +1,69 @@ +import { run, bench, group } from 'mitata' +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as os from 'node:os' +import { readFileSync } from '../index.js' + +const tmpDir = path.join(os.tmpdir(), `hyper-fs-bench-readfile-${Date.now()}`) +fs.mkdirSync(tmpDir, { recursive: true }) + +// Prepare test files of various sizes +const smallFile = path.join(tmpDir, 'small.txt') +const mediumFile = path.join(tmpDir, 'medium.txt') +const largeFile = path.join(tmpDir, 'large.txt') + +fs.writeFileSync(smallFile, 'hello world') +fs.writeFileSync(mediumFile, 'x'.repeat(64 * 1024)) // 64KB +fs.writeFileSync(largeFile, 'x'.repeat(4 * 1024 * 1024)) // 4MB + +// Also use a real file for realistic benchmark +const realFile = 'package.json' + +console.log('Benchmarking readFile with various file sizes') + +// 1. Small file — Buffer +group('readFile (small 11B, Buffer)', () => { + bench('Node.js', () => fs.readFileSync(smallFile)).baseline() + bench('Hyper-FS', () => readFileSync(smallFile)) +}) + +// 2. Small file — UTF-8 string +group('readFile (small 11B, utf8)', () => { + bench('Node.js', () => fs.readFileSync(smallFile, 'utf8')).baseline() + bench('Hyper-FS', () => readFileSync(smallFile, { encoding: 'utf8' })) +}) + +// 3. Medium file — Buffer +group('readFile (64KB, Buffer)', () => { + bench('Node.js', () => fs.readFileSync(mediumFile)).baseline() + bench('Hyper-FS', () => readFileSync(mediumFile)) +}) + +// 4. Medium file — UTF-8 string +group('readFile (64KB, utf8)', () => { + bench('Node.js', () => fs.readFileSync(mediumFile, 'utf8')).baseline() + bench('Hyper-FS', () => readFileSync(mediumFile, { encoding: 'utf8' })) +}) + +// 5. Large file — Buffer +group('readFile (4MB, Buffer)', () => { + bench('Node.js', () => fs.readFileSync(largeFile)).baseline() + bench('Hyper-FS', () => readFileSync(largeFile)) +}) + +// 6. Large file — UTF-8 string +group('readFile (4MB, utf8)', () => { + bench('Node.js', () => fs.readFileSync(largeFile, 'utf8')).baseline() + bench('Hyper-FS', () => readFileSync(largeFile, { encoding: 'utf8' })) +}) + +// 7. Real file (package.json) — UTF-8 string +group('readFile (package.json, utf8)', () => { + bench('Node.js', () => fs.readFileSync(realFile, 'utf8')).baseline() + bench('Hyper-FS', () => readFileSync(realFile, { encoding: 'utf8' })) +}) + +await run({ colors: true }) + +// Cleanup +fs.rmSync(tmpDir, { recursive: true, force: true }) diff --git a/benchmark/rm.ts b/benchmark/rm.ts index 7251394..05333a9 100644 --- a/benchmark/rm.ts +++ b/benchmark/rm.ts @@ -6,7 +6,6 @@ import { rmSync as hyperRmSync } from '../index.js' const tmpDir = os.tmpdir() const baseDir = path.join(tmpDir, 'hyper-fs-bench-rm') -// Clean up previous runs if (fs.existsSync(baseDir)) { fs.rmSync(baseDir, { recursive: true, force: true }) } @@ -29,69 +28,71 @@ function createDeepStructure(dir: string, depth: number) { } } -async function runGroup(groupName: string, setupFn: (dir: string) => void) { - console.log(`\n${groupName}`) +function createTreeStructure(dir: string, breadth: number, depth: number) { + if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true }) + function build(current: string, level: number) { + if (level >= depth) return + for (let i = 0; i < breadth; i++) { + const sub = path.join(current, `d${level}-${i}`) + fs.mkdirSync(sub) + fs.writeFileSync(path.join(sub, 'file.txt'), 'content') + build(sub, level + 1) + } + } + build(dir, 0) +} - const implementations = [ - { name: 'Node.js fs.rmSync', fn: (p: string) => fs.rmSync(p, { recursive: true, force: true }) }, - { name: 'hyper-fs rmSync', fn: (p: string) => hyperRmSync(p, { recursive: true, force: true }) }, - { - name: 'hyper-fs rmSync (4 threads)', - fn: (p: string) => hyperRmSync(p, { recursive: true, force: true, concurrency: 4 }), - }, - ] +const implementations = [ + { name: 'Node.js', fn: (p: string) => fs.rmSync(p, { recursive: true, force: true }) }, + { name: 'Hyper-FS', fn: (p: string) => hyperRmSync(p, { recursive: true, force: true }) }, + { + name: 'Hyper-FS (4 threads)', + fn: (p: string) => hyperRmSync(p, { recursive: true, force: true, concurrency: 4 }), + }, +] +function runGroup(groupName: string, setupFn: (dir: string) => void) { + console.log(`\n${groupName}`) + const iterations = 10 const results: { name: string; time: number }[] = [] for (const impl of implementations) { const times: number[] = [] - const iterations = 10 - // Warmup (1 run) const warmupDir = path.join(baseDir, `warmup-${impl.name.replace(/[^a-zA-Z0-9]/g, '')}`) setupFn(warmupDir) impl.fn(warmupDir) for (let i = 0; i < iterations; i++) { const testDir = path.join(baseDir, `${impl.name.replace(/[^a-zA-Z0-9]/g, '-')}-${i}`) - setupFn(testDir) // Setup time NOT included + setupFn(testDir) const start = process.hrtime.bigint() - impl.fn(testDir) // Measured time + impl.fn(testDir) const end = process.hrtime.bigint() - const ms = Number(end - start) / 1_000_000 - times.push(ms) + times.push(Number(end - start) / 1_000_000) } const avg = times.reduce((a, b) => a + b, 0) / times.length results.push({ name: impl.name, time: avg }) } - // Render Mitata-like output - // Example: - // Node.js fs.rmSync 10.50 ms (baseline) - // hyper-fs rmSync 12.00 ms 1.14x (slower) - const baseline = results[0] - - results.forEach((res) => { + for (const res of results) { const isBaseline = res === baseline const ratio = res.time / baseline.time const diffStr = isBaseline ? '(baseline)' : `${ratio.toFixed(2)}x ${ratio > 1 ? '(slower)' : '(faster)'}` - - console.log(` ${res.name.padEnd(25)} ${res.time.toFixed(2)} ms ${diffStr}`) - }) + console.log(` ${res.name.padEnd(25)} ${res.time.toFixed(3)} ms ${diffStr}`) + } } -async function run() { - await runGroup('Flat directory (2000 files)', (dir) => createFlatStructure(dir, 2000)) - await runGroup('Deep nested directory (depth 100)', (dir) => createDeepStructure(dir, 100)) +console.log('Benchmarking rm (destructive — manual iterations)') - // Clean up - if (fs.existsSync(baseDir)) { - fs.rmSync(baseDir, { recursive: true, force: true }) - } -} +runGroup('Flat directory (2000 files)', (dir) => createFlatStructure(dir, 2000)) +runGroup('Deep nested directory (depth 100)', (dir) => createDeepStructure(dir, 100)) +runGroup('Tree structure (breadth=3, depth=4, ~120 nodes)', (dir) => createTreeStructure(dir, 3, 4)) -run() +if (fs.existsSync(baseDir)) { + fs.rmSync(baseDir, { recursive: true, force: true }) +} diff --git a/benchmark/stat.ts b/benchmark/stat.ts new file mode 100644 index 0000000..56e77f2 --- /dev/null +++ b/benchmark/stat.ts @@ -0,0 +1,41 @@ +import { run, bench, group } from 'mitata' +import * as fs from 'node:fs' +import { statSync, lstatSync } from '../index.js' + +const targetFile = 'package.json' +const targetDir = 'node_modules' + +console.log(`Benchmarking stat on: ${targetFile} and ${targetDir}`) + +// 1. stat on a file +group('stat (file)', () => { + bench('Node.js', () => fs.statSync(targetFile)).baseline() + bench('Hyper-FS', () => statSync(targetFile)) +}) + +// 2. stat on a directory +group('stat (directory)', () => { + bench('Node.js', () => fs.statSync(targetDir)).baseline() + bench('Hyper-FS', () => statSync(targetDir)) +}) + +// 3. lstat on a file +group('lstat (file)', () => { + bench('Node.js', () => fs.lstatSync(targetFile)).baseline() + bench('Hyper-FS', () => lstatSync(targetFile)) +}) + +// 4. Batch stat — stat multiple files in sequence +const files = fs.readdirSync('src').map((f) => `src/${f}`) +console.log(`Batch stat target: ${files.length} files in src/`) + +group(`stat batch (${files.length} files)`, () => { + bench('Node.js', () => { + for (const f of files) fs.statSync(f) + }).baseline() + bench('Hyper-FS', () => { + for (const f of files) statSync(f) + }) +}) + +await run({ colors: true }) diff --git a/benchmark/write_file.ts b/benchmark/write_file.ts new file mode 100644 index 0000000..f7e13a1 --- /dev/null +++ b/benchmark/write_file.ts @@ -0,0 +1,89 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' +import * as os from 'node:os' +import { writeFileSync, appendFileSync } from '../index.js' + +const tmpDir = path.join(os.tmpdir(), `hyper-fs-bench-writefile-${Date.now()}`) +fs.mkdirSync(tmpDir, { recursive: true }) + +const smallData = 'hello world' +const mediumData = 'x'.repeat(64 * 1024) // 64KB +const largeData = 'x'.repeat(4 * 1024 * 1024) // 4MB +const bufferData = Buffer.alloc(64 * 1024, 0x61) // 64KB Buffer + +const iterations = 50 + +function benchmark(name: string, fn: () => void): number { + // Warmup + fn() + + const times: number[] = [] + for (let i = 0; i < iterations; i++) { + const start = process.hrtime.bigint() + fn() + const end = process.hrtime.bigint() + times.push(Number(end - start) / 1_000_000) + } + const avg = times.reduce((a, b) => a + b, 0) / times.length + return avg +} + +function runGroup(groupName: string, impls: { name: string; fn: () => void }[]) { + console.log(`\n${groupName}`) + const results: { name: string; time: number }[] = [] + + for (const impl of impls) { + results.push({ name: impl.name, time: benchmark(impl.name, impl.fn) }) + } + + const baseline = results[0] + for (const res of results) { + const isBaseline = res === baseline + const ratio = res.time / baseline.time + const diffStr = isBaseline ? '(baseline)' : `${ratio.toFixed(2)}x ${ratio > 1 ? '(slower)' : '(faster)'}` + console.log(` ${res.name.padEnd(25)} ${res.time.toFixed(3)} ms ${diffStr}`) + } +} + +console.log('Benchmarking writeFile with various data sizes') + +let counter = 0 +const getPath = () => path.join(tmpDir, `file-${counter++}.txt`) + +// 1. Small string write +runGroup('writeFile (small 11B, string)', [ + { name: 'Node.js', fn: () => fs.writeFileSync(getPath(), smallData) }, + { name: 'Hyper-FS', fn: () => writeFileSync(getPath(), smallData) }, +]) + +// 2. Medium string write +runGroup('writeFile (64KB, string)', [ + { name: 'Node.js', fn: () => fs.writeFileSync(getPath(), mediumData) }, + { name: 'Hyper-FS', fn: () => writeFileSync(getPath(), mediumData) }, +]) + +// 3. Large string write +runGroup('writeFile (4MB, string)', [ + { name: 'Node.js', fn: () => fs.writeFileSync(getPath(), largeData) }, + { name: 'Hyper-FS', fn: () => writeFileSync(getPath(), largeData) }, +]) + +// 4. Buffer write +runGroup('writeFile (64KB, Buffer)', [ + { name: 'Node.js', fn: () => fs.writeFileSync(getPath(), bufferData) }, + { name: 'Hyper-FS', fn: () => writeFileSync(getPath(), bufferData) }, +]) + +// 5. appendFile +const appendTarget1 = path.join(tmpDir, 'append-node.txt') +const appendTarget2 = path.join(tmpDir, 'append-hyper.txt') +fs.writeFileSync(appendTarget1, '') +fs.writeFileSync(appendTarget2, '') + +runGroup('appendFile (small string)', [ + { name: 'Node.js', fn: () => fs.appendFileSync(appendTarget1, 'line\n') }, + { name: 'Hyper-FS', fn: () => appendFileSync(appendTarget2, 'line\n') }, +]) + +// Cleanup +fs.rmSync(tmpDir, { recursive: true, force: true }) diff --git a/index.d.ts b/index.d.ts index 9e51a57..c1ef9ce 100644 --- a/index.d.ts +++ b/index.d.ts @@ -1,27 +1,152 @@ /* auto-generated by NAPI-RS */ /* eslint-disable */ -export interface Dirent { - name: string - parentPath: string - isDir: boolean -} - -export declare function readdir(path: string, options?: ReaddirOptions | undefined | null): Promise - -/** * Reads the contents of a directory. - * @param {string | Buffer | URL} path - * @param {string | { - * encoding?: string; - * withFileTypes?: boolean; - * recursive?: boolean; - * }} [options] - * @param {( - * err?: Error, - * files?: string[] | Buffer[] | Dirent[] - * ) => any} callback - * @returns {void} - */ +export declare class Dirent { + readonly name: string + readonly parentPath: string + isFile(): boolean + isDirectory(): boolean + isSymbolicLink(): boolean + isBlockDevice(): boolean + isCharacterDevice(): boolean + isFIFO(): boolean + isSocket(): boolean + get path(): string +} + +export declare class Stats { + readonly dev: number + readonly mode: number + readonly nlink: number + readonly uid: number + readonly gid: number + readonly rdev: number + readonly blksize: number + readonly ino: number + readonly size: number + readonly blocks: number + readonly atimeMs: number + readonly mtimeMs: number + readonly ctimeMs: number + readonly birthtimeMs: number + isFile(): boolean + isDirectory(): boolean + isSymbolicLink(): boolean + isBlockDevice(): boolean + isCharacterDevice(): boolean + isFIFO(): boolean + isSocket(): boolean + /** Returns atime as a Date object (Node.js compatible) */ + get atime(): Date + /** Returns mtime as a Date object (Node.js compatible) */ + get mtime(): Date + /** Returns ctime as a Date object (Node.js compatible) */ + get ctime(): Date + /** Returns birthtime as a Date object (Node.js compatible) */ + get birthtime(): Date +} + +export declare function access(path: string, mode?: number | undefined | null): Promise + +export declare function accessSync(path: string, mode?: number | undefined | null): void + +export declare function appendFile( + path: string, + data: string | Buffer, + options?: WriteFileOptions | undefined | null, +): Promise + +export declare function appendFileSync( + path: string, + data: string | Buffer, + options?: WriteFileOptions | undefined | null, +): void + +export declare function chmod(path: string, mode: number): Promise + +export declare function chmodSync(path: string, mode: number): void + +export declare function chown(path: string, uid: number, gid: number): Promise + +export declare function chownSync(path: string, uid: number, gid: number): void + +export declare function copyFile(src: string, dest: string, mode?: number | undefined | null): Promise + +export declare function copyFileSync(src: string, dest: string, mode?: number | undefined | null): void + +export declare function cp(src: string, dest: string, options?: CpOptions | undefined | null): Promise + +export interface CpOptions { + recursive?: boolean + force?: boolean + errorOnExist?: boolean + preserveTimestamps?: boolean + dereference?: boolean + verbatimSymlinks?: boolean + /** + * Hyper-FS extension: number of parallel threads for recursive copy. + * 0 or 1 means sequential; > 1 enables rayon parallel traversal. + */ + concurrency?: number +} + +export declare function cpSync(src: string, dest: string, options?: CpOptions | undefined | null): void + +export declare function exists(path: string): Promise + +export declare function existsSync(path: string): boolean + +export declare function glob( + pattern: string, + options?: GlobOptions | undefined | null, +): Promise | Array> + +export interface GlobOptions { + cwd?: string + withFileTypes?: boolean + exclude?: Array + concurrency?: number + /** Respect .gitignore / .ignore files (default: true) */ + gitIgnore?: boolean +} + +export declare function globSync( + pattern: string, + options?: GlobOptions | undefined | null, +): Array | Array + +export declare function link(existingPath: string, newPath: string): Promise + +export declare function linkSync(existingPath: string, newPath: string): void + +export declare function lstat(path: string): Promise + +export declare function lstatSync(path: string): Stats + +export declare function mkdir(path: string, options?: MkdirOptions | undefined | null): Promise + +export interface MkdirOptions { + recursive?: boolean + mode?: number +} + +export declare function mkdirSync(path: string, options?: MkdirOptions | undefined | null): string | null + +export declare function mkdtemp(prefix: string): Promise + +export declare function mkdtempSync(prefix: string): string + +export declare function readdir( + path: string, + options?: ReaddirOptions | undefined | null, +): Promise | Array> + export interface ReaddirOptions { + /** + * File name encoding. 'utf8' (default) returns strings. + * 'buffer' returns Buffer objects for each name (not yet supported, treated as 'utf8'). + * Other values are treated as 'utf8'. + */ + encoding?: string skipHidden?: boolean concurrency?: number recursive?: boolean @@ -33,19 +158,42 @@ export declare function readdirSync( options?: ReaddirOptions | undefined | null, ): Array | Array -export declare function rm(path: string, options?: RmOptions | undefined | null): Promise - -/** * Asynchronously removes files and - * directories (modeled on the standard POSIX `rm` utility). - * @param {string | Buffer | URL} path - * @param {{ - * force?: boolean; - * maxRetries?: number; - * recursive?: boolean; - * retryDelay?: number; - * }} [options] - * @param {(err?: Error) => any} callback - * @returns {void} +export declare function readFile(path: string, options?: ReadFileOptions | undefined | null): Promise + +export interface ReadFileOptions { + encoding?: string + flag?: string +} + +export declare function readFileSync(path: string, options?: ReadFileOptions | undefined | null): string | Buffer + +export declare function readlink(path: string): Promise + +export declare function readlinkSync(path: string): string + +export declare function realpath(path: string): Promise + +export declare function realpathSync(path: string): string + +export declare function rename(oldPath: string, newPath: string): Promise + +export declare function renameSync(oldPath: string, newPath: string): void + +export declare function rm(path: string, options?: RmOptions | undefined | null): Promise + +export declare function rmdir(path: string): Promise + +export declare function rmdirSync(path: string): void + +/** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * + * - `force`: When true, silently ignore errors when path does not exist. + * - `recursive`: When true, remove directory and all its contents. + * - `maxRetries`: If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or `EPERM` error is + * encountered, retries with a linear backoff of `retryDelay` ms on each try. + * - `retryDelay`: The amount of time in milliseconds to wait between retries (default 100ms). + * - `concurrency` (hyper-fs extension): Number of parallel threads for recursive removal. */ export interface RmOptions { force?: boolean @@ -56,3 +204,51 @@ export interface RmOptions { } export declare function rmSync(path: string, options?: RmOptions | undefined | null): void + +export declare function stat(path: string): Promise + +export declare function statSync(path: string): Stats + +export declare function symlink( + target: string, + path: string, + /** On Windows: 'file' | 'dir' | 'junction'. Ignored on Unix. */ + symlinkType?: string | undefined | null, +): Promise + +export declare function symlinkSync( + target: string, + path: string, + /** On Windows: 'file' | 'dir' | 'junction'. Ignored on Unix. */ + symlinkType?: string | undefined | null, +): void + +export declare function truncate(path: string, len?: number | undefined | null): Promise + +export declare function truncateSync(path: string, len?: number | undefined | null): void + +export declare function unlink(path: string): Promise + +export declare function unlinkSync(path: string): void + +export declare function utimes(path: string, atime: number, mtime: number): Promise + +export declare function utimesSync(path: string, atime: number, mtime: number): void + +export declare function writeFile( + path: string, + data: string | Buffer, + options?: WriteFileOptions | undefined | null, +): Promise + +export interface WriteFileOptions { + encoding?: string + mode?: number + flag?: string +} + +export declare function writeFileSync( + path: string, + data: string | Buffer, + options?: WriteFileOptions | undefined | null, +): void diff --git a/index.js b/index.js index 8e0cb2d..111236f 100644 --- a/index.js +++ b/index.js @@ -572,7 +572,55 @@ if (!nativeBinding) { } module.exports = nativeBinding +module.exports.Dirent = nativeBinding.Dirent +module.exports.Stats = nativeBinding.Stats +module.exports.access = nativeBinding.access +module.exports.accessSync = nativeBinding.accessSync +module.exports.appendFile = nativeBinding.appendFile +module.exports.appendFileSync = nativeBinding.appendFileSync +module.exports.chmod = nativeBinding.chmod +module.exports.chmodSync = nativeBinding.chmodSync +module.exports.chown = nativeBinding.chown +module.exports.chownSync = nativeBinding.chownSync +module.exports.copyFile = nativeBinding.copyFile +module.exports.copyFileSync = nativeBinding.copyFileSync +module.exports.cp = nativeBinding.cp +module.exports.cpSync = nativeBinding.cpSync +module.exports.exists = nativeBinding.exists +module.exports.existsSync = nativeBinding.existsSync +module.exports.glob = nativeBinding.glob +module.exports.globSync = nativeBinding.globSync +module.exports.link = nativeBinding.link +module.exports.linkSync = nativeBinding.linkSync +module.exports.lstat = nativeBinding.lstat +module.exports.lstatSync = nativeBinding.lstatSync +module.exports.mkdir = nativeBinding.mkdir +module.exports.mkdirSync = nativeBinding.mkdirSync +module.exports.mkdtemp = nativeBinding.mkdtemp +module.exports.mkdtempSync = nativeBinding.mkdtempSync module.exports.readdir = nativeBinding.readdir module.exports.readdirSync = nativeBinding.readdirSync +module.exports.readFile = nativeBinding.readFile +module.exports.readFileSync = nativeBinding.readFileSync +module.exports.readlink = nativeBinding.readlink +module.exports.readlinkSync = nativeBinding.readlinkSync +module.exports.realpath = nativeBinding.realpath +module.exports.realpathSync = nativeBinding.realpathSync +module.exports.rename = nativeBinding.rename +module.exports.renameSync = nativeBinding.renameSync module.exports.rm = nativeBinding.rm +module.exports.rmdir = nativeBinding.rmdir +module.exports.rmdirSync = nativeBinding.rmdirSync module.exports.rmSync = nativeBinding.rmSync +module.exports.stat = nativeBinding.stat +module.exports.statSync = nativeBinding.statSync +module.exports.symlink = nativeBinding.symlink +module.exports.symlinkSync = nativeBinding.symlinkSync +module.exports.truncate = nativeBinding.truncate +module.exports.truncateSync = nativeBinding.truncateSync +module.exports.unlink = nativeBinding.unlink +module.exports.unlinkSync = nativeBinding.unlinkSync +module.exports.utimes = nativeBinding.utimes +module.exports.utimesSync = nativeBinding.utimesSync +module.exports.writeFile = nativeBinding.writeFile +module.exports.writeFileSync = nativeBinding.writeFileSync diff --git a/package.json b/package.json index d5af444..e95836d 100644 --- a/package.json +++ b/package.json @@ -1,21 +1,27 @@ { "name": "hyper-fs", "version": "0.0.1", - "description": "Template project for writing node package with napi-rs", + "description": "High-performance drop-in replacement for Node.js fs module, powered by Rust", "main": "index.js", "repository": { "type": "git", - "url": "git+ssh://git@github.com/napi-rs/package-template.git" + "url": "git+https://github.com/hyper-fs/hyper-fs.git" }, "license": "MIT", "browser": "browser.js", "keywords": [ + "fs", + "filesystem", + "file-system", + "performance", + "rust", + "napi", "napi-rs", - "NAPI", - "N-API", - "Rust", - "node-addon", - "node-addon-api" + "native", + "readdir", + "glob", + "readFile", + "writeFile" ], "files": [ "index.d.ts", @@ -32,7 +38,7 @@ ] }, "engines": { - "node": ">= 10.16.0 < 11 || >= 11.8.0 < 12 || >= 12.0.0" + "node": ">= 18.0.0" }, "publishConfig": { "registry": "https://registry.npmjs.org/", @@ -64,14 +70,16 @@ "@types/node": "^20.0.0", "ava": "^6.4.1", "chalk": "^5.6.2", + "fast-glob": "^3.3.3", + "glob": "^13.0.0", "husky": "^9.1.7", "lint-staged": "^16.1.6", + "mitata": "^1.0.34", "npm-run-all2": "^8.0.4", "oxlint": "^1.14.0", "prettier": "^3.6.2", "tinybench": "^5.0.1", - "typescript": "^5.9.2", - "mitata": "^1.0.34" + "typescript": "^5.9.2" }, "lint-staged": { "*.@(js|ts|tsx)": [ @@ -105,5 +113,5 @@ "singleQuote": true, "arrowParens": "always" }, - "packageManager": "pnpm@9.15.0" + "packageManager": "pnpm@9.15.4" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 11f820c..d49c3af 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -34,6 +34,12 @@ importers: chalk: specifier: ^5.6.2 version: 5.6.2 + fast-glob: + specifier: ^3.3.3 + version: 3.3.3 + glob: + specifier: ^13.0.0 + version: 13.0.0 husky: specifier: ^9.1.7 version: 9.1.7 @@ -222,6 +228,16 @@ packages: '@types/node': optional: true + '@isaacs/balanced-match@4.0.1': + resolution: + { integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ== } + engines: { node: 20 || >=22 } + + '@isaacs/brace-expansion@5.0.0': + resolution: + { integrity: sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA== } + engines: { node: 20 || >=22 } + '@isaacs/cliui@8.0.2': resolution: { integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== } @@ -1288,6 +1304,11 @@ packages: { integrity: sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg== } hasBin: true + glob@13.0.0: + resolution: + { integrity: sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA== } + engines: { node: 20 || >=22 } + globby@14.1.0: resolution: { integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA== } @@ -1444,6 +1465,11 @@ packages: resolution: { integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== } + lru-cache@11.2.4: + resolution: + { integrity: sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg== } + engines: { node: 20 || >=22 } + matcher@5.0.0: resolution: { integrity: sha512-s2EMBOWtXFc8dgqvoAzKJXxNHibcdJMV0gwqKUaw9E2JBJuGUK7DrNKrA6g/i+v72TT16+6sVm5mS3thaMLQUw== } @@ -1479,6 +1505,11 @@ packages: { integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA== } engines: { node: '>=18' } + minimatch@10.1.1: + resolution: + { integrity: sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ== } + engines: { node: 20 || >=22 } + minimatch@9.0.5: resolution: { integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== } @@ -1598,6 +1629,11 @@ packages: { integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== } engines: { node: '>=16 || 14 >=14.18' } + path-scurry@2.0.1: + resolution: + { integrity: sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA== } + engines: { node: 20 || >=22 } + path-type@6.0.0: resolution: { integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ== } @@ -2046,6 +2082,12 @@ snapshots: optionalDependencies: '@types/node': 20.19.25 + '@isaacs/balanced-match@4.0.1': {} + + '@isaacs/brace-expansion@5.0.0': + dependencies: + '@isaacs/balanced-match': 4.0.1 + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -2814,6 +2856,12 @@ snapshots: package-json-from-dist: 1.0.1 path-scurry: 1.11.1 + glob@13.0.0: + dependencies: + minimatch: 10.1.1 + minipass: 7.1.2 + path-scurry: 2.0.1 + globby@14.1.0: dependencies: '@sindresorhus/merge-streams': 2.3.0 @@ -2926,6 +2974,8 @@ snapshots: lru-cache@10.4.3: {} + lru-cache@11.2.4: {} + matcher@5.0.0: dependencies: escape-string-regexp: 5.0.0 @@ -2949,6 +2999,10 @@ snapshots: mimic-function@5.0.1: {} + minimatch@10.1.1: + dependencies: + '@isaacs/brace-expansion': 5.0.0 + minimatch@9.0.5: dependencies: brace-expansion: 2.0.2 @@ -3027,6 +3081,11 @@ snapshots: lru-cache: 10.4.3 minipass: 7.1.2 + path-scurry@2.0.1: + dependencies: + lru-cache: 11.2.4 + minipass: 7.1.2 + path-type@6.0.0: {} picomatch@2.3.1: {} diff --git a/reference/fs.js b/reference/fs.js new file mode 100644 index 0000000..7336c3f --- /dev/null +++ b/reference/fs.js @@ -0,0 +1,3196 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// Maintainers, keep in mind that ES1-style octal literals (`0666`) are not +// allowed in strict mode. Use ES6-style octal literals instead (`0o666`). + +'use strict' + +const { + ArrayFromAsync, + ArrayPrototypePush, + BigIntPrototypeToString, + Boolean, + FunctionPrototypeCall, + MathMax, + Number, + ObjectDefineProperties, + ObjectDefineProperty, + Promise, + PromisePrototypeThen, + PromiseResolve, + ReflectApply, + SafeMap, + SafeSet, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + SymbolDispose, + uncurryThis, +} = primordials + +const { fs: constants } = internalBinding('constants') +const { S_IFIFO, S_IFLNK, S_IFMT, S_IFREG, S_IFSOCK, F_OK, O_WRONLY, O_SYMLINK } = constants + +const pathModule = require('path') +const { isArrayBufferView } = require('internal/util/types') + +const binding = internalBinding('fs') + +const { createBlobFromFilePath } = require('internal/blob') + +const { Buffer } = require('buffer') +const { isBuffer: BufferIsBuffer } = Buffer +const BufferToString = uncurryThis(Buffer.prototype.toString) +const { + AbortError, + aggregateTwoErrors, + codes: { ERR_ACCESS_DENIED, ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE }, +} = require('internal/errors') + +const { FSReqCallback, statValues } = binding +const { toPathIfFileURL } = require('internal/url') +const { + customPromisifyArgs: kCustomPromisifyArgsSymbol, + getLazy, + kEmptyObject, + promisify: { custom: kCustomPromisifiedSymbol }, + SideEffectFreeRegExpPrototypeExec, + defineLazyProperties, + isWindows, + isMacOS, +} = require('internal/util') +const { + constants: { kIoMaxLength, kMaxUserId }, + copyObject, + Dirent, + getDirent, + getDirents, + getOptions, + getValidatedFd, + getValidatedPath, + handleErrorFromBinding, + preprocessSymlinkDestination, + Stats, + getStatFsFromBinding, + getStatsFromBinding, + realpathCacheKey, + stringToFlags, + stringToSymlinkType, + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePath, + validatePosition, + validateRmOptions, + validateRmOptionsSync, + validateRmdirOptions, + validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} = require('internal/fs/utils') +const { CHAR_FORWARD_SLASH, CHAR_BACKWARD_SLASH } = require('internal/constants') +const { + isInt32, + parseFileMode, + validateBoolean, + validateBuffer, + validateEncoding, + validateFunction, + validateInteger, + validateObject, + validateOneOf, + validateString, + kValidateObjectAllowNullable, +} = require('internal/validators') + +const permission = require('internal/process/permission') + +let fs + +// Lazy loaded +let cpFn +let cpSyncFn +let promises = null +let ReadStream +let WriteStream +let rimraf +let kResistStopPropagation +let ReadFileContext + +// These have to be separate because of how graceful-fs happens to do it's +// monkeypatching. +let FileReadStream +let FileWriteStream +let Utf8Stream + +function lazyLoadUtf8Stream() { + Utf8Stream ??= require('internal/streams/fast-utf8-stream') +} + +// Ensure that callbacks run in the global context. Only use this function +// for callbacks that are passed to the binding layer, callbacks that are +// invoked from JS already run in the proper scope. +function makeCallback(cb) { + validateFunction(cb, 'cb') + + return (...args) => ReflectApply(cb, this, args) +} + +// Special case of `makeCallback()` that is specific to async `*stat()` calls as +// an optimization, since the data passed back to the callback needs to be +// transformed anyway. +function makeStatsCallback(cb) { + validateFunction(cb, 'cb') + + return (err, stats) => { + if (err) return cb(err) + if (stats === undefined && err === null) return cb(null, undefined) + cb(err, getStatsFromBinding(stats)) + } +} + +const isFd = isInt32 + +function isFileType(stats, fileType) { + // Use stats array directly to avoid creating an fs.Stats instance just for + // our internal use. + let mode = stats[1] + if (typeof mode === 'bigint') mode = Number(mode) + return (mode & S_IFMT) === fileType +} + +/** + * Tests a user's permissions for the file or directory + * specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function access(path, mode, callback) { + if (typeof mode === 'function') { + callback = mode + mode = F_OK + } + + path = getValidatedPath(path) + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.access(path, mode, req) +} + +/** + * Synchronously tests a user's permissions for the file or + * directory specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @returns {void} + */ +function accessSync(path, mode) { + binding.access(getValidatedPath(path), mode) +} + +/** + * Tests whether or not the given path exists. + * @param {string | Buffer | URL} path + * @param {(exists?: boolean) => any} callback + * @returns {void} + */ +function exists(path, callback) { + validateFunction(callback, 'cb') + + function suppressedCallback(err) { + callback(!err) + } + + try { + fs.access(path, F_OK, suppressedCallback) + } catch { + return callback(false) + } +} + +ObjectDefineProperty(exists, kCustomPromisifiedSymbol, { + __proto__: null, + value: function exists(path) { + // eslint-disable-line func-name-matching + return new Promise((resolve) => fs.exists(path, resolve)) + }, +}) + +let showExistsDeprecation = true +/** + * Synchronously tests whether or not the given path exists. + * @param {string | Buffer | URL} path + * @returns {boolean} + */ +function existsSync(path) { + try { + path = getValidatedPath(path) + } catch (err) { + if (showExistsDeprecation && err?.code === 'ERR_INVALID_ARG_TYPE') { + process.emitWarning( + 'Passing invalid argument types to fs.existsSync is deprecated', + 'DeprecationWarning', + 'DEP0187', + ) + showExistsDeprecation = false + } + return false + } + + return binding.existsSync(path) +} + +function readFileAfterOpen(err, fd) { + const context = this.context + + if (err) { + context.callback(err) + return + } + + context.fd = fd + + const req = new FSReqCallback() + req.oncomplete = readFileAfterStat + req.context = context + binding.fstat(fd, false, req) +} + +function readFileAfterStat(err, stats) { + const context = this.context + + if (err) return context.close(err) + + // TODO(BridgeAR): Check if allocating a smaller chunk is better performance + // wise, similar to the promise based version (less peak memory and chunked + // stringify operations vs multiple C++/JS boundary crossings). + const size = (context.size = isFileType(stats, S_IFREG) ? stats[8] : 0) + + if (size > kIoMaxLength) { + err = new ERR_FS_FILE_TOO_LARGE(size) + return context.close(err) + } + + try { + if (size === 0) { + // TODO(BridgeAR): If an encoding is set, use the StringDecoder to concat + // the result and reuse the buffer instead of allocating a new one. + context.buffers = [] + } else { + context.buffer = Buffer.allocUnsafeSlow(size) + } + } catch (err) { + return context.close(err) + } + context.read() +} + +function checkAborted(signal, callback) { + if (signal?.aborted) { + callback(new AbortError(undefined, { cause: signal.reason })) + return true + } + return false +} + +/** + * Asynchronously reads the entire contents of a file. + * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * signal?: AbortSignal; + * } | string} [options] + * @param {( + * err?: Error, + * data?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readFile(path, options, callback) { + callback ||= options + validateFunction(callback, 'cb') + options = getOptions(options, { flag: 'r' }) + ReadFileContext ??= require('internal/fs/read/context') + const context = new ReadFileContext(callback, options.encoding) + context.isUserFd = isFd(path) // File descriptor ownership + + if (options.signal) { + context.signal = options.signal + } + if (context.isUserFd) { + process.nextTick(function tick(context) { + FunctionPrototypeCall(readFileAfterOpen, { context }, null, path) + }, context) + return + } + + if (checkAborted(options.signal, callback)) return + + const flagsNumber = stringToFlags(options.flag, 'options.flag') + const req = new FSReqCallback() + req.context = context + req.oncomplete = readFileAfterOpen + binding.open(getValidatedPath(path), flagsNumber, 0o666, req) +} + +function tryStatSync(fd, isUserFd) { + const stats = binding.fstat(fd, false, undefined, true /* shouldNotThrow */) + if (stats === undefined && !isUserFd) { + fs.closeSync(fd) + } + return stats +} + +function tryCreateBuffer(size, fd, isUserFd) { + let threw = true + let buffer + try { + if (size > kIoMaxLength) { + throw new ERR_FS_FILE_TOO_LARGE(size) + } + buffer = Buffer.allocUnsafe(size) + threw = false + } finally { + if (threw && !isUserFd) fs.closeSync(fd) + } + return buffer +} + +function tryReadSync(fd, isUserFd, buffer, pos, len) { + let threw = true + let bytesRead + try { + bytesRead = fs.readSync(fd, buffer, pos, len) + threw = false + } finally { + if (threw && !isUserFd) fs.closeSync(fd) + } + return bytesRead +} + +/** + * Synchronously reads the entire contents of a file. + * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * }} [options] + * @returns {string | Buffer} + */ +function readFileSync(path, options) { + options = getOptions(options, { flag: 'r' }) + + if (options.encoding === 'utf8' || options.encoding === 'utf-8') { + if (!isInt32(path)) { + path = getValidatedPath(path) + } + return binding.readFileUtf8(path, stringToFlags(options.flag)) + } + + const isUserFd = isFd(path) // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666) + + const stats = tryStatSync(fd, isUserFd) + const size = isFileType(stats, S_IFREG) ? stats[8] : 0 + let pos = 0 + let buffer // Single buffer with file data + let buffers // List for when size is unknown + + if (size === 0) { + buffers = [] + } else { + buffer = tryCreateBuffer(size, fd, isUserFd) + } + + let bytesRead + + if (size !== 0) { + do { + bytesRead = tryReadSync(fd, isUserFd, buffer, pos, size - pos) + pos += bytesRead + } while (bytesRead !== 0 && pos < size) + } else { + do { + // The kernel lies about many files. + // Go ahead and try to read some bytes. + buffer = Buffer.allocUnsafe(8192) + bytesRead = tryReadSync(fd, isUserFd, buffer, 0, 8192) + if (bytesRead !== 0) { + ArrayPrototypePush(buffers, buffer.slice(0, bytesRead)) + } + pos += bytesRead + } while (bytesRead !== 0) + } + + if (!isUserFd) fs.closeSync(fd) + + if (size === 0) { + // Data was collected into the buffers list. + buffer = Buffer.concat(buffers, pos) + } else if (pos < size) { + buffer = buffer.slice(0, pos) + } + + if (options.encoding) buffer = buffer.toString(options.encoding) + return buffer +} + +function defaultCloseCallback(err) { + if (err != null) throw err +} + +/** + * Closes the file descriptor. + * @param {number} fd + * @param {(err?: Error) => any} [callback] + * @returns {void} + */ +function close(fd, callback = defaultCloseCallback) { + if (callback !== defaultCloseCallback) callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.close(fd, req) +} + +/** + * Synchronously closes the file descriptor. + * @param {number} fd + * @returns {void} + */ +function closeSync(fd) { + binding.close(fd) +} + +/** + * Asynchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @param {( + * err?: Error, + * fd?: number + * ) => any} callback + * @returns {void} + */ +function open(path, flags, mode, callback) { + path = getValidatedPath(path) + if (arguments.length < 3) { + callback = flags + flags = 'r' + mode = 0o666 + } else if (typeof mode === 'function') { + callback = mode + mode = 0o666 + } else { + mode = parseFileMode(mode, 'mode', 0o666) + } + const flagsNumber = stringToFlags(flags) + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + + binding.open(path, flagsNumber, mode, req) +} + +/** + * Synchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @returns {number} + */ +function openSync(path, flags, mode) { + return binding.open(getValidatedPath(path), stringToFlags(flags), parseFileMode(mode, 'mode', 0o666)) +} + +/** + * @param {string | Buffer | URL } path + * @param {{ + * type?: string; + * }} [options] + * @returns {Promise} + */ +function openAsBlob(path, options = kEmptyObject) { + validateObject(options, 'options') + const type = options.type || '' + validateString(type, 'options.type') + // The underlying implementation here returns the Blob synchronously for now. + // To give ourselves flexibility to maybe return the Blob asynchronously, + // this API returns a Promise. + path = getValidatedPath(path) + return PromiseResolve(createBlobFromFilePath(path, { type })) +} + +/** + * Reads file from the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} length + * @param {number | bigint | null} position + * @param {( + * err?: Error, + * bytesRead?: number, + * buffer?: Buffer + * ) => any} callback + * @returns {void} + */ +function read(fd, buffer, offsetOrOptions, length, position, callback) { + fd = getValidatedFd(fd) + let offset = offsetOrOptions + let params = null + if (arguments.length <= 4) { + if (arguments.length === 4) { + // This is fs.read(fd, buffer, options, callback) + validateObject(offsetOrOptions, 'options', kValidateObjectAllowNullable) + callback = length + params = offsetOrOptions + } else if (arguments.length === 3) { + // This is fs.read(fd, bufferOrParams, callback) + if (!isArrayBufferView(buffer)) { + // This is fs.read(fd, params, callback) + params = buffer + ;({ buffer = Buffer.alloc(16384) } = params ?? kEmptyObject) + } + callback = offsetOrOptions + } else { + // This is fs.read(fd, callback) + callback = buffer + buffer = Buffer.alloc(16384) + } + + if (params !== undefined) { + validateObject(params, 'options', kValidateObjectAllowNullable) + } + ;({ offset = 0, length = buffer?.byteLength - offset, position = null } = params ?? kEmptyObject) + } + + validateBuffer(buffer) + validateFunction(callback, 'cb') + + if (offset == null) { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + + length |= 0 + + if (length === 0) { + return process.nextTick(function tick() { + callback(null, 0, buffer) + }) + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE('buffer', buffer, 'is empty and cannot be written') + } + + validateOffsetLengthRead(offset, length, buffer.byteLength) + + if (position == null) { + position = -1 + } else { + validatePosition(position, 'position', length) + } + + function wrapper(err, bytesRead) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, bytesRead || 0, buffer) + } + + const req = new FSReqCallback() + req.oncomplete = wrapper + + binding.read(fd, buffer, offset, length, position, req) +} + +ObjectDefineProperty(read, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ['bytesRead', 'buffer'], + enumerable: false, +}) + +/** + * Synchronously reads the file from the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function readSync(fd, buffer, offsetOrOptions, length, position) { + validateBuffer(buffer) + + let offset = offsetOrOptions + if (arguments.length <= 3 || typeof offsetOrOptions === 'object') { + if (offsetOrOptions !== undefined) { + validateObject(offsetOrOptions, 'options', kValidateObjectAllowNullable) + } + + ;({ offset = 0, length = buffer.byteLength - offset, position = null } = offsetOrOptions ?? kEmptyObject) + } + + if (offset === undefined) { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + + length |= 0 + + if (length === 0) { + return 0 + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE('buffer', buffer, 'is empty and cannot be written') + } + + validateOffsetLengthRead(offset, length, buffer.byteLength) + + if (position == null) { + position = -1 + } else { + validatePosition(position, 'position', length) + } + + return binding.read(fd, buffer, offset, length, position) +} + +/** + * Reads file from the specified `fd` (file descriptor) + * and writes to an array of `ArrayBufferView`s. + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesRead?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function readv(fd, buffers, position, callback) { + function wrapper(err, read) { + callback(err, read || 0, buffers) + } + + fd = getValidatedFd(fd) + validateBufferArray(buffers) + callback ||= position + validateFunction(callback, 'cb') + + const req = new FSReqCallback() + req.oncomplete = wrapper + + if (typeof position !== 'number') position = null + + binding.readBuffers(fd, buffers, position, req) +} + +ObjectDefineProperty(readv, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ['bytesRead', 'buffers'], + enumerable: false, +}) + +/** + * Synchronously reads file from the + * specified `fd` (file descriptor) and writes to an array + * of `ArrayBufferView`s. + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function readvSync(fd, buffers, position) { + validateBufferArray(buffers) + + if (typeof position !== 'number') position = null + + return binding.readBuffers(fd, buffers, position) +} + +/** + * Writes `buffer` to the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {number | object} [offsetOrOptions] + * @param {number} [length] + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffer?: Buffer | TypedArray | DataView + * ) => any} callback + * @returns {void} + */ +function write(fd, buffer, offsetOrOptions, length, position, callback) { + function wrapper(err, written) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, written || 0, buffer) + } + + fd = getValidatedFd(fd) + let offset = offsetOrOptions + if (isArrayBufferView(buffer)) { + callback ||= position || length || offset + validateFunction(callback, 'cb') + + if (typeof offset === 'object') { + ;({ offset = 0, length = buffer.byteLength - offset, position = null } = offsetOrOptions ?? kEmptyObject) + } + + if (offset == null || typeof offset === 'function') { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + if (typeof length !== 'number') length = buffer.byteLength - offset + if (typeof position !== 'number') position = null + validateOffsetLengthWrite(offset, length, buffer.byteLength) + + const req = new FSReqCallback() + req.oncomplete = wrapper + binding.writeBuffer(fd, buffer, offset, length, position, req) + return + } + + validateStringAfterArrayBufferView(buffer, 'buffer') + + if (typeof position !== 'function') { + if (typeof offset === 'function') { + position = offset + offset = null + } else { + position = length + } + length = 'utf8' + } + + const str = buffer + validateEncoding(str, length) + callback = position + validateFunction(callback, 'cb') + + const req = new FSReqCallback() + req.oncomplete = wrapper + binding.writeString(fd, str, offset, length, req) +} + +ObjectDefineProperty(write, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ['bytesWritten', 'buffer'], + enumerable: false, +}) + +/** + * Synchronously writes `buffer` to the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {{ + * offset?: number; + * length?: number; + * position?: number | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function writeSync(fd, buffer, offsetOrOptions, length, position) { + const ctx = {} + let result + + let offset = offsetOrOptions + if (isArrayBufferView(buffer)) { + if (typeof offset === 'object') { + ;({ offset = 0, length = buffer.byteLength - offset, position = null } = offsetOrOptions ?? kEmptyObject) + } + if (position === undefined) position = null + if (offset == null) { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + if (typeof length !== 'number') length = buffer.byteLength - offset + validateOffsetLengthWrite(offset, length, buffer.byteLength) + result = binding.writeBuffer(fd, buffer, offset, length, position, undefined, ctx) + } else { + validateStringAfterArrayBufferView(buffer, 'buffer') + validateEncoding(buffer, length) + + if (offset === undefined) offset = null + result = binding.writeString(fd, buffer, offset, length, undefined, ctx) + } + handleErrorFromBinding(ctx) + return result +} + +/** + * Writes an array of `ArrayBufferView`s to the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function writev(fd, buffers, position, callback) { + function wrapper(err, written) { + callback(err, written || 0, buffers) + } + + fd = getValidatedFd(fd) + validateBufferArray(buffers) + callback ||= position + validateFunction(callback, 'cb') + + if (buffers.length === 0) { + process.nextTick(callback, null, 0, buffers) + return + } + + const req = new FSReqCallback() + req.oncomplete = wrapper + + if (typeof position !== 'number') position = null + + binding.writeBuffers(fd, buffers, position, req) +} + +ObjectDefineProperty(writev, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ['bytesWritten', 'buffer'], + enumerable: false, +}) + +/** + * Synchronously writes an array of `ArrayBufferView`s + * to the specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function writevSync(fd, buffers, position) { + validateBufferArray(buffers) + + if (buffers.length === 0) { + return 0 + } + + if (typeof position !== 'number') position = null + + return binding.writeBuffers(fd, buffers, position) +} + +/** + * Asynchronously renames file at `oldPath` to + * the pathname provided as `newPath`. + * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rename(oldPath, newPath, callback) { + callback = makeCallback(callback) + const req = new FSReqCallback() + req.oncomplete = callback + binding.rename(getValidatedPath(oldPath, 'oldPath'), getValidatedPath(newPath, 'newPath'), req) +} + +/** + * Synchronously renames file at `oldPath` to + * the pathname provided as `newPath`. + * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function renameSync(oldPath, newPath) { + binding.rename(getValidatedPath(oldPath, 'oldPath'), getValidatedPath(newPath, 'newPath')) +} + +/** + * Truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function truncate(path, len, callback) { + if (typeof len === 'function') { + callback = len + len = 0 + } else if (len === undefined) { + len = 0 + } + + validateInteger(len, 'len') + len = MathMax(0, len) + validateFunction(callback, 'cb') + fs.open(path, 'r+', (er, fd) => { + if (er) return callback(er) + const req = new FSReqCallback() + req.oncomplete = function oncomplete(er) { + fs.close(fd, (er2) => { + callback(aggregateTwoErrors(er2, er)) + }) + } + binding.ftruncate(fd, len, req) + }) +} + +/** + * Synchronously truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @returns {void} + */ +function truncateSync(path, len) { + if (len === undefined) { + len = 0 + } + // Allow error to be thrown, but still close fd. + const fd = fs.openSync(path, 'r+') + try { + fs.ftruncateSync(fd, len) + } finally { + fs.closeSync(fd) + } +} + +/** + * Truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function ftruncate(fd, len = 0, callback) { + if (typeof len === 'function') { + callback = len + len = 0 + } + validateInteger(len, 'len') + len = MathMax(0, len) + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.ftruncate(fd, len, req) +} + +/** + * Synchronously truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @returns {void} + */ +function ftruncateSync(fd, len = 0) { + validateInteger(len, 'len') + binding.ftruncate(fd, len < 0 ? 0 : len) +} + +function lazyLoadCp() { + if (cpFn === undefined) { + ;({ cpFn } = require('internal/fs/cp/cp')) + cpFn = require('util').callbackify(cpFn) + ;({ cpSyncFn } = require('internal/fs/cp/cp-sync')) + } +} + +function lazyLoadRimraf() { + if (rimraf === undefined) ({ rimraf } = require('internal/fs/rimraf')) +} + +/** + * Asynchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rmdir(path, options, callback) { + if (typeof options === 'function') { + callback = options + options = undefined + } + + if (options?.recursive !== undefined) { + // This API previously accepted a `recursive` option that was deprecated + // and removed. However, in order to make the change more visible, we + // opted to throw an error if recursive is specified rather than removing it + // entirely. + throw new ERR_INVALID_ARG_VALUE('options.recursive', options.recursive, 'is no longer supported') + } + + callback = makeCallback(callback) + path = getValidatedPath(path) + + validateRmdirOptions(options) + const req = new FSReqCallback() + req.oncomplete = callback + binding.rmdir(path, req) +} + +/** + * Synchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @returns {void} + */ +function rmdirSync(path, options) { + path = getValidatedPath(path) + + if (options?.recursive !== undefined) { + throw new ERR_INVALID_ARG_VALUE('options.recursive', options.recursive, 'is no longer supported') + } + + validateRmdirOptions(options) + binding.rmdir(path) +} + +/** + * Asynchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rm(path, options, callback) { + if (typeof options === 'function') { + callback = options + options = undefined + } + path = getValidatedPath(path) + + validateRmOptions(path, options, false, (err, options) => { + if (err) { + return callback(err) + } + lazyLoadRimraf() + return rimraf(path, options, callback) + }) +} + +/** + * Synchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @returns {void} + */ +function rmSync(path, options) { + const opts = validateRmOptionsSync(path, options, false) + return binding.rmSync(getValidatedPath(path), opts.maxRetries, opts.recursive, opts.retryDelay) +} + +/** + * Forces all currently queued I/O operations associated + * with the file to the operating system's synchronized + * I/O completion state. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fdatasync(fd, callback) { + const req = new FSReqCallback() + req.oncomplete = makeCallback(callback) + + if (permission.isEnabled()) { + callback(new ERR_ACCESS_DENIED('fdatasync API is disabled when Permission Model is enabled.')) + return + } + binding.fdatasync(fd, req) +} + +/** + * Synchronously forces all currently queued I/O operations + * associated with the file to the operating + * system's synchronized I/O completion state. + * @param {number} fd + * @returns {void} + */ +function fdatasyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED('fdatasync API is disabled when Permission Model is enabled.') + } + binding.fdatasync(fd) +} + +/** + * Requests for all data for the open file descriptor + * to be flushed to the storage device. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fsync(fd, callback) { + const req = new FSReqCallback() + req.oncomplete = makeCallback(callback) + if (permission.isEnabled()) { + callback(new ERR_ACCESS_DENIED('fsync API is disabled when Permission Model is enabled.')) + return + } + binding.fsync(fd, req) +} + +/** + * Synchronously requests for all data for the open + * file descriptor to be flushed to the storage device. + * @param {number} fd + * @returns {void} + */ +function fsyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED('fsync API is disabled when Permission Model is enabled.') + } + binding.fsync(fd) +} + +/** + * Asynchronously creates a directory. + * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function mkdir(path, options, callback) { + let mode = 0o777 + let recursive = false + if (typeof options === 'function') { + callback = options + } else if (typeof options === 'number' || typeof options === 'string') { + mode = parseFileMode(options, 'mode') + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive + validateBoolean(recursive, 'options.recursive') + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, 'options.mode') + } + } + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.mkdir(getValidatedPath(path), mode, recursive, req) +} + +/** + * Synchronously creates a directory. + * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @returns {string | void} + */ +function mkdirSync(path, options) { + let mode = 0o777 + let recursive = false + if (typeof options === 'number' || typeof options === 'string') { + mode = parseFileMode(options, 'mode') + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive + validateBoolean(recursive, 'options.recursive') + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, 'options.mode') + } + } + + const result = binding.mkdir(getValidatedPath(path), mode, recursive) + + if (recursive) { + return result + } +} + +/* + * An recursive algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdirRecursive(basePath, options, callback) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + } + + let i = 0 + + function read(path) { + const req = new FSReqCallback() + req.oncomplete = (err, result) => { + if (err) { + callback(err) + return + } + + if (result === undefined) { + callback(null, context.readdirResults) + return + } + + processReaddirResult({ + result, + currentPath: path, + context, + }) + + if (i < context.pathsQueue.length) { + read(context.pathsQueue[i++]) + } else { + callback(null, context.readdirResults) + } + } + + binding.readdir(path, context.encoding, context.withFileTypes, req) + } + + read(context.pathsQueue[i++]) +} + +// Calling `readdir` with `withFileTypes=true`, the result is an array of arrays. +// The first array is the names, and the second array is the types. +// They are guaranteed to be the same length; hence, setting `length` to the length +// of the first array within the result. +const processReaddirResult = (args) => (args.context.withFileTypes ? handleDirents(args) : handleFilePaths(args)) + +function handleDirents({ result, currentPath, context }) { + const { 0: names, 1: types } = result + const { length } = names + + for (let i = 0; i < length; i++) { + // Avoid excluding symlinks, as they are not directories. + // Refs: https://github.com/nodejs/node/issues/52663 + const fullPath = pathModule.join(currentPath, names[i]) + const dirent = getDirent(currentPath, names[i], types[i]) + ArrayPrototypePush(context.readdirResults, dirent) + + if (dirent.isDirectory() || binding.internalModuleStat(fullPath) === 1) { + ArrayPrototypePush(context.pathsQueue, fullPath) + } + } +} + +function handleFilePaths({ result, currentPath, context }) { + for (let i = 0; i < result.length; i++) { + const resultPath = pathModule.join(currentPath, result[i]) + const relativeResultPath = pathModule.relative(context.basePath, resultPath) + const stat = binding.internalModuleStat(resultPath) + ArrayPrototypePush(context.readdirResults, relativeResultPath) + + if (stat === 1) { + ArrayPrototypePush(context.pathsQueue, resultPath) + } + } +} + +/** + * An iterative algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @returns {string[] | Dirent[]} + */ +function readdirSyncRecursive(basePath, options) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + } + + function read(path) { + const readdirResult = binding.readdir(path, context.encoding, context.withFileTypes) + + if (readdirResult === undefined) { + return + } + + processReaddirResult({ + result: readdirResult, + currentPath: path, + context, + }) + } + + for (let i = 0; i < context.pathsQueue.length; i++) { + read(context.pathsQueue[i]) + } + + return context.readdirResults +} + +/** + * Reads the contents of a directory. + * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdir(path, options, callback) { + callback = makeCallback(typeof options === 'function' ? options : callback) + options = getOptions(options) + path = getValidatedPath(path) + if (options.recursive != null) { + validateBoolean(options.recursive, 'options.recursive') + } + + if (options.recursive) { + // Make shallow copy to prevent mutating options from affecting results + options = copyObject(options) + + readdirRecursive(path, options, callback) + return + } + + const req = new FSReqCallback() + if (!options.withFileTypes) { + req.oncomplete = callback + } else { + req.oncomplete = (err, result) => { + if (err) { + callback(err) + return + } + getDirents(path, result, callback) + } + } + binding.readdir(path, options.encoding, !!options.withFileTypes, req) +} + +/** + * Synchronously reads the contents of a directory. + * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @returns {string | Buffer[] | Dirent[]} + */ +function readdirSync(path, options) { + options = getOptions(options) + path = getValidatedPath(path) + if (options.recursive != null) { + validateBoolean(options.recursive, 'options.recursive') + } + + if (options.recursive) { + return readdirSyncRecursive(path, options) + } + + const result = binding.readdir(path, options.encoding, !!options.withFileTypes) + + return result !== undefined && options.withFileTypes ? getDirents(path, result) : result +} + +/** + * Invokes the callback with the `fs.Stats` + * for the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} [callback] + * @returns {void} + */ +function fstat(fd, options = { bigint: false }, callback) { + if (typeof options === 'function') { + callback = options + options = kEmptyObject + } + callback = makeStatsCallback(callback) + + const req = new FSReqCallback(options.bigint) + req.oncomplete = callback + binding.fstat(fd, options.bigint, req) +} + +/** + * Retrieves the `fs.Stats` for the symbolic link + * referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function lstat(path, options = { bigint: false }, callback) { + if (typeof options === 'function') { + callback = options + options = kEmptyObject + } + callback = makeStatsCallback(callback) + path = getValidatedPath(path) + if (permission.isEnabled() && !permission.has('fs.read', path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path + callback(new ERR_ACCESS_DENIED('Access to this API has been restricted', 'FileSystemRead', resource)) + return + } + + const req = new FSReqCallback(options.bigint) + req.oncomplete = callback + binding.lstat(path, options.bigint, req) +} + +/** + * Asynchronously gets the stats of a file. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function stat(path, options = { bigint: false, throwIfNoEntry: true }, callback) { + if (typeof options === 'function') { + callback = options + options = kEmptyObject + } + callback = makeStatsCallback(callback) + + const req = new FSReqCallback(options.bigint) + req.oncomplete = callback + binding.stat(getValidatedPath(path), options.bigint, req, options.throwIfNoEntry) +} + +function statfs(path, options = { bigint: false }, callback) { + if (typeof options === 'function') { + callback = options + options = kEmptyObject + } + validateFunction(callback, 'cb') + path = getValidatedPath(path) + const req = new FSReqCallback(options.bigint) + req.oncomplete = (err, stats) => { + if (err) { + return callback(err) + } + + callback(err, getStatFsFromBinding(stats)) + } + binding.statfs(path, options.bigint, req) +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @returns {Stats | undefined} + */ +function fstatSync(fd, options = { bigint: false }) { + const stats = binding.fstat(fd, options.bigint, undefined, false) + if (stats === undefined) { + return + } + return getStatsFromBinding(stats) +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the symbolic link referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats | undefined} + */ +function lstatSync(path, options = { bigint: false, throwIfNoEntry: true }) { + path = getValidatedPath(path) + if (permission.isEnabled() && !permission.has('fs.read', path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path + throw new ERR_ACCESS_DENIED('Access to this API has been restricted', 'FileSystemRead', resource) + } + const stats = binding.lstat(path, options.bigint, undefined, options.throwIfNoEntry) + + if (stats === undefined) { + return + } + return getStatsFromBinding(stats) +} + +/** + * Synchronously retrieves the `fs.Stats` + * for the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats} + */ +function statSync(path, options = { bigint: false, throwIfNoEntry: true }) { + const stats = binding.stat(getValidatedPath(path), options.bigint, undefined, options.throwIfNoEntry) + if (stats === undefined) { + return undefined + } + return getStatsFromBinding(stats) +} + +function statfsSync(path, options = { bigint: false }) { + const stats = binding.statfs(getValidatedPath(path), options.bigint) + return getStatFsFromBinding(stats) +} + +/** + * Reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @param {( + * err?: Error, + * linkString?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readlink(path, options, callback) { + callback = makeCallback(typeof options === 'function' ? options : callback) + options = getOptions(options) + const req = new FSReqCallback() + req.oncomplete = callback + binding.readlink(getValidatedPath(path), options.encoding, req) +} + +/** + * Synchronously reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @returns {string | Buffer} + */ +function readlinkSync(path, options) { + options = getOptions(options) + return binding.readlink(getValidatedPath(path), options.encoding) +} + +/** + * Creates the link called `path` pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function symlink(target, path, type, callback) { + if (callback === undefined) { + callback = makeCallback(type) + type = undefined + } else { + validateOneOf(type, 'type', ['dir', 'file', 'junction', null, undefined]) + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has('fs')) { + callback(new ERR_ACCESS_DENIED('fs.symlink API requires full fs.read and fs.write permissions.')) + return + } + + target = getValidatedPath(target, 'target') + path = getValidatedPath(path) + + if (isWindows && type == null) { + let absoluteTarget + try { + // Symlinks targets can be relative to the newly created path. + // Calculate absolute file name of the symlink target, and check + // if it is a directory. Ignore resolve error to keep symlink + // errors consistent between platforms if invalid path is + // provided. + absoluteTarget = pathModule.resolve(path, '..', target) + } catch { + // Continue regardless of error. + } + if (absoluteTarget !== undefined) { + stat(absoluteTarget, (err, stat) => { + const resolvedType = !err && stat.isDirectory() ? 'dir' : 'file' + const resolvedFlags = stringToSymlinkType(resolvedType) + const destination = preprocessSymlinkDestination(target, resolvedType, path) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.symlink(destination, path, resolvedFlags, req) + }) + return + } + } + + const destination = preprocessSymlinkDestination(target, type, path) + + const flags = stringToSymlinkType(type) + const req = new FSReqCallback() + req.oncomplete = callback + binding.symlink(destination, path, flags, req) +} + +/** + * Synchronously creates the link called `path` + * pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @returns {void} + */ +function symlinkSync(target, path, type) { + validateOneOf(type, 'type', ['dir', 'file', 'junction', null, undefined]) + if (isWindows && type == null) { + const absoluteTarget = pathModule.resolve(`${path}`, '..', `${target}`) + if (statSync(absoluteTarget, { throwIfNoEntry: false })?.isDirectory()) { + type = 'dir' + } + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has('fs')) { + throw new ERR_ACCESS_DENIED('fs.symlink API requires full fs.read and fs.write permissions.') + } + + target = getValidatedPath(target, 'target') + path = getValidatedPath(path) + + binding.symlink(preprocessSymlinkDestination(target, type, path), path, stringToSymlinkType(type)) +} + +/** + * Creates a new link from the `existingPath` + * to the `newPath`. + * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function link(existingPath, newPath, callback) { + callback = makeCallback(callback) + + existingPath = getValidatedPath(existingPath, 'existingPath') + newPath = getValidatedPath(newPath, 'newPath') + + const req = new FSReqCallback() + req.oncomplete = callback + + binding.link(existingPath, newPath, req) +} + +/** + * Synchronously creates a new link from the `existingPath` + * to the `newPath`. + * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function linkSync(existingPath, newPath) { + existingPath = getValidatedPath(existingPath, 'existingPath') + newPath = getValidatedPath(newPath, 'newPath') + + binding.link(existingPath, newPath) +} + +/** + * Asynchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function unlink(path, callback) { + callback = makeCallback(callback) + const req = new FSReqCallback() + req.oncomplete = callback + binding.unlink(getValidatedPath(path), req) +} + +/** + * Synchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @returns {void} + */ +function unlinkSync(path) { + binding.unlink(getValidatedPath(path)) +} + +/** + * Sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchmod(fd, mode, callback) { + mode = parseFileMode(mode, 'mode') + callback = makeCallback(callback) + + if (permission.isEnabled()) { + callback(new ERR_ACCESS_DENIED('fchmod API is disabled when Permission Model is enabled.')) + return + } + + const req = new FSReqCallback() + req.oncomplete = callback + binding.fchmod(fd, mode, req) +} + +/** + * Synchronously sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @returns {void} + */ +function fchmodSync(fd, mode) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED('fchmod API is disabled when Permission Model is enabled.') + } + binding.fchmod(fd, parseFileMode(mode, 'mode')) +} + +/** + * Changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchmod(path, mode, callback) { + validateFunction(callback, 'cb') + mode = parseFileMode(mode, 'mode') + fs.open(path, O_WRONLY | O_SYMLINK, (err, fd) => { + if (err) { + callback(err) + return + } + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, (err) => { + fs.close(fd, (err2) => { + callback(aggregateTwoErrors(err2, err)) + }) + }) + }) +} + +/** + * Synchronously changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @returns {void} + */ +function lchmodSync(path, mode) { + const fd = fs.openSync(path, O_WRONLY | O_SYMLINK) + + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + try { + fs.fchmodSync(fd, mode) + } finally { + fs.closeSync(fd) + } +} + +/** + * Asynchronously changes the permissions of a file. + * @param {string | Buffer | URL} path + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chmod(path, mode, callback) { + path = getValidatedPath(path) + mode = parseFileMode(mode, 'mode') + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.chmod(path, mode, req) +} + +/** + * Synchronously changes the permissions of a file. + * @param {string | Buffer | URL} path + * @param {string | number} mode + * @returns {void} + */ +function chmodSync(path, mode) { + path = getValidatedPath(path) + mode = parseFileMode(mode, 'mode') + + binding.chmod(path, mode) +} + +/** + * Sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchown(path, uid, gid, callback) { + callback = makeCallback(callback) + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + const req = new FSReqCallback() + req.oncomplete = callback + binding.lchown(path, uid, gid, req) +} + +/** + * Synchronously sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function lchownSync(path, uid, gid) { + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + binding.lchown(path, uid, gid) +} + +/** + * Sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchown(fd, uid, gid, callback) { + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + callback = makeCallback(callback) + if (permission.isEnabled()) { + callback(new ERR_ACCESS_DENIED('fchown API is disabled when Permission Model is enabled.')) + return + } + + const req = new FSReqCallback() + req.oncomplete = callback + binding.fchown(fd, uid, gid, req) +} + +/** + * Synchronously sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function fchownSync(fd, uid, gid) { + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED('fchown API is disabled when Permission Model is enabled.') + } + + binding.fchown(fd, uid, gid) +} + +/** + * Asynchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chown(path, uid, gid, callback) { + callback = makeCallback(callback) + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.chown(path, uid, gid, req) +} + +/** + * Synchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function chownSync(path, uid, gid) { + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + binding.chown(path, uid, gid) +} + +/** + * Changes the file system timestamps of the object + * referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function utimes(path, atime, mtime, callback) { + callback = makeCallback(callback) + path = getValidatedPath(path) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.utimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req) +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function utimesSync(path, atime, mtime) { + binding.utimes(getValidatedPath(path), toUnixTimestamp(atime), toUnixTimestamp(mtime)) +} + +/** + * Changes the file system timestamps of the object + * referenced by the supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function futimes(fd, atime, mtime, callback) { + atime = toUnixTimestamp(atime, 'atime') + mtime = toUnixTimestamp(mtime, 'mtime') + callback = makeCallback(callback) + + if (permission.isEnabled()) { + callback(new ERR_ACCESS_DENIED('futimes API is disabled when Permission Model is enabled.')) + return + } + + const req = new FSReqCallback() + req.oncomplete = callback + binding.futimes(fd, atime, mtime, req) +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by the + * supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function futimesSync(fd, atime, mtime) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED('futimes API is disabled when Permission Model is enabled.') + } + + binding.futimes(fd, toUnixTimestamp(atime, 'atime'), toUnixTimestamp(mtime, 'mtime')) +} + +/** + * Changes the access and modification times of + * a file in the same way as `fs.utimes()`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lutimes(path, atime, mtime, callback) { + callback = makeCallback(callback) + path = getValidatedPath(path) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.lutimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req) +} + +/** + * Synchronously changes the access and modification + * times of a file in the same way as `fs.utimesSync()`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function lutimesSync(path, atime, mtime) { + binding.lutimes(getValidatedPath(path), toUnixTimestamp(atime), toUnixTimestamp(mtime)) +} + +function writeAll(fd, isUserFd, buffer, offset, length, signal, flush, callback) { + if (signal?.aborted) { + const abortError = new AbortError(undefined, { cause: signal.reason }) + if (isUserFd) { + callback(abortError) + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, abortError)) + }) + } + return + } + // write(fd, buffer, offset, length, position, callback) + fs.write(fd, buffer, offset, length, null, (writeErr, written) => { + if (writeErr) { + if (isUserFd) { + callback(writeErr) + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, writeErr)) + }) + } + } else if (written === length) { + if (!flush) { + if (isUserFd) { + callback(null) + } else { + fs.close(fd, callback) + } + } else { + fs.fsync(fd, (syncErr) => { + if (syncErr) { + if (isUserFd) { + callback(syncErr) + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, syncErr)) + }) + } + } else if (isUserFd) { + callback(null) + } else { + fs.close(fd, callback) + } + }) + } + } else { + offset += written + length -= written + writeAll(fd, isUserFd, buffer, offset, length, signal, flush, callback) + } + }) +} + +/** + * Asynchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * signal?: AbortSignal; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function writeFile(path, data, options, callback) { + callback ||= options + validateFunction(callback, 'cb') + options = getOptions(options, { + encoding: 'utf8', + mode: 0o666, + flag: 'w', + flush: false, + }) + const flag = options.flag || 'w' + const flush = options.flush ?? false + + validateBoolean(flush, 'options.flush') + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, 'data') + data = Buffer.from(data, options.encoding || 'utf8') + } + + if (isFd(path)) { + const isUserFd = true + const signal = options.signal + writeAll(path, isUserFd, data, 0, data.byteLength, signal, flush, callback) + return + } + + if (checkAborted(options.signal, callback)) return + + fs.open(path, flag, options.mode, (openErr, fd) => { + if (openErr) { + callback(openErr) + } else { + const isUserFd = false + const signal = options.signal + writeAll(fd, isUserFd, data, 0, data.byteLength, signal, flush, callback) + } + }) +} + +/** + * Synchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @returns {void} + */ +function writeFileSync(path, data, options) { + options = getOptions(options, { + encoding: 'utf8', + mode: 0o666, + flag: 'w', + flush: false, + }) + + const flush = options.flush ?? false + + validateBoolean(flush, 'options.flush') + + const flag = options.flag || 'w' + + // C++ fast path for string data and UTF8 encoding + if (typeof data === 'string' && (options.encoding === 'utf8' || options.encoding === 'utf-8')) { + if (!isInt32(path)) { + path = getValidatedPath(path) + } + + return binding.writeFileUtf8(path, data, stringToFlags(flag), parseFileMode(options.mode, 'mode', 0o666)) + } + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, 'data') + data = Buffer.from(data, options.encoding || 'utf8') + } + + const isUserFd = isFd(path) // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, flag, options.mode) + + let offset = 0 + let length = data.byteLength + try { + while (length > 0) { + const written = fs.writeSync(fd, data, offset, length) + offset += written + length -= written + } + + if (flush) { + fs.fsyncSync(fd) + } + } finally { + if (!isUserFd) fs.closeSync(fd) + } +} + +/** + * Asynchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function appendFile(path, data, options, callback) { + callback ||= options + validateFunction(callback, 'cb') + options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'a' }) + + // Don't make changes directly on options object + options = copyObject(options) + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = 'a' + + fs.writeFile(path, data, options, callback) +} + +/** + * Synchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * } | string} [options] + * @returns {void} + */ +function appendFileSync(path, data, options) { + options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'a' }) + + // Don't make changes directly on options object + options = copyObject(options) + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = 'a' + + fs.writeFileSync(path, data, options) +} + +/** + * Watches for the changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {string | { + * persistent?: boolean; + * recursive?: boolean; + * encoding?: string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * eventType?: string, + * filename?: string | Buffer + * ) => any} [listener] + * @returns {watchers.FSWatcher} + */ +function watch(filename, options, listener) { + if (typeof options === 'function') { + listener = options + } + options = getOptions(options) + + // Don't make changes directly on options object + options = copyObject(options) + + if (options.persistent === undefined) options.persistent = true + if (options.recursive === undefined) options.recursive = false + + let watcher + const watchers = require('internal/fs/watchers') + const path = getValidatedPath(filename) + // TODO(anonrig): Remove non-native watcher when/if libuv supports recursive. + // As of November 2022, libuv does not support recursive file watch on all platforms, + // e.g. Linux due to the limitations of inotify. + if (options.recursive && !isMacOS && !isWindows) { + const nonNativeWatcher = require('internal/fs/recursive_watch') + watcher = new nonNativeWatcher.FSWatcher(options) + watcher[watchers.kFSWatchStart](path) + } else { + watcher = new watchers.FSWatcher() + watcher[watchers.kFSWatchStart](path, options.persistent, options.recursive, options.encoding, options.ignore) + } + + if (listener) { + watcher.addListener('change', listener) + } + if (options.signal) { + if (options.signal.aborted) { + process.nextTick(() => watcher.close()) + } else { + const listener = () => watcher.close() + kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation + options.signal.addEventListener('abort', listener, { __proto__: null, [kResistStopPropagation]: true }) + watcher.once('close', () => { + options.signal.removeEventListener('abort', listener) + }) + } + } + + return watcher +} + +const statWatchers = new SafeMap() + +/** + * Watches for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {{ + * bigint?: boolean; + * persistent?: boolean; + * interval?: number; + * }} [options] + * @param {( + * current?: Stats, + * previous?: Stats + * ) => any} listener + * @returns {watchers.StatWatcher} + */ +function watchFile(filename, options, listener) { + filename = getValidatedPath(filename) + filename = pathModule.resolve(filename) + let stat + + if (options === null || typeof options !== 'object') { + listener = options + options = null + } + + options = { + // Poll interval in milliseconds. 5007 is what libev used to use. It's + // a little on the slow side but let's stick with it for now to keep + // behavioral changes to a minimum. + interval: 5007, + persistent: true, + ...options, + } + + validateFunction(listener, 'listener') + + stat = statWatchers.get(filename) + const watchers = require('internal/fs/watchers') + if (stat === undefined) { + stat = new watchers.StatWatcher(options.bigint) + stat[watchers.kFSStatWatcherStart](filename, options.persistent, options.interval) + statWatchers.set(filename, stat) + } else { + stat[watchers.kFSStatWatcherAddOrCleanRef]('add') + } + + stat.addListener('change', listener) + return stat +} + +/** + * Stops watching for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {() => any} [listener] + * @returns {void} + */ +function unwatchFile(filename, listener) { + filename = getValidatedPath(filename) + filename = pathModule.resolve(filename) + const stat = statWatchers.get(filename) + + if (stat === undefined) return + const watchers = require('internal/fs/watchers') + if (typeof listener === 'function') { + const beforeListenerCount = stat.listenerCount('change') + stat.removeListener('change', listener) + if (stat.listenerCount('change') < beforeListenerCount) stat[watchers.kFSStatWatcherAddOrCleanRef]('clean') + } else { + stat.removeAllListeners('change') + stat[watchers.kFSStatWatcherAddOrCleanRef]('cleanAll') + } + + if (stat.listenerCount('change') === 0) { + stat.stop() + statWatchers.delete(filename) + } +} + +let splitRoot +if (isWindows) { + // Regex to find the device root on Windows (e.g. 'c:\\'), including trailing + // slash. + const splitRootRe = /^(?:[a-zA-Z]:|[\\/]{2}[^\\/]+[\\/][^\\/]+)?[\\/]*/ + splitRoot = function splitRoot(str) { + return SideEffectFreeRegExpPrototypeExec(splitRootRe, str)[0] + } +} else { + splitRoot = function splitRoot(str) { + for (let i = 0; i < str.length; ++i) { + if (StringPrototypeCharCodeAt(str, i) !== CHAR_FORWARD_SLASH) return StringPrototypeSlice(str, 0, i) + } + return str + } +} + +function encodeRealpathResult(result, options) { + if (!options || !options.encoding || options.encoding === 'utf8') return result + const asBuffer = Buffer.from(result) + if (options.encoding === 'buffer') { + return asBuffer + } + return asBuffer.toString(options.encoding) +} + +// Finds the next portion of a (partial) path, up to the next path delimiter +let nextPart +if (isWindows) { + nextPart = function nextPart(p, i) { + for (; i < p.length; ++i) { + const ch = StringPrototypeCharCodeAt(p, i) + + // Check for a separator character + if (ch === CHAR_BACKWARD_SLASH || ch === CHAR_FORWARD_SLASH) return i + } + return -1 + } +} else { + nextPart = function nextPart(p, i) { + return StringPrototypeIndexOf(p, '/', i) + } +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string | null; }} [options] + * @returns {string | Buffer} + */ +function realpathSync(p, options) { + options = getOptions(options) + p = toPathIfFileURL(p) + if (typeof p !== 'string') { + p += '' + } + validatePath(p) + p = pathModule.resolve(p) + + const cache = options[realpathCacheKey] + const maybeCachedResult = cache?.get(p) + if (maybeCachedResult) { + return maybeCachedResult + } + + const seenLinks = new SafeMap() + const knownHard = new SafeSet() + const original = p + + // Current character position in p + let pos + // The partial path so far, including a trailing slash if any + let current + // The partial path without a trailing slash (except when pointing at a root) + let base + // The partial path scanned in the previous round, with slash + let previous + + // Skip over roots + current = base = splitRoot(p) + pos = current.length + + // On windows, check that the root exists. On unix there is no need. + if (isWindows) { + const out = binding.lstat(base, false, undefined, true /* throwIfNoEntry */) + if (out === undefined) { + return + } + knownHard.add(base) + } + + // Walk down the path, swapping out linked path parts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + const result = nextPart(p, pos) + previous = current + if (result === -1) { + const last = StringPrototypeSlice(p, pos) + current += last + base = previous + last + pos = p.length + } else { + current += StringPrototypeSlice(p, pos, result + 1) + base = previous + StringPrototypeSlice(p, pos, result) + pos = result + 1 + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base) || cache?.get(base) === base) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + break + } + continue + } + + let resolvedLink + const maybeCachedResolved = cache?.get(base) + if (maybeCachedResolved) { + resolvedLink = maybeCachedResolved + } else { + // Use stats array directly to avoid creating an fs.Stats instance just + // for our internal use. + + const stats = binding.lstat(base, true, undefined, true /* throwIfNoEntry */) + if (stats === undefined) { + return + } + + if (!isFileType(stats, S_IFLNK)) { + knownHard.add(base) + cache?.set(base, base) + continue + } + + // Read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + let linkTarget = null + let id + if (!isWindows) { + const dev = BigIntPrototypeToString(stats[0], 32) + const ino = BigIntPrototypeToString(stats[7], 32) + id = `${dev}:${ino}` + if (seenLinks.has(id)) { + linkTarget = seenLinks.get(id) + } + } + if (linkTarget === null) { + binding.stat(base, false, undefined, true) + linkTarget = binding.readlink(base, undefined) + } + resolvedLink = pathModule.resolve(previous, linkTarget) + + cache?.set(base, resolvedLink) + if (!isWindows) seenLinks.set(id, linkTarget) + } + + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)) + + // Skip over roots + current = base = splitRoot(p) + pos = current.length + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + const out = binding.lstat(base, false, undefined, true /* throwIfNoEntry */) + if (out === undefined) { + return + } + knownHard.add(base) + } + } + + cache?.set(original, p) + return encodeRealpathResult(p, options) +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @returns {string | Buffer} + */ +realpathSync.native = (path, options) => { + options = getOptions(options) + return binding.realpath(getValidatedPath(path), options.encoding) +} + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function realpath(p, options, callback) { + if (typeof options === 'function') { + callback = options + } else { + validateFunction(callback, 'cb') + } + options = getOptions(options) + p = toPathIfFileURL(p) + + if (typeof p !== 'string') { + p += '' + } + validatePath(p) + p = pathModule.resolve(p) + + const seenLinks = new SafeMap() + const knownHard = new SafeSet() + + // Current character position in p + let pos + // The partial path so far, including a trailing slash if any + let current + // The partial path without a trailing slash (except when pointing at a root) + let base + // The partial path scanned in the previous round, with slash + let previous + + current = base = splitRoot(p) + pos = current.length + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err) + knownHard.add(base) + LOOP() + }) + } else { + process.nextTick(LOOP) + } + + // Walk down the path, swapping out linked path parts for their real + // values + function LOOP() { + // Stop if scanned past end of path + if (pos >= p.length) { + return callback(null, encodeRealpathResult(p, options)) + } + + // find the next part + const result = nextPart(p, pos) + previous = current + if (result === -1) { + const last = StringPrototypeSlice(p, pos) + current += last + base = previous + last + pos = p.length + } else { + current += StringPrototypeSlice(p, pos, result + 1) + base = previous + StringPrototypeSlice(p, pos, result) + pos = result + 1 + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base)) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + return callback(null, encodeRealpathResult(p, options)) + } + return process.nextTick(LOOP) + } + + return fs.lstat(base, { bigint: true }, gotStat) + } + + function gotStat(err, stats) { + if (err) return callback(err) + + // If not a symlink, skip to the next path part + if (!stats.isSymbolicLink()) { + knownHard.add(base) + return process.nextTick(LOOP) + } + + // Stat & read the link if not read before. + // Call `gotTarget()` as soon as the link target is known. + // `dev`/`ino` always return 0 on windows, so skip the check. + let id + if (!isWindows) { + const dev = BigIntPrototypeToString(stats.dev, 32) + const ino = BigIntPrototypeToString(stats.ino, 32) + id = `${dev}:${ino}` + if (seenLinks.has(id)) { + return gotTarget(null, seenLinks.get(id)) + } + } + fs.stat(base, (err) => { + if (err) return callback(err) + + fs.readlink(base, (err, target) => { + if (!isWindows) seenLinks.set(id, target) + gotTarget(err, target) + }) + }) + } + + function gotTarget(err, target) { + if (err) return callback(err) + + gotResolvedLink(pathModule.resolve(previous, target)) + } + + function gotResolvedLink(resolvedLink) { + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)) + current = base = splitRoot(p) + pos = current.length + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err) + knownHard.add(base) + LOOP() + }) + } else { + process.nextTick(LOOP) + } + } +} + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +realpath.native = (path, options, callback) => { + callback = makeCallback(callback || options) + options = getOptions(options) + path = getValidatedPath(path) + const req = new FSReqCallback() + req.oncomplete = callback + binding.realpath(path, options.encoding, req) +} + +/** + * Creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * directory?: string + * ) => any} callback + * @returns {void} + */ +function mkdtemp(prefix, options, callback) { + callback = makeCallback(typeof options === 'function' ? options : callback) + options = getOptions(options) + + prefix = getValidatedPath(prefix, 'prefix') + warnOnNonPortableTemplate(prefix) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.mkdtemp(prefix, options.encoding, req) +} + +/** + * Synchronously creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {string} + */ +function mkdtempSync(prefix, options) { + options = getOptions(options) + + prefix = getValidatedPath(prefix, 'prefix') + warnOnNonPortableTemplate(prefix) + return binding.mkdtemp(prefix, options.encoding) +} + +/** + * Synchronously creates a unique temporary directory. + * The returned value is a disposable object which removes the + * directory and its contents when disposed. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {object} A disposable object with a "path" property. + */ +function mkdtempDisposableSync(prefix, options) { + options = getOptions(options) + + prefix = getValidatedPath(prefix, 'prefix') + warnOnNonPortableTemplate(prefix) + + const path = binding.mkdtemp(prefix, options.encoding) + // Stash the full path in case of process.chdir() + const fullPath = pathModule.resolve(process.cwd(), path) + + const remove = () => { + binding.rmSync(fullPath, 0 /* maxRetries */, true /* recursive */, 100 /* retryDelay */) + } + return { + path, + remove, + [SymbolDispose]() { + remove() + }, + } +} + +/** + * Asynchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function copyFile(src, dest, mode, callback) { + if (typeof mode === 'function') { + callback = mode + mode = 0 + } + + src = getValidatedPath(src, 'src') + dest = getValidatedPath(dest, 'dest') + callback = makeCallback(callback) + + const req = new FSReqCallback() + req.oncomplete = callback + binding.copyFile(src, dest, mode, req) +} + +/** + * Synchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @returns {void} + */ +function copyFileSync(src, dest, mode) { + binding.copyFile(getValidatedPath(src, 'src'), getValidatedPath(dest, 'dest'), mode) +} + +/** + * Asynchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. + * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function cp(src, dest, options, callback) { + if (typeof options === 'function') { + callback = options + options = undefined + } + callback = makeCallback(callback) + options = validateCpOptions(options) + src = getValidatedPath(src, 'src') + dest = getValidatedPath(dest, 'dest') + lazyLoadCp() + cpFn(src, dest, options, callback) +} + +/** + * Synchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. + * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @returns {void} + */ +function cpSync(src, dest, options) { + options = validateCpOptions(options) + src = getValidatedPath(src, 'src') + dest = getValidatedPath(dest, 'dest') + lazyLoadCp() + cpSyncFn(src, dest, options) +} + +function lazyLoadStreams() { + if (!ReadStream) { + ;({ ReadStream, WriteStream } = require('internal/fs/streams')) + FileReadStream = ReadStream + FileWriteStream = WriteStream + } +} + +/** + * Creates a readable stream with a default `highWaterMark` + * of 64 KiB. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * end?: number; + * highWaterMark?: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * }} [options] + * @returns {ReadStream} + */ +function createReadStream(path, options) { + lazyLoadStreams() + return new ReadStream(path, options) +} + +/** + * Creates a write stream. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * highWaterMark?: number; + * flush?: boolean; + * }} [options] + * @returns {WriteStream} + */ +function createWriteStream(path, options) { + lazyLoadStreams() + return new WriteStream(path, options) +} + +const lazyGlob = getLazy(() => require('internal/fs/glob').Glob) + +function glob(pattern, options, callback) { + if (typeof options === 'function') { + callback = options + options = undefined + } + callback = makeCallback(callback) + + const Glob = lazyGlob() + PromisePrototypeThen(ArrayFromAsync(new Glob(pattern, options).glob()), (res) => callback(null, res), callback) +} + +function globSync(pattern, options) { + const Glob = lazyGlob() + return new Glob(pattern, options).globSync() +} + +module.exports = fs = { + appendFile, + appendFileSync, + access, + accessSync, + chown, + chownSync, + chmod, + chmodSync, + close, + closeSync, + copyFile, + copyFileSync, + cp, + cpSync, + createReadStream, + createWriteStream, + exists, + existsSync, + fchown, + fchownSync, + fchmod, + fchmodSync, + fdatasync, + fdatasyncSync, + fstat, + fstatSync, + fsync, + fsyncSync, + ftruncate, + ftruncateSync, + futimes, + futimesSync, + glob, + globSync, + lchown, + lchownSync, + lchmod: constants.O_SYMLINK !== undefined ? lchmod : undefined, + lchmodSync: constants.O_SYMLINK !== undefined ? lchmodSync : undefined, + link, + linkSync, + lstat, + lstatSync, + lutimes, + lutimesSync, + mkdir, + mkdirSync, + mkdtemp, + mkdtempSync, + mkdtempDisposableSync, + open, + openSync, + openAsBlob, + readdir, + readdirSync, + read, + readSync, + readv, + readvSync, + readFile, + readFileSync, + readlink, + readlinkSync, + realpath, + realpathSync, + rename, + renameSync, + rm, + rmSync, + rmdir, + rmdirSync, + stat, + statfs, + statSync, + statfsSync, + symlink, + symlinkSync, + truncate, + truncateSync, + unwatchFile, + unlink, + unlinkSync, + utimes, + utimesSync, + watch, + watchFile, + writeFile, + writeFileSync, + write, + writeSync, + writev, + writevSync, + Dirent, + Stats, + + get ReadStream() { + lazyLoadStreams() + return ReadStream + }, + + set ReadStream(val) { + ReadStream = val + }, + + get WriteStream() { + lazyLoadStreams() + return WriteStream + }, + + set WriteStream(val) { + WriteStream = val + }, + + // Legacy names... these have to be separate because of how graceful-fs + // (and possibly other) modules monkey patch the values. + get FileReadStream() { + lazyLoadStreams() + return FileReadStream + }, + + set FileReadStream(val) { + FileReadStream = val + }, + + get FileWriteStream() { + lazyLoadStreams() + return FileWriteStream + }, + + set FileWriteStream(val) { + FileWriteStream = val + }, + + get Utf8Stream() { + lazyLoadUtf8Stream() + return Utf8Stream + }, + + // For tests + _toUnixTimestamp: toUnixTimestamp, +} + +defineLazyProperties(fs, 'internal/fs/dir', ['Dir', 'opendir', 'opendirSync']) + +ObjectDefineProperties(fs, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + promises ??= require('internal/fs/promises').exports + return promises + }, + }, +}) diff --git a/reference/internal/fs/dir.js b/reference/internal/fs/dir.js new file mode 100644 index 0000000..f1fd044 --- /dev/null +++ b/reference/internal/fs/dir.js @@ -0,0 +1,326 @@ +'use strict' + +const { + ArrayPrototypePush, + ArrayPrototypeShift, + FunctionPrototypeBind, + ObjectDefineProperties, + PromiseReject, + SymbolAsyncDispose, + SymbolAsyncIterator, + SymbolDispose, +} = primordials + +const pathModule = require('path') +const binding = internalBinding('fs') +const dirBinding = internalBinding('fs_dir') +const { + codes: { ERR_DIR_CLOSED, ERR_DIR_CONCURRENT_OPERATION, ERR_INVALID_THIS, ERR_MISSING_ARGS }, +} = require('internal/errors') + +const { FSReqCallback } = binding +const { promisify } = require('internal/util') +const { getDirent, getOptions, getValidatedPath } = require('internal/fs/utils') +const { validateFunction, validateUint32 } = require('internal/validators') + +class Dir { + #handle + #path + #bufferedEntries = [] + #closed = false + #options + #readPromisified + #closePromisified + #operationQueue = null + #handlerQueue = [] + + constructor(handle, path, options) { + if (handle == null) throw new ERR_MISSING_ARGS('handle') + this.#handle = handle + this.#path = path + this.#options = { + bufferSize: 32, + ...getOptions(options, { + encoding: 'utf8', + }), + } + + try { + validateUint32(this.#options.bufferSize, 'options.bufferSize', true) + } catch (validationError) { + // Userland won't be able to close handle if we throw, so we close it first + this.#handle.close() + throw validationError + } + + this.#readPromisified = FunctionPrototypeBind(promisify(this.#readImpl), this, false) + this.#closePromisified = FunctionPrototypeBind(promisify(this.close), this) + } + + get path() { + if (!(#path in this)) throw new ERR_INVALID_THIS('Dir') + return this.#path + } + + #processHandlerQueue() { + while (this.#handlerQueue.length > 0) { + const handler = ArrayPrototypeShift(this.#handlerQueue) + const { handle, path } = handler + + const result = handle.read(this.#options.encoding, this.#options.bufferSize) + + if (result !== null) { + this.#processReadResult(path, result) + if (result.length > 0) { + ArrayPrototypePush(this.#handlerQueue, handler) + } + } else { + handle.close() + } + + if (this.#bufferedEntries.length > 0) { + break + } + } + + return this.#bufferedEntries.length > 0 + } + + read(callback) { + return arguments.length === 0 ? this.#readPromisified() : this.#readImpl(true, callback) + } + + #readImpl(maybeSync, callback) { + if (this.#closed === true) { + throw new ERR_DIR_CLOSED() + } + + if (callback === undefined) { + return this.#readPromisified() + } + + validateFunction(callback, 'callback') + + if (this.#operationQueue !== null) { + ArrayPrototypePush(this.#operationQueue, () => { + this.#readImpl(maybeSync, callback) + }) + return + } + + if (this.#processHandlerQueue()) { + try { + const dirent = ArrayPrototypeShift(this.#bufferedEntries) + + if (this.#options.recursive && dirent.isDirectory()) { + this.#readSyncRecursive(dirent) + } + + if (maybeSync) process.nextTick(callback, null, dirent) + else callback(null, dirent) + return + } catch (error) { + return callback(error) + } + } + + const req = new FSReqCallback() + req.oncomplete = (err, result) => { + process.nextTick(() => { + const queue = this.#operationQueue + this.#operationQueue = null + for (const op of queue) op() + }) + + if (err || result === null) { + return callback(err, result) + } + + try { + this.#processReadResult(this.#path, result) + const dirent = ArrayPrototypeShift(this.#bufferedEntries) + if (this.#options.recursive && dirent.isDirectory()) { + this.#readSyncRecursive(dirent) + } + callback(null, dirent) + } catch (error) { + callback(error) + } + } + + this.#operationQueue = [] + this.#handle.read(this.#options.encoding, this.#options.bufferSize, req) + } + + #processReadResult(path, result) { + for (let i = 0; i < result.length; i += 2) { + ArrayPrototypePush(this.#bufferedEntries, getDirent(path, result[i], result[i + 1])) + } + } + + #readSyncRecursive(dirent) { + const path = pathModule.join(dirent.parentPath, dirent.name) + const handle = dirBinding.opendir(path, this.#options.encoding) + + if (handle === undefined) { + return + } + + ArrayPrototypePush(this.#handlerQueue, { handle, path }) + } + + readSync() { + if (this.#closed === true) { + throw new ERR_DIR_CLOSED() + } + + if (this.#operationQueue !== null) { + throw new ERR_DIR_CONCURRENT_OPERATION() + } + + if (this.#processHandlerQueue()) { + const dirent = ArrayPrototypeShift(this.#bufferedEntries) + if (this.#options.recursive && dirent.isDirectory()) { + this.#readSyncRecursive(dirent) + } + return dirent + } + + const result = this.#handle.read(this.#options.encoding, this.#options.bufferSize) + + if (result === null) { + return result + } + + this.#processReadResult(this.#path, result) + + const dirent = ArrayPrototypeShift(this.#bufferedEntries) + if (this.#options.recursive && dirent.isDirectory()) { + this.#readSyncRecursive(dirent) + } + return dirent + } + + close(callback) { + if (callback === undefined) { + if (this.#closed === true) { + return PromiseReject(new ERR_DIR_CLOSED()) + } + return this.#closePromisified() + } + + validateFunction(callback, 'callback') + + if (this.#closed === true) { + process.nextTick(callback, new ERR_DIR_CLOSED()) + return + } + + if (this.#operationQueue !== null) { + ArrayPrototypePush(this.#operationQueue, () => { + this.close(callback) + }) + return + } + + while (this.#handlerQueue.length > 0) { + const handler = ArrayPrototypeShift(this.#handlerQueue) + handler.handle.close() + } + + this.#closed = true + const req = new FSReqCallback() + req.oncomplete = callback + this.#handle.close(req) + } + + closeSync() { + if (this.#closed === true) { + throw new ERR_DIR_CLOSED() + } + + if (this.#operationQueue !== null) { + throw new ERR_DIR_CONCURRENT_OPERATION() + } + + while (this.#handlerQueue.length > 0) { + const handler = ArrayPrototypeShift(this.#handlerQueue) + handler.handle.close() + } + + this.#closed = true + this.#handle.close() + } + + async *entries() { + try { + while (true) { + const result = await this.#readPromisified() + if (result === null) { + break + } + yield result + } + } finally { + await this.#closePromisified() + } + } + + [SymbolDispose]() { + if (this.#closed) return + this.closeSync() + } + + async [SymbolAsyncDispose]() { + if (this.#closed) return + await this.#closePromisified() + } +} + +ObjectDefineProperties(Dir.prototype, { + [SymbolAsyncIterator]: { + __proto__: null, + enumerable: false, + writable: true, + configurable: true, + value: Dir.prototype.entries, + }, +}) + +function opendir(path, options, callback) { + callback = typeof options === 'function' ? options : callback + validateFunction(callback, 'callback') + + path = getValidatedPath(path) + options = getOptions(options, { + encoding: 'utf8', + }) + + function opendirCallback(error, handle) { + if (error) { + callback(error) + } else { + callback(null, new Dir(handle, path, options)) + } + } + + const req = new FSReqCallback() + req.oncomplete = opendirCallback + + dirBinding.opendir(path, options.encoding, req) +} + +function opendirSync(path, options) { + path = getValidatedPath(path) + options = getOptions(options, { encoding: 'utf8' }) + + const handle = dirBinding.opendirSync(path) + + return new Dir(handle, path, options) +} + +module.exports = { + Dir, + opendir, + opendirSync, +} diff --git a/reference/internal/fs/promises.js b/reference/internal/fs/promises.js new file mode 100644 index 0000000..6436f4e --- /dev/null +++ b/reference/internal/fs/promises.js @@ -0,0 +1,1185 @@ +'use strict' + +const { + ArrayPrototypePop, + ArrayPrototypePush, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + MathMax, + MathMin, + Promise, + PromisePrototypeThen, + PromiseReject, + PromiseResolve, + SafeArrayIterator, + SafePromisePrototypeFinally, + Symbol, + SymbolAsyncDispose, + Uint8Array, +} = primordials + +const { fs: constants } = internalBinding('constants') +const { F_OK, O_SYMLINK, O_WRONLY, S_IFMT, S_IFREG } = constants + +const binding = internalBinding('fs') +const { Buffer } = require('buffer') + +const { + AbortError, + aggregateTwoErrors, + codes: { + ERR_ACCESS_DENIED, + ERR_FS_FILE_TOO_LARGE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_METHOD_NOT_IMPLEMENTED, + }, +} = require('internal/errors') +const { isArrayBufferView } = require('internal/util/types') + +const { + constants: { kIoMaxLength, kMaxUserId, kReadFileBufferLength, kReadFileUnknownBufferLength, kWriteFileMaxChunkSize }, + copyObject, + getDirents, + getOptions, + getStatFsFromBinding, + getStatsFromBinding, + getValidatedPath, + preprocessSymlinkDestination, + stringToFlags, + stringToSymlinkType, + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePosition, + validateRmOptions, + validateRmdirOptions, + validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} = require('internal/fs/utils') +const { opendir } = require('internal/fs/dir') +const { + parseFileMode, + validateAbortSignal, + validateBoolean, + validateBuffer, + validateEncoding, + validateInteger, + validateObject, + validateOneOf, + kValidateObjectAllowNullable, +} = require('internal/validators') +const pathModule = require('path') +const { getLazy, kEmptyObject, lazyDOMException, promisify, isWindows, isMacOS } = require('internal/util') +const EventEmitter = require('events') +const { StringDecoder } = require('string_decoder') +const { kFSWatchStart, watch } = require('internal/fs/watchers') +const nonNativeWatcher = require('internal/fs/recursive_watch') +const { isIterable } = require('internal/streams/utils') +const assert = require('internal/assert') + +const permission = require('internal/process/permission') + +const kHandle = Symbol('kHandle') +const kFd = Symbol('kFd') +const kRefs = Symbol('kRefs') +const kClosePromise = Symbol('kClosePromise') +const kCloseReason = Symbol('kCloseReason') +const kCloseResolve = Symbol('kCloseResolve') +const kCloseReject = Symbol('kCloseReject') +const kRef = Symbol('kRef') +const kUnref = Symbol('kUnref') +const kLocked = Symbol('kLocked') + +const { kUsePromises } = binding +const { Interface } = require('internal/readline/interface') +const { kDeserialize, kTransfer, kTransferList, markTransferMode } = require('internal/worker/js_transferable') + +const getDirectoryEntriesPromise = promisify(getDirents) +const validateRmOptionsPromise = promisify(validateRmOptions) + +let cpPromises +function lazyLoadCpPromises() { + return (cpPromises ??= require('internal/fs/cp/cp').cpFn) +} + +// Lazy loaded to avoid circular dependency. +let fsStreams +function lazyFsStreams() { + return (fsStreams ??= require('internal/fs/streams')) +} + +const lazyRimRaf = getLazy(() => require('internal/fs/rimraf').rimrafPromises) + +const lazyReadableStream = getLazy(() => require('internal/webstreams/readablestream').ReadableStream) + +// By the time the C++ land creates an error for a promise rejection (likely from a +// libuv callback), there is already no JS frames on the stack. So we need to +// wait until V8 resumes execution back to JS land before we have enough information +// to re-capture the stack trace. +function handleErrorFromBinding(error) { + ErrorCaptureStackTrace(error, handleErrorFromBinding) + return PromiseReject(error) +} + +class FileHandle extends EventEmitter { + /** + * @param {InternalFSBinding.FileHandle | undefined} filehandle + */ + constructor(filehandle) { + super() + markTransferMode(this, false, true) + this[kHandle] = filehandle + this[kFd] = filehandle ? filehandle.fd : -1 + + this[kRefs] = 1 + this[kClosePromise] = null + } + + getAsyncId() { + return this[kHandle].getAsyncId() + } + + get fd() { + return this[kFd] + } + + appendFile(data, options) { + return fsCall(writeFile, this, data, options) + } + + chmod(mode) { + return fsCall(fchmod, this, mode) + } + + chown(uid, gid) { + return fsCall(fchown, this, uid, gid) + } + + datasync() { + return fsCall(fdatasync, this) + } + + sync() { + return fsCall(fsync, this) + } + + read(buffer, offset, length, position) { + return fsCall(read, this, buffer, offset, length, position) + } + + readv(buffers, position) { + return fsCall(readv, this, buffers, position) + } + + readFile(options) { + return fsCall(readFile, this, options) + } + + readLines(options = undefined) { + return new Interface({ + input: this.createReadStream(options), + crlfDelay: Infinity, + }) + } + + stat(options) { + return fsCall(fstat, this, options) + } + + truncate(len = 0) { + return fsCall(ftruncate, this, len) + } + + utimes(atime, mtime) { + return fsCall(futimes, this, atime, mtime) + } + + write(buffer, offset, length, position) { + return fsCall(write, this, buffer, offset, length, position) + } + + writev(buffers, position) { + return fsCall(writev, this, buffers, position) + } + + writeFile(data, options) { + return fsCall(writeFile, this, data, options) + } + + close = () => { + if (this[kFd] === -1) { + return PromiseResolve() + } + + if (this[kClosePromise]) { + return this[kClosePromise] + } + + this[kRefs]-- + if (this[kRefs] === 0) { + this[kFd] = -1 + this[kClosePromise] = SafePromisePrototypeFinally(this[kHandle].close(), () => { + this[kClosePromise] = undefined + }) + } else { + this[kClosePromise] = SafePromisePrototypeFinally( + new Promise((resolve, reject) => { + this[kCloseResolve] = resolve + this[kCloseReject] = reject + }), + () => { + this[kClosePromise] = undefined + this[kCloseReject] = undefined + this[kCloseResolve] = undefined + }, + ) + } + + this.emit('close') + return this[kClosePromise] + } + + async [SymbolAsyncDispose]() { + await this.close() + } + + /** + * @typedef {import('../webstreams/readablestream').ReadableStream + * } ReadableStream + * @param {{ type?: 'bytes', autoClose?: boolean }} [options] + * @returns {ReadableStream} + */ + readableWebStream(options = kEmptyObject) { + if (this[kFd] === -1) throw new ERR_INVALID_STATE('The FileHandle is closed') + if (this[kClosePromise]) throw new ERR_INVALID_STATE('The FileHandle is closing') + if (this[kLocked]) throw new ERR_INVALID_STATE('The FileHandle is locked') + this[kLocked] = true + + validateObject(options, 'options') + const { type = 'bytes', autoClose = false } = options + + validateBoolean(autoClose, 'options.autoClose') + + if (type !== 'bytes') { + process.emitWarning( + 'A non-"bytes" options.type has no effect. A byte-oriented steam is ' + 'always created.', + 'ExperimentalWarning', + ) + } + + const readFn = FunctionPrototypeBind(this.read, this) + const ondone = async () => { + this[kUnref]() + if (autoClose) await this.close() + } + + const ReadableStream = lazyReadableStream() + const readable = new ReadableStream({ + type: 'bytes', + autoAllocateChunkSize: 16384, + + async pull(controller) { + const view = controller.byobRequest.view + const { bytesRead } = await readFn(view, view.byteOffset, view.byteLength) + + if (bytesRead === 0) { + controller.close() + await ondone() + } + + controller.byobRequest.respond(bytesRead) + }, + + async cancel() { + await ondone() + }, + }) + + const { readableStreamCancel } = require('internal/webstreams/readablestream') + this[kRef]() + this.once('close', () => { + readableStreamCancel(readable) + }) + + return readable + } + + /** + * @typedef {import('./streams').ReadStream + * } ReadStream + * @param {{ + * encoding?: string; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * end?: number; + * highWaterMark?: number; + * }} [options] + * @returns {ReadStream} + */ + createReadStream(options = undefined) { + const { ReadStream } = lazyFsStreams() + return new ReadStream(undefined, { ...options, fd: this }) + } + + /** + * @typedef {import('./streams').WriteStream + * } WriteStream + * @param {{ + * encoding?: string; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * highWaterMark?: number; + * flush?: boolean; + * }} [options] + * @returns {WriteStream} + */ + createWriteStream(options = undefined) { + const { WriteStream } = lazyFsStreams() + return new WriteStream(undefined, { ...options, fd: this }) + } + + [kTransfer]() { + if (this[kClosePromise] || this[kRefs] > 1) { + throw lazyDOMException('Cannot transfer FileHandle while in use', 'DataCloneError') + } + + const handle = this[kHandle] + this[kFd] = -1 + this[kCloseReason] = 'The FileHandle has been transferred' + this[kHandle] = null + this[kRefs] = 0 + + return { + data: { handle }, + deserializeInfo: 'internal/fs/promises:FileHandle', + } + } + + [kTransferList]() { + return [this[kHandle]] + } + + [kDeserialize]({ handle }) { + this[kHandle] = handle + this[kFd] = handle.fd + } + + [kRef]() { + this[kRefs]++ + } + + [kUnref]() { + this[kRefs]-- + if (this[kRefs] === 0) { + this[kFd] = -1 + PromisePrototypeThen(this[kHandle].close(), this[kCloseResolve], this[kCloseReject]) + } + } +} + +async function handleFdClose(fileOpPromise, closeFunc) { + return PromisePrototypeThen( + fileOpPromise, + (result) => PromisePrototypeThen(closeFunc(), () => result), + (opError) => + PromisePrototypeThen( + closeFunc(), + () => PromiseReject(opError), + (closeError) => PromiseReject(aggregateTwoErrors(closeError, opError)), + ), + ) +} + +async function handleFdSync(fileOpPromise, handle) { + return PromisePrototypeThen( + fileOpPromise, + (result) => + PromisePrototypeThen( + handle.sync(), + () => result, + (syncError) => PromiseReject(syncError), + ), + (opError) => PromiseReject(opError), + ) +} + +async function fsCall(fn, handle, ...args) { + assert(handle[kRefs] !== undefined, 'handle must be an instance of FileHandle') + + if (handle.fd === -1) { + // eslint-disable-next-line no-restricted-syntax + const err = new Error(handle[kCloseReason] ?? 'file closed') + err.code = 'EBADF' + err.syscall = fn.name + throw err + } + + try { + handle[kRef]() + return await fn(handle, ...new SafeArrayIterator(args)) + } finally { + handle[kUnref]() + } +} + +function checkAborted(signal) { + if (signal?.aborted) throw new AbortError(undefined, { cause: signal.reason }) +} + +async function writeFileHandle(filehandle, data, signal, encoding) { + checkAborted(signal) + if (isCustomIterable(data)) { + for await (const buf of data) { + checkAborted(signal) + const toWrite = isArrayBufferView(buf) ? buf : Buffer.from(buf, encoding || 'utf8') + let remaining = toWrite.byteLength + while (remaining > 0) { + const writeSize = MathMin(kWriteFileMaxChunkSize, remaining) + const { bytesWritten } = await write(filehandle, toWrite, toWrite.byteLength - remaining, writeSize) + remaining -= bytesWritten + checkAborted(signal) + } + } + return + } + data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength) + let remaining = data.byteLength + if (remaining === 0) return + do { + checkAborted(signal) + const { bytesWritten } = await write(filehandle, data, 0, MathMin(kWriteFileMaxChunkSize, data.byteLength)) + remaining -= bytesWritten + data = new Uint8Array(data.buffer, data.byteOffset + bytesWritten, data.byteLength - bytesWritten) + } while (remaining > 0) +} + +async function readFileHandle(filehandle, options) { + const signal = options?.signal + const encoding = options?.encoding + const decoder = encoding && new StringDecoder(encoding) + + checkAborted(signal) + + const statFields = await PromisePrototypeThen( + binding.fstat(filehandle.fd, false, kUsePromises), + undefined, + handleErrorFromBinding, + ) + + checkAborted(signal) + + let size = 0 + let length = 0 + if ((statFields[1 /* mode */] & S_IFMT) === S_IFREG) { + size = statFields[8 /* size */] + length = encoding ? MathMin(size, kReadFileBufferLength) : size + } + if (length === 0) { + length = kReadFileUnknownBufferLength + } + + if (size > kIoMaxLength) throw new ERR_FS_FILE_TOO_LARGE(size) + + let totalRead = 0 + const noSize = size === 0 + let buffer = Buffer.allocUnsafeSlow(length) + let result = '' + let offset = 0 + let buffers + const chunkedRead = length > kReadFileBufferLength + + while (true) { + checkAborted(signal) + + if (chunkedRead) { + length = MathMin(size - totalRead, kReadFileBufferLength) + } + + const bytesRead = + (await PromisePrototypeThen( + binding.read(filehandle.fd, buffer, offset, length, -1, kUsePromises), + undefined, + handleErrorFromBinding, + )) ?? 0 + totalRead += bytesRead + + if (bytesRead === 0 || totalRead === size || (bytesRead !== buffer.length && !chunkedRead && !noSize)) { + const singleRead = bytesRead === totalRead + + const bytesToCheck = chunkedRead ? totalRead : bytesRead + + if (bytesToCheck !== buffer.length) { + buffer = buffer.subarray(0, bytesToCheck) + } + + if (!encoding) { + if (noSize && !singleRead) { + ArrayPrototypePush(buffers, buffer) + return Buffer.concat(buffers, totalRead) + } + return buffer + } + + if (singleRead) { + return buffer.toString(encoding) + } + result += decoder.end(buffer) + return result + } + const readBuffer = bytesRead !== buffer.length ? buffer.subarray(0, bytesRead) : buffer + if (encoding) { + result += decoder.write(readBuffer) + } else if (size !== 0) { + offset = totalRead + } else { + buffers ??= [] + // Unknown file size requires chunks. + ArrayPrototypePush(buffers, readBuffer) + buffer = Buffer.allocUnsafeSlow(kReadFileUnknownBufferLength) + } + } +} + +// All of the functions are defined as async in order to ensure that errors +// thrown cause promise rejections rather than being thrown synchronously. +async function access(path, mode = F_OK) { + return await PromisePrototypeThen( + binding.access(getValidatedPath(path), mode, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function cp(src, dest, options) { + options = validateCpOptions(options) + src = getValidatedPath(src, 'src') + dest = getValidatedPath(dest, 'dest') + return lazyLoadCpPromises()(src, dest, options) +} + +async function copyFile(src, dest, mode) { + return await PromisePrototypeThen( + binding.copyFile(getValidatedPath(src, 'src'), getValidatedPath(dest, 'dest'), mode, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +// Note that unlike fs.open() which uses numeric file descriptors, +// fsPromises.open() uses the fs.FileHandle class. +async function open(path, flags, mode) { + path = getValidatedPath(path) + const flagsNumber = stringToFlags(flags) + mode = parseFileMode(mode, 'mode', 0o666) + return new FileHandle( + await PromisePrototypeThen( + binding.openFileHandle(path, flagsNumber, mode, kUsePromises), + undefined, + handleErrorFromBinding, + ), + ) +} + +async function read(handle, bufferOrParams, offset, length, position) { + let buffer = bufferOrParams + if (!isArrayBufferView(buffer)) { + // This is fh.read(params) + if (bufferOrParams !== undefined) { + validateObject(bufferOrParams, 'options', kValidateObjectAllowNullable) + } + ;({ + buffer = Buffer.alloc(16384), + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = bufferOrParams ?? kEmptyObject) + + validateBuffer(buffer) + } + + if (offset !== null && typeof offset === 'object') { + // This is fh.read(buffer, options) + ;({ offset = 0, length = buffer.byteLength - offset, position = null } = offset) + } + + if (offset == null) { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + + length ??= buffer.byteLength - offset + + if (length === 0) return { __proto__: null, bytesRead: length, buffer } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE('buffer', buffer, 'is empty and cannot be written') + } + + validateOffsetLengthRead(offset, length, buffer.byteLength) + + if (position == null) { + position = -1 + } else { + validatePosition(position, 'position', length) + } + + const bytesRead = + (await PromisePrototypeThen( + binding.read(handle.fd, buffer, offset, length, position, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0 + + return { __proto__: null, bytesRead, buffer } +} + +async function readv(handle, buffers, position) { + validateBufferArray(buffers) + + if (typeof position !== 'number') position = null + + const bytesRead = + (await PromisePrototypeThen( + binding.readBuffers(handle.fd, buffers, position, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0 + return { __proto__: null, bytesRead, buffers } +} + +async function write(handle, buffer, offsetOrOptions, length, position) { + if (buffer?.byteLength === 0) return { __proto__: null, bytesWritten: 0, buffer } + + let offset = offsetOrOptions + if (isArrayBufferView(buffer)) { + if (typeof offset === 'object') { + ;({ offset = 0, length = buffer.byteLength - offset, position = null } = offsetOrOptions ?? kEmptyObject) + } + + if (offset == null) { + offset = 0 + } else { + validateInteger(offset, 'offset', 0) + } + if (typeof length !== 'number') length = buffer.byteLength - offset + if (typeof position !== 'number') position = null + validateOffsetLengthWrite(offset, length, buffer.byteLength) + const bytesWritten = + (await PromisePrototypeThen( + binding.writeBuffer(handle.fd, buffer, offset, length, position, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0 + return { __proto__: null, bytesWritten, buffer } + } + + validateStringAfterArrayBufferView(buffer, 'buffer') + validateEncoding(buffer, length) + const bytesWritten = + (await PromisePrototypeThen( + binding.writeString(handle.fd, buffer, offset, length, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0 + return { __proto__: null, bytesWritten, buffer } +} + +async function writev(handle, buffers, position) { + validateBufferArray(buffers) + + if (typeof position !== 'number') position = null + + if (buffers.length === 0) { + return { __proto__: null, bytesWritten: 0, buffers } + } + + const bytesWritten = + (await PromisePrototypeThen( + binding.writeBuffers(handle.fd, buffers, position, kUsePromises), + undefined, + handleErrorFromBinding, + )) || 0 + return { __proto__: null, bytesWritten, buffers } +} + +async function rename(oldPath, newPath) { + oldPath = getValidatedPath(oldPath, 'oldPath') + newPath = getValidatedPath(newPath, 'newPath') + return await PromisePrototypeThen(binding.rename(oldPath, newPath, kUsePromises), undefined, handleErrorFromBinding) +} + +async function truncate(path, len = 0) { + const fd = await open(path, 'r+') + return handleFdClose(ftruncate(fd, len), fd.close) +} + +async function ftruncate(handle, len = 0) { + validateInteger(len, 'len') + len = MathMax(0, len) + return await PromisePrototypeThen(binding.ftruncate(handle.fd, len, kUsePromises), undefined, handleErrorFromBinding) +} + +async function rm(path, options) { + path = getValidatedPath(path) + options = await validateRmOptionsPromise(path, options, false) + return lazyRimRaf()(path, options) +} + +async function rmdir(path, options) { + path = getValidatedPath(path) + + if (options?.recursive !== undefined) { + throw new ERR_INVALID_ARG_VALUE('options.recursive', options.recursive, 'is no longer supported') + } + + options = validateRmdirOptions(options) + + return await PromisePrototypeThen(binding.rmdir(path, kUsePromises), undefined, handleErrorFromBinding) +} + +async function fdatasync(handle) { + return await PromisePrototypeThen(binding.fdatasync(handle.fd, kUsePromises), undefined, handleErrorFromBinding) +} + +async function fsync(handle) { + return await PromisePrototypeThen(binding.fsync(handle.fd, kUsePromises), undefined, handleErrorFromBinding) +} + +async function mkdir(path, options) { + if (typeof options === 'number' || typeof options === 'string') { + options = { mode: options } + } + const { recursive = false, mode = 0o777 } = options || kEmptyObject + path = getValidatedPath(path) + validateBoolean(recursive, 'options.recursive') + + return await PromisePrototypeThen( + binding.mkdir(path, parseFileMode(mode, 'mode', 0o777), recursive, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function readdirRecursive(originalPath, options) { + const result = [] + const queue = [ + [ + originalPath, + await PromisePrototypeThen( + binding.readdir(originalPath, options.encoding, !!options.withFileTypes, kUsePromises), + undefined, + handleErrorFromBinding, + ), + ], + ] + + if (options.withFileTypes) { + while (queue.length > 0) { + // If we want to implement BFS make this a `shift` call instead of `pop` + const { 0: path, 1: readdir } = ArrayPrototypePop(queue) + for (const dirent of getDirents(path, readdir)) { + ArrayPrototypePush(result, dirent) + if (dirent.isDirectory()) { + const direntPath = pathModule.join(path, dirent.name) + ArrayPrototypePush(queue, [ + direntPath, + await PromisePrototypeThen( + binding.readdir(direntPath, options.encoding, true, kUsePromises), + undefined, + handleErrorFromBinding, + ), + ]) + } + } + } + } else { + while (queue.length > 0) { + const { 0: path, 1: readdir } = ArrayPrototypePop(queue) + for (const ent of readdir) { + const direntPath = pathModule.join(path, ent) + const stat = binding.internalModuleStat(direntPath) + ArrayPrototypePush(result, pathModule.relative(originalPath, direntPath)) + if (stat === 1) { + ArrayPrototypePush(queue, [ + direntPath, + await PromisePrototypeThen( + binding.readdir(direntPath, options.encoding, false, kUsePromises), + undefined, + handleErrorFromBinding, + ), + ]) + } + } + } + } + + return result +} + +async function readdir(path, options) { + options = getOptions(options) + + // Make shallow copy to prevent mutating options from affecting results + options = copyObject(options) + + path = getValidatedPath(path) + if (options.recursive) { + return readdirRecursive(path, options) + } + const result = await PromisePrototypeThen( + binding.readdir(path, options.encoding, !!options.withFileTypes, kUsePromises), + undefined, + handleErrorFromBinding, + ) + return options.withFileTypes ? getDirectoryEntriesPromise(path, result) : result +} + +async function readlink(path, options) { + options = getOptions(options) + path = getValidatedPath(path, 'oldPath') + return await PromisePrototypeThen( + binding.readlink(path, options.encoding, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function symlink(target, path, type) { + validateOneOf(type, 'type', ['dir', 'file', 'junction', null, undefined]) + if (isWindows && type == null) { + try { + const absoluteTarget = pathModule.resolve(`${path}`, '..', `${target}`) + type = (await stat(absoluteTarget)).isDirectory() ? 'dir' : 'file' + } catch { + // Default to 'file' if path is invalid or file does not exist + type = 'file' + } + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has('fs')) { + throw new ERR_ACCESS_DENIED('fs.symlink API requires full fs.read and fs.write permissions.') + } + + target = getValidatedPath(target, 'target') + path = getValidatedPath(path) + return await PromisePrototypeThen( + binding.symlink(preprocessSymlinkDestination(target, type, path), path, stringToSymlinkType(type), kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function fstat(handle, options = { bigint: false }) { + const result = await PromisePrototypeThen( + binding.fstat(handle.fd, options.bigint, kUsePromises), + undefined, + handleErrorFromBinding, + ) + return getStatsFromBinding(result) +} + +async function lstat(path, options = { bigint: false }) { + const result = await PromisePrototypeThen( + binding.lstat(getValidatedPath(path), options.bigint, kUsePromises), + undefined, + handleErrorFromBinding, + ) + return getStatsFromBinding(result) +} + +async function stat(path, options = { bigint: false, throwIfNoEntry: true }) { + const result = await PromisePrototypeThen( + binding.stat(getValidatedPath(path), options.bigint, kUsePromises, options.throwIfNoEntry), + undefined, + handleErrorFromBinding, + ) + + // Binding will resolve undefined if UV_ENOENT or UV_ENOTDIR and throwIfNoEntry is false + if (!options.throwIfNoEntry && result === undefined) return undefined + + return getStatsFromBinding(result) +} + +async function statfs(path, options = { bigint: false }) { + const result = await PromisePrototypeThen( + binding.statfs(getValidatedPath(path), options.bigint, kUsePromises), + undefined, + handleErrorFromBinding, + ) + return getStatFsFromBinding(result) +} + +async function link(existingPath, newPath) { + existingPath = getValidatedPath(existingPath, 'existingPath') + newPath = getValidatedPath(newPath, 'newPath') + return await PromisePrototypeThen( + binding.link(existingPath, newPath, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function unlink(path) { + return await PromisePrototypeThen( + binding.unlink(getValidatedPath(path), kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function fchmod(handle, mode) { + mode = parseFileMode(mode, 'mode') + return await PromisePrototypeThen(binding.fchmod(handle.fd, mode, kUsePromises), undefined, handleErrorFromBinding) +} + +async function chmod(path, mode) { + path = getValidatedPath(path) + mode = parseFileMode(mode, 'mode') + return await PromisePrototypeThen(binding.chmod(path, mode, kUsePromises), undefined, handleErrorFromBinding) +} + +async function lchmod(path, mode) { + if (O_SYMLINK === undefined) throw new ERR_METHOD_NOT_IMPLEMENTED('lchmod()') + + const fd = await open(path, O_WRONLY | O_SYMLINK) + return handleFdClose(fchmod(fd, mode), fd.close) +} + +async function lchown(path, uid, gid) { + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + return await PromisePrototypeThen(binding.lchown(path, uid, gid, kUsePromises), undefined, handleErrorFromBinding) +} + +async function fchown(handle, uid, gid) { + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + return await PromisePrototypeThen( + binding.fchown(handle.fd, uid, gid, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function chown(path, uid, gid) { + path = getValidatedPath(path) + validateInteger(uid, 'uid', -1, kMaxUserId) + validateInteger(gid, 'gid', -1, kMaxUserId) + return await PromisePrototypeThen(binding.chown(path, uid, gid, kUsePromises), undefined, handleErrorFromBinding) +} + +async function utimes(path, atime, mtime) { + path = getValidatedPath(path) + return await PromisePrototypeThen( + binding.utimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function futimes(handle, atime, mtime) { + atime = toUnixTimestamp(atime, 'atime') + mtime = toUnixTimestamp(mtime, 'mtime') + return await PromisePrototypeThen( + binding.futimes(handle.fd, atime, mtime, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function lutimes(path, atime, mtime) { + return await PromisePrototypeThen( + binding.lutimes(getValidatedPath(path), toUnixTimestamp(atime), toUnixTimestamp(mtime), kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function realpath(path, options) { + options = getOptions(options) + return await PromisePrototypeThen( + binding.realpath(getValidatedPath(path), options.encoding, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function mkdtemp(prefix, options) { + options = getOptions(options) + + prefix = getValidatedPath(prefix, 'prefix') + warnOnNonPortableTemplate(prefix) + + return await PromisePrototypeThen( + binding.mkdtemp(prefix, options.encoding, kUsePromises), + undefined, + handleErrorFromBinding, + ) +} + +async function mkdtempDisposable(prefix, options) { + options = getOptions(options) + + prefix = getValidatedPath(prefix, 'prefix') + warnOnNonPortableTemplate(prefix) + + const cwd = process.cwd() + const path = await PromisePrototypeThen( + binding.mkdtemp(prefix, options.encoding, kUsePromises), + undefined, + handleErrorFromBinding, + ) + // Stash the full path in case of process.chdir() + const fullPath = pathModule.resolve(cwd, path) + + const remove = async () => { + const rmrf = lazyRimRaf() + await rmrf(fullPath, { + maxRetries: 0, + recursive: true, + retryDelay: 0, + }) + } + return { + __proto__: null, + path, + remove, + async [SymbolAsyncDispose]() { + await remove() + }, + } +} + +async function writeFile(path, data, options) { + options = getOptions(options, { + encoding: 'utf8', + mode: 0o666, + flag: 'w', + flush: false, + }) + const flag = options.flag || 'w' + const flush = options.flush ?? false + + validateBoolean(flush, 'options.flush') + + if (!isArrayBufferView(data) && !isCustomIterable(data)) { + validateStringAfterArrayBufferView(data, 'data') + data = Buffer.from(data, options.encoding || 'utf8') + } + + validateAbortSignal(options.signal) + if (path instanceof FileHandle) return writeFileHandle(path, data, options.signal, options.encoding) + + checkAborted(options.signal) + + const fd = await open(path, flag, options.mode) + let writeOp = writeFileHandle(fd, data, options.signal, options.encoding) + + if (flush) { + writeOp = handleFdSync(writeOp, fd) + } + + return handleFdClose(writeOp, fd.close) +} + +function isCustomIterable(obj) { + return isIterable(obj) && !isArrayBufferView(obj) && typeof obj !== 'string' +} + +async function appendFile(path, data, options) { + options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'a' }) + options = copyObject(options) + options.flag ||= 'a' + return writeFile(path, data, options) +} + +async function readFile(path, options) { + options = getOptions(options, { flag: 'r' }) + const flag = options.flag || 'r' + + if (path instanceof FileHandle) return readFileHandle(path, options) + + checkAborted(options.signal) + + const fd = await open(path, flag, 0o666) + return handleFdClose(readFileHandle(fd, options), fd.close) +} + +async function* _watch(filename, options = kEmptyObject) { + validateObject(options, 'options') + + if (options.recursive != null) { + validateBoolean(options.recursive, 'options.recursive') + + // TODO(anonrig): Remove non-native watcher when/if libuv supports recursive. + // As of November 2022, libuv does not support recursive file watch on all platforms, + // e.g. Linux due to the limitations of inotify. + if (options.recursive && !isMacOS && !isWindows) { + const watcher = new nonNativeWatcher.FSWatcher(options) + watcher[kFSWatchStart](filename) + yield* watcher + return + } + } + + yield* watch(filename, options) +} + +const lazyGlob = getLazy(() => require('internal/fs/glob').Glob) +async function* glob(pattern, options) { + const Glob = lazyGlob() + yield* new Glob(pattern, options).glob() +} + +module.exports = { + exports: { + access, + copyFile, + cp, + glob, + open, + opendir: promisify(opendir), + rename, + truncate, + rm, + rmdir, + mkdir, + readdir, + readlink, + symlink, + lstat, + stat, + statfs, + link, + unlink, + chmod, + lchmod, + lchown, + chown, + utimes, + lutimes, + realpath, + mkdtemp, + mkdtempDisposable, + writeFile, + appendFile, + readFile, + watch: !isMacOS && !isWindows ? _watch : watch, + constants, + }, + + FileHandle, + kRef, + kUnref, +} diff --git a/reference/internal/fs/utils.js b/reference/internal/fs/utils.js new file mode 100644 index 0000000..7a90fde --- /dev/null +++ b/reference/internal/fs/utils.js @@ -0,0 +1,949 @@ +'use strict' + +const { + ArrayIsArray, + BigInt, + Date, + DateNow, + DatePrototypeGetTime, + ErrorCaptureStackTrace, + FunctionPrototypeCall, + MathMin, + MathRound, + Number, + NumberIsFinite, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectIs, + ObjectSetPrototypeOf, + ReflectOwnKeys, + RegExpPrototypeSymbolReplace, + StringPrototypeEndsWith, + StringPrototypeIncludes, + Symbol, + TypedArrayPrototypeAt, + TypedArrayPrototypeIncludes, +} = primordials + +const { Buffer } = require('buffer') +const { + UVException, + codes: { ERR_FS_EISDIR, ERR_INCOMPATIBLE_OPTION_PAIR, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE }, + hideStackFrames, +} = require('internal/errors') +const { isArrayBufferView, isBigInt64Array, isDate, isUint8Array } = require('internal/util/types') +const { kEmptyObject, once, deprecate, isWindows } = require('internal/util') +const { toPathIfFileURL } = require('internal/url') +const { + validateAbortSignal, + validateBoolean, + validateFunction, + validateInt32, + validateInteger, + validateObject, + validateUint32, +} = require('internal/validators') +const pathModule = require('path') +const kType = Symbol('type') +const kStats = Symbol('stats') +const assert = require('internal/assert') + +const { + fs: { + F_OK = 0, + W_OK = 0, + R_OK = 0, + X_OK = 0, + COPYFILE_EXCL, + COPYFILE_FICLONE, + COPYFILE_FICLONE_FORCE, + O_APPEND, + O_CREAT, + O_EXCL, + O_RDONLY, + O_RDWR, + O_SYNC, + O_TRUNC, + O_WRONLY, + S_IFBLK, + S_IFCHR, + S_IFDIR, + S_IFIFO, + S_IFLNK, + S_IFMT, + S_IFREG, + S_IFSOCK, + UV_FS_SYMLINK_DIR, + UV_FS_SYMLINK_JUNCTION, + UV_DIRENT_UNKNOWN, + UV_DIRENT_FILE, + UV_DIRENT_DIR, + UV_DIRENT_LINK, + UV_DIRENT_FIFO, + UV_DIRENT_SOCKET, + UV_DIRENT_CHAR, + UV_DIRENT_BLOCK, + }, + os: { + errno: { EISDIR }, + }, +} = internalBinding('constants') + +// The access modes can be any of F_OK, R_OK, W_OK or X_OK. Some might not be +// available on specific systems. They can be used in combination as well +// (F_OK | R_OK | W_OK | X_OK). +const kMinimumAccessMode = MathMin(F_OK, W_OK, R_OK, X_OK) +const kMaximumAccessMode = F_OK | W_OK | R_OK | X_OK + +const kDefaultCopyMode = 0 +// The copy modes can be any of COPYFILE_EXCL, COPYFILE_FICLONE or +// COPYFILE_FICLONE_FORCE. They can be used in combination as well +// (COPYFILE_EXCL | COPYFILE_FICLONE | COPYFILE_FICLONE_FORCE). +const kMinimumCopyMode = MathMin(kDefaultCopyMode, COPYFILE_EXCL, COPYFILE_FICLONE, COPYFILE_FICLONE_FORCE) +const kMaximumCopyMode = COPYFILE_EXCL | COPYFILE_FICLONE | COPYFILE_FICLONE_FORCE + +// Most platforms don't allow reads or writes >= 2 GiB. +// See https://github.com/libuv/libuv/pull/1501. +const kIoMaxLength = 2 ** 31 - 1 + +// Use 64kb in case the file type is not a regular file and thus do not know the +// actual file size. Increasing the value further results in more frequent over +// allocation for small files and consumes CPU time and memory that should be +// used else wise. +// Use up to 512kb per read otherwise to partition reading big files to prevent +// blocking other threads in case the available threads are all in use. +const kReadFileUnknownBufferLength = 64 * 1024 +const kReadFileBufferLength = 512 * 1024 + +const kWriteFileMaxChunkSize = 512 * 1024 + +const kMaxUserId = 2 ** 32 - 1 + +let fs +function lazyLoadFs() { + return (fs ??= require('fs')) +} + +function assertEncoding(encoding) { + if (encoding && !Buffer.isEncoding(encoding)) { + const reason = 'is invalid encoding' + throw new ERR_INVALID_ARG_VALUE('encoding', encoding, reason) + } +} + +class Dirent { + constructor(name, type, path) { + this.name = name + this.parentPath = path + this[kType] = type + } + + isDirectory() { + return this[kType] === UV_DIRENT_DIR + } + + isFile() { + return this[kType] === UV_DIRENT_FILE + } + + isBlockDevice() { + return this[kType] === UV_DIRENT_BLOCK + } + + isCharacterDevice() { + return this[kType] === UV_DIRENT_CHAR + } + + isSymbolicLink() { + return this[kType] === UV_DIRENT_LINK + } + + isFIFO() { + return this[kType] === UV_DIRENT_FIFO + } + + isSocket() { + return this[kType] === UV_DIRENT_SOCKET + } +} + +class DirentFromStats extends Dirent { + constructor(name, stats, path) { + super(name, null, path) + this[kStats] = stats + } +} + +for (const name of ReflectOwnKeys(Dirent.prototype)) { + if (name === 'constructor') { + continue + } + DirentFromStats.prototype[name] = function () { + return this[kStats][name]() + } +} + +function copyObject(source) { + const target = {} + for (const key in source) target[key] = source[key] + return target +} + +const bufferSep = Buffer.from(pathModule.sep) + +function join(path, name) { + if ((typeof path === 'string' || isUint8Array(path)) && name === undefined) { + return path + } + + if (typeof path === 'string' && isUint8Array(name)) { + const pathBuffer = Buffer.from(pathModule.join(path, pathModule.sep)) + return Buffer.concat([pathBuffer, name]) + } + + if (typeof path === 'string' && typeof name === 'string') { + return pathModule.join(path, name) + } + + if (isUint8Array(path) && isUint8Array(name)) { + return Buffer.concat([path, bufferSep, name]) + } + + throw new ERR_INVALID_ARG_TYPE('path', ['string', 'Buffer'], path) +} + +function getDirents(path, { 0: names, 1: types }, callback) { + let i + if (typeof callback === 'function') { + const len = names.length + let toFinish = 0 + callback = once(callback) + for (i = 0; i < len; i++) { + const type = types[i] + if (type === UV_DIRENT_UNKNOWN) { + const name = names[i] + const idx = i + toFinish++ + let filepath + try { + filepath = join(path, name) + } catch (err) { + callback(err) + return + } + lazyLoadFs().lstat(filepath, (err, stats) => { + if (err) { + callback(err) + return + } + names[idx] = new DirentFromStats(name, stats, path) + if (--toFinish === 0) { + callback(null, names) + } + }) + } else { + names[i] = new Dirent(names[i], types[i], path) + } + } + if (toFinish === 0) { + callback(null, names) + } + } else { + const len = names.length + for (i = 0; i < len; i++) { + names[i] = getDirent(path, names[i], types[i]) + } + return names + } +} + +function getDirent(path, name, type, callback) { + if (typeof callback === 'function') { + if (type === UV_DIRENT_UNKNOWN) { + let filepath + try { + filepath = join(path, name) + } catch (err) { + callback(err) + return + } + lazyLoadFs().lstat(filepath, (err, stats) => { + if (err) { + callback(err) + return + } + callback(null, new DirentFromStats(name, stats, path)) + }) + } else { + callback(null, new Dirent(name, type, path)) + } + } else if (type === UV_DIRENT_UNKNOWN) { + const filepath = join(path, name) + const stats = lazyLoadFs().lstatSync(filepath) + return new DirentFromStats(name, stats, path) + } else { + return new Dirent(name, type, path) + } +} + +function getOptions(options, defaultOptions = kEmptyObject) { + if (options == null || typeof options === 'function') { + return defaultOptions + } + + if (typeof options === 'string') { + defaultOptions = { ...defaultOptions } + defaultOptions.encoding = options + options = defaultOptions + } else if (typeof options !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['string', 'Object'], options) + } + + if (options.encoding !== 'buffer') assertEncoding(options.encoding) + + if (options.signal !== undefined) { + validateAbortSignal(options.signal, 'options.signal') + } + + return options +} + +/** + * @param {InternalFSBinding.FSSyncContext} ctx + */ +function handleErrorFromBinding(ctx) { + if (ctx.errno !== undefined) { + // libuv error numbers + const err = new UVException(ctx) + ErrorCaptureStackTrace(err, handleErrorFromBinding) + throw err + } + if (ctx.error !== undefined) { + // Errors created in C++ land. + // TODO(joyeecheung): currently, ctx.error are encoding errors + // usually caused by memory problems. We need to figure out proper error + // code(s) for this. + ErrorCaptureStackTrace(ctx.error, handleErrorFromBinding) + throw ctx.error + } +} + +function preprocessSymlinkDestination(path, type, linkPath) { + if (!isWindows) { + // No preprocessing is needed on Unix. + return path + } + path = '' + path + if (type === 'junction') { + // Junctions paths need to be absolute and \\?\-prefixed. + // A relative target is relative to the link's parent directory. + path = pathModule.resolve(linkPath, '..', path) + return pathModule.toNamespacedPath(path) + } + if (pathModule.isAbsolute(path)) { + // If the path is absolute, use the \\?\-prefix to enable long filenames + return pathModule.toNamespacedPath(path) + } + // Windows symlinks don't tolerate forward slashes. + return RegExpPrototypeSymbolReplace(/\//g, path, '\\') +} + +// Constructor for file stats. +function StatsBase(dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks) { + this.dev = dev + this.mode = mode + this.nlink = nlink + this.uid = uid + this.gid = gid + this.rdev = rdev + this.blksize = blksize + this.ino = ino + this.size = size + this.blocks = blocks +} + +StatsBase.prototype.isDirectory = function () { + return this._checkModeProperty(S_IFDIR) +} + +StatsBase.prototype.isFile = function () { + return this._checkModeProperty(S_IFREG) +} + +StatsBase.prototype.isBlockDevice = function () { + return this._checkModeProperty(S_IFBLK) +} + +StatsBase.prototype.isCharacterDevice = function () { + return this._checkModeProperty(S_IFCHR) +} + +StatsBase.prototype.isSymbolicLink = function () { + return this._checkModeProperty(S_IFLNK) +} + +StatsBase.prototype.isFIFO = function () { + return this._checkModeProperty(S_IFIFO) +} + +StatsBase.prototype.isSocket = function () { + return this._checkModeProperty(S_IFSOCK) +} + +const kNsPerMsBigInt = 1_000_000n +const kNsPerSecBigInt = 1_000_000_000n +const kMsPerSec = 1_000 +const kNsPerMs = 1_000_000 +function msFromTimeSpec(sec, nsec) { + return sec * kMsPerSec + nsec / kNsPerMs +} + +function nsFromTimeSpecBigInt(sec, nsec) { + return sec * kNsPerSecBigInt + nsec +} + +// The Date constructor performs Math.floor() on the absolute value +// of the timestamp: https://tc39.es/ecma262/#sec-timeclip +// Since there may be a precision loss when the timestamp is +// converted to a floating point number, we manually round +// the timestamp here before passing it to Date(). +// Refs: https://github.com/nodejs/node/pull/12607 +// Refs: https://github.com/nodejs/node/pull/43714 +function dateFromMs(ms) { + // Coercing to number, ms can be bigint + return new Date(MathRound(Number(ms))) +} + +const lazyDateFields = { + __proto__: null, + atime: { + __proto__: null, + enumerable: true, + configurable: true, + get() { + return (this.atime = dateFromMs(this.atimeMs)) + }, + set(value) { + ObjectDefineProperty(this, 'atime', { __proto__: null, value, writable: true }) + }, + }, + mtime: { + __proto__: null, + enumerable: true, + configurable: true, + get() { + return (this.mtime = dateFromMs(this.mtimeMs)) + }, + set(value) { + ObjectDefineProperty(this, 'mtime', { __proto__: null, value, writable: true }) + }, + }, + ctime: { + __proto__: null, + enumerable: true, + configurable: true, + get() { + return (this.ctime = dateFromMs(this.ctimeMs)) + }, + set(value) { + ObjectDefineProperty(this, 'ctime', { __proto__: null, value, writable: true }) + }, + }, + birthtime: { + __proto__: null, + enumerable: true, + configurable: true, + get() { + return (this.birthtime = dateFromMs(this.birthtimeMs)) + }, + set(value) { + ObjectDefineProperty(this, 'birthtime', { __proto__: null, value, writable: true }) + }, + }, +} + +function BigIntStats( + dev, + mode, + nlink, + uid, + gid, + rdev, + blksize, + ino, + size, + blocks, + atimeNs, + mtimeNs, + ctimeNs, + birthtimeNs, +) { + FunctionPrototypeCall(StatsBase, this, dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks) + + this.atimeMs = atimeNs / kNsPerMsBigInt + this.mtimeMs = mtimeNs / kNsPerMsBigInt + this.ctimeMs = ctimeNs / kNsPerMsBigInt + this.birthtimeMs = birthtimeNs / kNsPerMsBigInt + this.atimeNs = atimeNs + this.mtimeNs = mtimeNs + this.ctimeNs = ctimeNs + this.birthtimeNs = birthtimeNs +} + +ObjectSetPrototypeOf(BigIntStats.prototype, StatsBase.prototype) +ObjectSetPrototypeOf(BigIntStats, StatsBase) +ObjectDefineProperties(BigIntStats.prototype, lazyDateFields) + +BigIntStats.prototype._checkModeProperty = function (property) { + if (isWindows && (property === S_IFIFO || property === S_IFBLK || property === S_IFSOCK)) { + return false // Some types are not available on Windows + } + return (this.mode & BigInt(S_IFMT)) === BigInt(property) +} + +function Stats(dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks, atimeMs, mtimeMs, ctimeMs, birthtimeMs) { + FunctionPrototypeCall(StatsBase, this, dev, mode, nlink, uid, gid, rdev, blksize, ino, size, blocks) + this.atimeMs = atimeMs + this.mtimeMs = mtimeMs + this.ctimeMs = ctimeMs + this.birthtimeMs = birthtimeMs +} + +ObjectSetPrototypeOf(Stats.prototype, StatsBase.prototype) +ObjectSetPrototypeOf(Stats, StatsBase) +ObjectDefineProperties(Stats.prototype, lazyDateFields) + +Stats.prototype._checkModeProperty = function (property) { + if (isWindows && (property === S_IFIFO || property === S_IFBLK || property === S_IFSOCK)) { + return false // Some types are not available on Windows + } + return (this.mode & S_IFMT) === property +} + +/** + * @param {Float64Array | BigInt64Array} stats + * @param {number} offset + * @returns {BigIntStats | Stats} + */ +function getStatsFromBinding(stats, offset = 0) { + if (isBigInt64Array(stats)) { + return new BigIntStats( + stats[0 + offset], + stats[1 + offset], + stats[2 + offset], + stats[3 + offset], + stats[4 + offset], + stats[5 + offset], + stats[6 + offset], + stats[7 + offset], + stats[8 + offset], + stats[9 + offset], + nsFromTimeSpecBigInt(stats[10 + offset], stats[11 + offset]), + nsFromTimeSpecBigInt(stats[12 + offset], stats[13 + offset]), + nsFromTimeSpecBigInt(stats[14 + offset], stats[15 + offset]), + nsFromTimeSpecBigInt(stats[16 + offset], stats[17 + offset]), + ) + } + return new Stats( + stats[0 + offset], + stats[1 + offset], + stats[2 + offset], + stats[3 + offset], + stats[4 + offset], + stats[5 + offset], + stats[6 + offset], + stats[7 + offset], + stats[8 + offset], + stats[9 + offset], + msFromTimeSpec(stats[10 + offset], stats[11 + offset]), + msFromTimeSpec(stats[12 + offset], stats[13 + offset]), + msFromTimeSpec(stats[14 + offset], stats[15 + offset]), + msFromTimeSpec(stats[16 + offset], stats[17 + offset]), + ) +} + +class StatFs { + constructor(type, bsize, blocks, bfree, bavail, files, ffree) { + this.type = type + this.bsize = bsize + this.blocks = blocks + this.bfree = bfree + this.bavail = bavail + this.files = files + this.ffree = ffree + } +} + +function getStatFsFromBinding(stats) { + return new StatFs(stats[0], stats[1], stats[2], stats[3], stats[4], stats[5], stats[6]) +} + +function stringToFlags(flags, name = 'flags') { + if (typeof flags === 'number') { + validateInt32(flags, name) + return flags + } + + if (flags == null) { + return O_RDONLY + } + + switch (flags) { + case 'r': + return O_RDONLY + case 'rs': // Fall through. + case 'sr': + return O_RDONLY | O_SYNC + case 'r+': + return O_RDWR + case 'rs+': // Fall through. + case 'sr+': + return O_RDWR | O_SYNC + + case 'w': + return O_TRUNC | O_CREAT | O_WRONLY + case 'wx': // Fall through. + case 'xw': + return O_TRUNC | O_CREAT | O_WRONLY | O_EXCL + + case 'w+': + return O_TRUNC | O_CREAT | O_RDWR + case 'wx+': // Fall through. + case 'xw+': + return O_TRUNC | O_CREAT | O_RDWR | O_EXCL + + case 'a': + return O_APPEND | O_CREAT | O_WRONLY + case 'ax': // Fall through. + case 'xa': + return O_APPEND | O_CREAT | O_WRONLY | O_EXCL + case 'as': // Fall through. + case 'sa': + return O_APPEND | O_CREAT | O_WRONLY | O_SYNC + + case 'a+': + return O_APPEND | O_CREAT | O_RDWR + case 'ax+': // Fall through. + case 'xa+': + return O_APPEND | O_CREAT | O_RDWR | O_EXCL + case 'as+': // Fall through. + case 'sa+': + return O_APPEND | O_CREAT | O_RDWR | O_SYNC + } + + throw new ERR_INVALID_ARG_VALUE('flags', flags) +} + +const stringToSymlinkType = hideStackFrames((type) => { + switch (type) { + case undefined: + case null: + case 'file': + return 0 + case 'dir': + return UV_FS_SYMLINK_DIR + case 'junction': + return UV_FS_SYMLINK_JUNCTION + } +}) + +// converts Date or number to a fractional UNIX timestamp +function toUnixTimestamp(time, name = 'time') { + // eslint-disable-next-line eqeqeq + if (typeof time === 'string' && +time == time) { + return +time + } + if (NumberIsFinite(time)) { + if (time < 0) { + return DateNow() / 1000 + } + return time + } + if (isDate(time)) { + // Convert to 123.456 UNIX timestamp + return DatePrototypeGetTime(time) / 1000 + } + throw new ERR_INVALID_ARG_TYPE(name, ['Date', 'Time in seconds'], time) +} + +const validateOffsetLengthRead = hideStackFrames((offset, length, bufferLength) => { + if (offset < 0) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('offset', '>= 0', offset) + } + if (length < 0) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('length', '>= 0', length) + } + if (offset + length > bufferLength) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('length', `<= ${bufferLength - offset}`, length) + } +}) + +const validateOffsetLengthWrite = hideStackFrames((offset, length, byteLength) => { + if (offset > byteLength) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('offset', `<= ${byteLength}`, offset) + } + + if (length > byteLength - offset) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('length', `<= ${byteLength - offset}`, length) + } + + if (length < 0) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError('length', '>= 0', length) + } + + validateInt32.withoutStackTrace(length, 'length', 0) +}) + +const validatePath = hideStackFrames((path, propName = 'path') => { + if (typeof path !== 'string' && !isUint8Array(path)) { + throw new ERR_INVALID_ARG_TYPE.HideStackFramesError(propName, ['string', 'Buffer', 'URL'], path) + } + + const pathIsString = typeof path === 'string' + const pathIsUint8Array = isUint8Array(path) + + // We can only perform meaningful checks on strings and Uint8Arrays. + if ( + (!pathIsString && !pathIsUint8Array) || + (pathIsString && !StringPrototypeIncludes(path, '\u0000')) || + (pathIsUint8Array && !TypedArrayPrototypeIncludes(path, 0)) + ) { + return + } + + throw new ERR_INVALID_ARG_VALUE.HideStackFramesError( + propName, + path, + 'must be a string, Uint8Array, or URL without null bytes', + ) +}) + +const getValidatedPath = hideStackFrames((fileURLOrPath, propName = 'path') => { + const path = toPathIfFileURL(fileURLOrPath) + validatePath(path, propName) + return path +}) + +const getValidatedFd = hideStackFrames((fd, propName = 'fd') => { + if (ObjectIs(fd, -0)) { + return 0 + } + + validateInt32(fd, propName, 0) + + return fd +}) + +const validateBufferArray = hideStackFrames((buffers, propName = 'buffers') => { + if (!ArrayIsArray(buffers)) + throw new ERR_INVALID_ARG_TYPE.HideStackFramesError(propName, 'ArrayBufferView[]', buffers) + + for (let i = 0; i < buffers.length; i++) { + if (!isArrayBufferView(buffers[i])) + throw new ERR_INVALID_ARG_TYPE.HideStackFramesError(propName, 'ArrayBufferView[]', buffers) + } + + return buffers +}) + +let nonPortableTemplateWarn = true + +function warnOnNonPortableTemplate(template) { + // Template strings passed to the mkdtemp() family of functions should not + // end with 'X' because they are handled inconsistently across platforms. + if ( + nonPortableTemplateWarn && + ((typeof template === 'string' && StringPrototypeEndsWith(template, 'X')) || + (typeof template !== 'string' && TypedArrayPrototypeAt(template, -1) === 0x58)) + ) { + process.emitWarning( + 'mkdtemp() templates ending with X are not portable. ' + 'For details see: https://nodejs.org/api/fs.html', + ) + nonPortableTemplateWarn = false + } +} + +const defaultCpOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, + verbatimSymlinks: false, +} + +const defaultRmOptions = { + recursive: false, + force: false, + retryDelay: 100, + maxRetries: 0, +} + +const validateCpOptions = hideStackFrames((options) => { + if (options === undefined) return { ...defaultCpOptions } + validateObject(options, 'options') + options = { ...defaultCpOptions, ...options } + validateBoolean(options.dereference, 'options.dereference') + validateBoolean(options.errorOnExist, 'options.errorOnExist') + validateBoolean(options.force, 'options.force') + validateBoolean(options.preserveTimestamps, 'options.preserveTimestamps') + validateBoolean(options.recursive, 'options.recursive') + validateBoolean(options.verbatimSymlinks, 'options.verbatimSymlinks') + options.mode = getValidMode(options.mode, 'copyFile') + if (options.dereference === true && options.verbatimSymlinks === true) { + throw new ERR_INCOMPATIBLE_OPTION_PAIR.HideStackFramesError('dereference', 'verbatimSymlinks') + } + if (options.filter !== undefined) { + validateFunction(options.filter, 'options.filter') + } + return options +}) + +const validateRmOptions = hideStackFrames((path, options, expectDir, cb) => { + options = validateRmdirOptions(options, defaultRmOptions) + validateBoolean.withoutStackTrace(options.force, 'options.force') + validateBoolean.withoutStackTrace(options.recursive, 'options.recursive') + validateInt32.withoutStackTrace(options.retryDelay, 'options.retryDelay', 0) + validateUint32.withoutStackTrace(options.maxRetries, 'options.maxRetries') + + lazyLoadFs().lstat(path, (err, stats) => { + if (err) { + if (options.force && err.code === 'ENOENT') { + return cb(null, options) + } + return cb(err, options) + } + + if (expectDir && !stats.isDirectory()) { + return cb(false) + } + + if (stats.isDirectory() && !options.recursive) { + const err = new ERR_FS_EISDIR.HideStackFramesError({ + code: 'EISDIR', + message: 'is a directory', + path, + syscall: 'rm', + errno: EISDIR, + }) + + return cb(err) + } + return cb(null, options) + }) +}) + +const validateRmOptionsSync = hideStackFrames((path, options, expectDir) => { + options = validateRmdirOptions.withoutStackTrace(options, defaultRmOptions) + validateBoolean.withoutStackTrace(options.force, 'options.force') + validateBoolean.withoutStackTrace(options.recursive, 'options.recursive') + validateInt32.withoutStackTrace(options.retryDelay, 'options.retryDelay', 0) + validateUint32.withoutStackTrace(options.maxRetries, 'options.maxRetries') + + if (!options.force || expectDir || !options.recursive) { + const isDirectory = lazyLoadFs().lstatSync(path, { throwIfNoEntry: !options.force })?.isDirectory() + + if (expectDir && !isDirectory) { + return false + } + + if (isDirectory && !options.recursive) { + throw new ERR_FS_EISDIR.HideStackFramesError({ + code: 'EISDIR', + message: 'is a directory', + path, + syscall: 'rm', + errno: EISDIR, + }) + } + } + + return options +}) + +const validateRmdirOptions = hideStackFrames((options, defaults = { __proto__: null }) => { + if (options === undefined) return defaults + validateObject.withoutStackTrace(options, 'options') + + options = { ...defaults, ...options } + + return options +}) + +const getValidMode = hideStackFrames((mode, type) => { + let min = kMinimumAccessMode + let max = kMaximumAccessMode + let def = F_OK + if (type === 'copyFile') { + min = kMinimumCopyMode + max = kMaximumCopyMode + def = mode || kDefaultCopyMode + } else { + assert(type === 'access') + } + if (mode == null) { + return def + } + validateInteger.withoutStackTrace(mode, 'mode', min, max) + return mode +}) + +const validateStringAfterArrayBufferView = hideStackFrames((buffer, name) => { + if (typeof buffer !== 'string') { + throw new ERR_INVALID_ARG_TYPE.HideStackFramesError(name, ['string', 'Buffer', 'TypedArray', 'DataView'], buffer) + } +}) + +const validatePosition = hideStackFrames((position, name, length) => { + if (typeof position === 'number') { + validateInteger.withoutStackTrace(position, name, -1) + } else if (typeof position === 'bigint') { + const maxPosition = 2n ** 63n - 1n - BigInt(length) + if (!(position >= -1n && position <= maxPosition)) { + throw new ERR_OUT_OF_RANGE.HideStackFramesError(name, `>= -1 && <= ${maxPosition}`, position) + } + } else { + throw new ERR_INVALID_ARG_TYPE.HideStackFramesError(name, ['integer', 'bigint'], position) + } +}) + +module.exports = { + constants: { + kIoMaxLength, + kMaxUserId, + kReadFileBufferLength, + kReadFileUnknownBufferLength, + kWriteFileMaxChunkSize, + }, + assertEncoding, + BigIntStats, // for testing + copyObject, + Dirent, + DirentFromStats, + getDirent, + getDirents, + getOptions, + getValidatedFd, + getValidatedPath, + handleErrorFromBinding, + preprocessSymlinkDestination, + realpathCacheKey: Symbol('realpathCacheKey'), + getStatFsFromBinding, + getStatsFromBinding, + stringToFlags, + stringToSymlinkType, + Stats: deprecate(Stats, 'fs.Stats constructor is deprecated.', 'DEP0180'), + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePath, + validatePosition, + validateRmOptions, + validateRmOptionsSync, + validateRmdirOptions, + validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} diff --git a/reference/internal/fs/watchers.js b/reference/internal/fs/watchers.js new file mode 100644 index 0000000..7595e36 --- /dev/null +++ b/reference/internal/fs/watchers.js @@ -0,0 +1,455 @@ +'use strict' + +const { + ArrayIsArray, + ArrayPrototypePush, + ArrayPrototypeShift, + Error, + FunctionPrototypeCall, + ObjectDefineProperty, + ObjectSetPrototypeOf, + PromiseWithResolvers, + RegExpPrototypeExec, + Symbol, +} = primordials + +const { + AbortError, + UVException, + codes: { ERR_FS_WATCH_QUEUE_OVERFLOW, ERR_INVALID_ARG_VALUE }, +} = require('internal/errors') + +const { kEmptyObject, getLazy, isWindows, isMacOS } = require('internal/util') + +const { kFsStatsFieldsNumber, StatWatcher: _StatWatcher } = internalBinding('fs') + +const { FSEvent } = internalBinding('fs_event_wrap') +const { UV_ENOSPC } = internalBinding('uv') +const { EventEmitter } = require('events') + +const { getStatsFromBinding, getValidatedPath } = require('internal/fs/utils') + +const { + defaultTriggerAsyncIdScope, + symbols: { owner_symbol }, +} = require('internal/async_hooks') + +const { toNamespacedPath } = require('path') + +const { + validateAbortSignal, + validateBoolean, + validateIgnoreOption, + validateObject, + validateUint32, + validateInteger, + validateOneOf, +} = require('internal/validators') + +const { + Buffer: { isEncoding }, +} = require('buffer') + +const { isRegExp } = require('internal/util/types') + +const assert = require('internal/assert') + +const kOldStatus = Symbol('kOldStatus') +const kUseBigint = Symbol('kUseBigint') + +const kFSWatchStart = Symbol('kFSWatchStart') +const kFSStatWatcherStart = Symbol('kFSStatWatcherStart') +const KFSStatWatcherRefCount = Symbol('KFSStatWatcherRefCount') +const KFSStatWatcherMaxRefCount = Symbol('KFSStatWatcherMaxRefCount') +const kFSStatWatcherAddOrCleanRef = Symbol('kFSStatWatcherAddOrCleanRef') + +const lazyMinimatch = getLazy(() => require('internal/deps/minimatch/index')) + +/** + * Creates an ignore matcher function from the ignore option. + * @param {string | RegExp | Function | Array} ignore - The ignore patterns + * @returns {Function | null} A function that returns true if filename should be ignored + */ +function createIgnoreMatcher(ignore) { + if (ignore == null) return null + const matchers = ArrayIsArray(ignore) ? ignore : [ignore] + const compiled = [] + + for (let i = 0; i < matchers.length; i++) { + const matcher = matchers[i] + if (typeof matcher === 'string') { + const mm = new (lazyMinimatch().Minimatch)(matcher, { + __proto__: null, + nocase: isWindows || isMacOS, + windowsPathsNoEscape: true, + nonegate: true, + nocomment: true, + optimizationLevel: 2, + platform: process.platform, + // matchBase allows patterns without slashes to match the basename + // e.g., '*.log' matches 'subdir/file.log' + matchBase: true, + }) + ArrayPrototypePush(compiled, (filename) => mm.match(filename)) + } else if (isRegExp(matcher)) { + ArrayPrototypePush(compiled, (filename) => RegExpPrototypeExec(matcher, filename) !== null) + } else { + // Function + ArrayPrototypePush(compiled, matcher) + } + } + + return (filename) => { + for (let i = 0; i < compiled.length; i++) { + if (compiled[i](filename)) return true + } + return false + } +} + +function emitStop(self) { + self.emit('stop') +} + +function StatWatcher(bigint) { + FunctionPrototypeCall(EventEmitter, this) + + this._handle = null + this[kOldStatus] = -1 + this[kUseBigint] = bigint + this[KFSStatWatcherRefCount] = 1 + this[KFSStatWatcherMaxRefCount] = 1 +} +ObjectSetPrototypeOf(StatWatcher.prototype, EventEmitter.prototype) +ObjectSetPrototypeOf(StatWatcher, EventEmitter) + +function onchange(newStatus, stats) { + const self = this[owner_symbol] + if (self[kOldStatus] === -1 && newStatus === -1 && stats[2 /* new nlink */] === stats[16 /* old nlink */]) { + return + } + + self[kOldStatus] = newStatus + self.emit('change', getStatsFromBinding(stats), getStatsFromBinding(stats, kFsStatsFieldsNumber)) +} + +// At the moment if filename is undefined, we +// 1. Throw an Error if it's the first +// time Symbol('kFSStatWatcherStart') is called +// 2. Return silently if Symbol('kFSStatWatcherStart') has already been called +// on a valid filename and the wrap has been initialized +// This method is a noop if the watcher has already been started. +StatWatcher.prototype[kFSStatWatcherStart] = function (filename, persistent, interval) { + if (this._handle !== null) return + + this._handle = new _StatWatcher(this[kUseBigint]) + this._handle[owner_symbol] = this + this._handle.onchange = onchange + if (!persistent) this.unref() + + // uv_fs_poll is a little more powerful than ev_stat but we curb it for + // the sake of backwards compatibility. + this[kOldStatus] = -1 + + filename = getValidatedPath(filename, 'filename') + validateUint32(interval, 'interval') + const err = this._handle.start(toNamespacedPath(filename), interval) + if (err) { + const error = new UVException({ + errno: err, + syscall: 'watch', + path: filename, + }) + error.filename = filename + throw error + } +} + +// To maximize backward-compatibility for the end user, +// a no-op stub method has been added instead of +// totally removing StatWatcher.prototype.start. +// This should not be documented. +StatWatcher.prototype.start = () => {} + +// FIXME(joyeecheung): this method is not documented while there is +// another documented fs.unwatchFile(). The counterpart in +// FSWatcher is .close() +// This method is a noop if the watcher has not been started. +StatWatcher.prototype.stop = function () { + if (this._handle === null) return + + defaultTriggerAsyncIdScope(this._handle.getAsyncId(), process.nextTick, emitStop, this) + this._handle.close() + this._handle = null +} + +// Clean up or add ref counters. +StatWatcher.prototype[kFSStatWatcherAddOrCleanRef] = function (operate) { + if (operate === 'add') { + // Add a Ref + this[KFSStatWatcherRefCount]++ + this[KFSStatWatcherMaxRefCount]++ + } else if (operate === 'clean') { + // Clean up a single + this[KFSStatWatcherMaxRefCount]-- + this.unref() + } else if (operate === 'cleanAll') { + // Clean up all + this[KFSStatWatcherMaxRefCount] = 0 + this[KFSStatWatcherRefCount] = 0 + this._handle?.unref() + } +} + +StatWatcher.prototype.ref = function () { + // Avoid refCount calling ref multiple times causing unref to have no effect. + if (this[KFSStatWatcherRefCount] === this[KFSStatWatcherMaxRefCount]) return this + if (this._handle && this[KFSStatWatcherRefCount]++ === 0) this._handle.ref() + return this +} + +StatWatcher.prototype.unref = function () { + // Avoid refCount calling unref multiple times causing ref to have no effect. + if (this[KFSStatWatcherRefCount] === 0) return this + if (this._handle && --this[KFSStatWatcherRefCount] === 0) this._handle.unref() + return this +} + +function FSWatcher() { + FunctionPrototypeCall(EventEmitter, this) + + this._handle = new FSEvent() + this._handle[owner_symbol] = this + this._ignoreMatcher = null + + this._handle.onchange = (status, eventType, filename) => { + // TODO(joyeecheung): we may check self._handle.initialized here + // and return if that is false. This allows us to avoid firing the event + // after the handle is closed, and to fire both UV_RENAME and UV_CHANGE + // if they are set by libuv at the same time. + if (status < 0) { + if (this._handle !== null) { + // We don't use this.close() here to avoid firing the close event. + this._handle.close() + this._handle = null // Make the handle garbage collectable. + } + const error = new UVException({ + errno: status, + syscall: 'watch', + path: filename, + }) + error.filename = filename + this.emit('error', error) + } else { + // Filter events if ignore matcher is set and filename is available + if (filename != null && this._ignoreMatcher?.(filename)) { + return + } + this.emit('change', eventType, filename) + } + } +} +ObjectSetPrototypeOf(FSWatcher.prototype, EventEmitter.prototype) +ObjectSetPrototypeOf(FSWatcher, EventEmitter) + +// At the moment if filename is undefined, we +// 1. Throw an Error if it's the first time Symbol('kFSWatchStart') is called +// 2. Return silently if Symbol('kFSWatchStart') has already been called +// on a valid filename and the wrap has been initialized +// 3. Return silently if the watcher has already been closed +// This method is a noop if the watcher has already been started. +FSWatcher.prototype[kFSWatchStart] = function (filename, persistent, recursive, encoding, ignore) { + if (this._handle === null) { + // closed + return + } + assert(this._handle instanceof FSEvent, 'handle must be a FSEvent') + if (this._handle.initialized) { + // already started + return + } + + filename = getValidatedPath(filename, 'filename') + + // Validate and create the ignore matcher + validateIgnoreOption(ignore, 'options.ignore') + this._ignoreMatcher = createIgnoreMatcher(ignore) + + const err = this._handle.start(toNamespacedPath(filename), persistent, recursive, encoding) + if (err) { + const error = new UVException({ + errno: err, + syscall: 'watch', + path: filename, + message: err === UV_ENOSPC ? 'System limit for number of file watchers reached' : '', + }) + error.filename = filename + throw error + } +} + +// To maximize backward-compatibility for the end user, +// a no-op stub method has been added instead of +// totally removing FSWatcher.prototype.start. +// This should not be documented. +FSWatcher.prototype.start = () => {} + +// This method is a noop if the watcher has not been started or +// has already been closed. +FSWatcher.prototype.close = function () { + if (this._handle === null) { + // closed + return + } + assert(this._handle instanceof FSEvent, 'handle must be a FSEvent') + if (!this._handle.initialized) { + // not started + return + } + this._handle.close() + this._handle = null // Make the handle garbage collectable. + process.nextTick(emitCloseNT, this) +} + +FSWatcher.prototype.ref = function () { + if (this._handle) this._handle.ref() + return this +} + +FSWatcher.prototype.unref = function () { + if (this._handle) this._handle.unref() + return this +} + +function emitCloseNT(self) { + self.emit('close') +} + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(FSEvent.prototype, 'owner', { + __proto__: null, + get() { + return this[owner_symbol] + }, + set(v) { + return (this[owner_symbol] = v) + }, +}) + +let kResistStopPropagation + +async function* watch(filename, options = kEmptyObject) { + const path = toNamespacedPath(getValidatedPath(filename)) + validateObject(options, 'options') + + const { + persistent = true, + recursive = false, + encoding = 'utf8', + maxQueue = 2048, + overflow = 'ignore', + signal, + ignore, + } = options + + validateBoolean(persistent, 'options.persistent') + validateBoolean(recursive, 'options.recursive') + validateInteger(maxQueue, 'options.maxQueue') + validateOneOf(overflow, 'options.overflow', ['ignore', 'error']) + validateAbortSignal(signal, 'options.signal') + validateIgnoreOption(ignore, 'options.ignore') + + if (encoding && !isEncoding(encoding)) { + const reason = 'is invalid encoding' + throw new ERR_INVALID_ARG_VALUE('encoding', encoding, reason) + } + + if (signal?.aborted) throw new AbortError(undefined, { cause: signal.reason }) + + const handle = new FSEvent() + const ignoreMatcher = createIgnoreMatcher(ignore) + let { promise, resolve } = PromiseWithResolvers() + const queue = [] + const oncancel = () => { + handle.close() + resolve() + } + + try { + if (signal) { + kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation + signal.addEventListener('abort', oncancel, { __proto__: null, once: true, [kResistStopPropagation]: true }) + } + handle.onchange = (status, eventType, filename) => { + if (status < 0) { + const error = new UVException({ + errno: status, + syscall: 'watch', + path: filename, + }) + error.filename = filename + handle.close() + ArrayPrototypePush(queue, error) + resolve() + return + } + // Filter events if ignore matcher is set and filename is available + if (filename != null && ignoreMatcher?.(filename)) { + return + } + if (queue.length < maxQueue) { + ArrayPrototypePush(queue, { __proto__: null, eventType, filename }) + resolve() + } else if (overflow === 'error') { + queue.length = 0 + ArrayPrototypePush(queue, new ERR_FS_WATCH_QUEUE_OVERFLOW(maxQueue)) + resolve() + } else { + process.emitWarning('fs.watch maxQueue exceeded') + } + } + + const err = handle.start(path, persistent, recursive, encoding) + if (err) { + const error = new UVException({ + errno: err, + syscall: 'watch', + path: filename, + message: err === UV_ENOSPC ? 'System limit for number of file watchers reached' : '', + }) + error.filename = filename + handle.close() + throw error + } + + while (!signal?.aborted) { + await promise + while (queue.length) { + const item = ArrayPrototypeShift(queue) + if (item instanceof Error) { + throw item + } else { + yield item + } + } + ;({ promise, resolve } = PromiseWithResolvers()) + } + if (signal?.aborted) { + throw new AbortError(undefined, { cause: signal?.reason }) + } + } finally { + handle.close() + signal?.removeEventListener('abort', oncancel) + } +} + +module.exports = { + createIgnoreMatcher, + FSWatcher, + StatWatcher, + kFSWatchStart, + kFSStatWatcherStart, + kFSStatWatcherAddOrCleanRef, + watch, +} diff --git a/src/access.rs b/src/access.rs new file mode 100644 index 0000000..160f59d --- /dev/null +++ b/src/access.rs @@ -0,0 +1,112 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +#[cfg(unix)] +use std::os::unix::fs::MetadataExt; + +// Node.js access mode constants +pub const F_OK: u32 = 0; +pub const R_OK: u32 = 4; +pub const W_OK: u32 = 2; +pub const X_OK: u32 = 1; + +fn access_impl(path_str: String, mode: Option) -> Result<()> { + let path = Path::new(&path_str); + let mode = mode.unwrap_or(F_OK); + + let meta = std::fs::symlink_metadata(path).map_err(|_| { + Error::from_reason(format!( + "ENOENT: no such file or directory, access '{}'", + path.to_string_lossy() + )) + })?; + + // F_OK: just check existence (already passed above) + if mode == F_OK { + return Ok(()); + } + + #[cfg(unix)] + { + let file_mode = meta.mode(); + let uid = unsafe { libc::getuid() }; + let gid = unsafe { libc::getgid() }; + let is_owner = uid == meta.uid(); + let is_group = gid == meta.gid(); + + let check = |flag: u32, owner_bit: u32, group_bit: u32, other_bit: u32| -> bool { + if mode & flag == 0 { + return true; + } + if uid == 0 { + // root can read/write anything; execute requires at least one execute bit + if flag == X_OK { + return file_mode & (owner_bit | group_bit | other_bit) != 0; + } + return true; + } + if is_owner && (file_mode & owner_bit != 0) { + return true; + } + if is_group && (file_mode & group_bit != 0) { + return true; + } + file_mode & other_bit != 0 + }; + + let ok = check(R_OK, 0o400, 0o040, 0o004) + && check(W_OK, 0o200, 0o020, 0o002) + && check(X_OK, 0o100, 0o010, 0o001); + + if !ok { + return Err(Error::from_reason(format!( + "EACCES: permission denied, access '{}'", + path.to_string_lossy() + ))); + } + } + + #[cfg(not(unix))] + { + if mode & W_OK != 0 && meta.permissions().readonly() { + return Err(Error::from_reason(format!( + "EACCES: permission denied, access '{}'", + path.to_string_lossy() + ))); + } + } + + Ok(()) +} + +#[napi(js_name = "accessSync")] +pub fn access_sync(path: String, mode: Option) -> Result<()> { + access_impl(path, mode) +} + +// ========= async version ========= + +pub struct AccessTask { + pub path: String, + pub mode: Option, +} + +impl Task for AccessTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + access_impl(self.path.clone(), self.mode) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "access")] +pub fn access(path: String, mode: Option) -> AsyncTask { + AsyncTask::new(AccessTask { path, mode }) +} diff --git a/src/chmod.rs b/src/chmod.rs new file mode 100644 index 0000000..d7b1756 --- /dev/null +++ b/src/chmod.rs @@ -0,0 +1,68 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +fn chmod_impl(path_str: String, mode: u32) -> Result<()> { + let path = Path::new(&path_str); + + #[cfg(unix)] + { + use std::fs; + use std::os::unix::fs::PermissionsExt; + let permissions = fs::Permissions::from_mode(mode); + fs::set_permissions(path, permissions).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, chmod '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + } + + #[cfg(not(unix))] + { + let _ = mode; + if !path.exists() { + return Err(Error::from_reason(format!( + "ENOENT: no such file or directory, chmod '{}'", + path.to_string_lossy() + ))); + } + } + + Ok(()) +} + +#[napi(js_name = "chmodSync")] +pub fn chmod_sync(path: String, mode: u32) -> Result<()> { + chmod_impl(path, mode) +} + +// ========= async version ========= + +pub struct ChmodTask { + pub path: String, + pub mode: u32, +} + +impl Task for ChmodTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + chmod_impl(self.path.clone(), self.mode) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "chmod")] +pub fn chmod(path: String, mode: u32) -> AsyncTask { + AsyncTask::new(ChmodTask { path, mode }) +} diff --git a/src/chown.rs b/src/chown.rs new file mode 100644 index 0000000..370ebcc --- /dev/null +++ b/src/chown.rs @@ -0,0 +1,73 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +fn chown_impl(path_str: String, uid: u32, gid: u32) -> Result<()> { + let path = Path::new(&path_str); + + #[cfg(unix)] + { + use std::ffi::CString; + let c_path = CString::new(path.to_string_lossy().as_bytes()) + .map_err(|_| Error::from_reason("Invalid path"))?; + let ret = unsafe { libc::chown(c_path.as_ptr(), uid, gid) }; + if ret != 0 { + let e = std::io::Error::last_os_error(); + if e.kind() == std::io::ErrorKind::NotFound { + return Err(Error::from_reason(format!( + "ENOENT: no such file or directory, chown '{}'", + path.to_string_lossy() + ))); + } + return Err(Error::from_reason(format!( + "EPERM: operation not permitted, chown '{}'", + path.to_string_lossy() + ))); + } + } + + #[cfg(not(unix))] + { + let _ = (uid, gid); + if !path.exists() { + return Err(Error::from_reason(format!( + "ENOENT: no such file or directory, chown '{}'", + path.to_string_lossy() + ))); + } + } + + Ok(()) +} + +#[napi(js_name = "chownSync")] +pub fn chown_sync(path: String, uid: u32, gid: u32) -> Result<()> { + chown_impl(path, uid, gid) +} + +// ========= async version ========= + +pub struct ChownTask { + pub path: String, + pub uid: u32, + pub gid: u32, +} + +impl Task for ChownTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + chown_impl(self.path.clone(), self.uid, self.gid) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "chown")] +pub fn chown(path: String, uid: u32, gid: u32) -> AsyncTask { + AsyncTask::new(ChownTask { path, uid, gid }) +} diff --git a/src/copy_file.rs b/src/copy_file.rs new file mode 100644 index 0000000..258514f --- /dev/null +++ b/src/copy_file.rs @@ -0,0 +1,74 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +// Node.js copyFile mode constants +pub const COPYFILE_EXCL: u32 = 1; +pub const COPYFILE_FICLONE: u32 = 2; +pub const COPYFILE_FICLONE_FORCE: u32 = 4; + +fn copy_file_impl(src_str: String, dest_str: String, mode: Option) -> Result<()> { + let src = Path::new(&src_str); + let dest = Path::new(&dest_str); + let mode = mode.unwrap_or(0); + + if mode & COPYFILE_EXCL != 0 && dest.exists() { + return Err(Error::from_reason(format!( + "EEXIST: file already exists, copyfile '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + ))); + } + + fs::copy(src, dest).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, copyfile '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + )) + } else { + Error::from_reason(format!( + "{}, copyfile '{}' -> '{}'", + e, + src.to_string_lossy(), + dest.to_string_lossy() + )) + } + })?; + + Ok(()) +} + +#[napi(js_name = "copyFileSync")] +pub fn copy_file_sync(src: String, dest: String, mode: Option) -> Result<()> { + copy_file_impl(src, dest, mode) +} + +// ========= async version ========= + +pub struct CopyFileTask { + pub src: String, + pub dest: String, + pub mode: Option, +} + +impl Task for CopyFileTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + copy_file_impl(self.src.clone(), self.dest.clone(), self.mode) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "copyFile")] +pub fn copy_file(src: String, dest: String, mode: Option) -> AsyncTask { + AsyncTask::new(CopyFileTask { src, dest, mode }) +} diff --git a/src/cp.rs b/src/cp.rs new file mode 100644 index 0000000..fdadb17 --- /dev/null +++ b/src/cp.rs @@ -0,0 +1,236 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use rayon::prelude::*; +use std::fs; +use std::path::Path; + +#[napi(object)] +#[derive(Clone)] +pub struct CpOptions { + pub recursive: Option, + pub force: Option, + pub error_on_exist: Option, + pub preserve_timestamps: Option, + pub dereference: Option, + pub verbatim_symlinks: Option, + /// Hyper-FS extension: number of parallel threads for recursive copy. + /// 0 or 1 means sequential; > 1 enables rayon parallel traversal. + pub concurrency: Option, +} + +#[cfg(unix)] +fn set_timestamps(src: &Path, dest: &Path) -> std::io::Result<()> { + use std::os::unix::fs::MetadataExt; + let src_meta = fs::metadata(src)?; + let atime_secs = src_meta.atime(); + let atime_nsecs = src_meta.atime_nsec(); + let mtime_secs = src_meta.mtime(); + let mtime_nsecs = src_meta.mtime_nsec(); + + unsafe { + let c_path = std::ffi::CString::new(dest.to_string_lossy().as_bytes()).map_err(|_| { + std::io::Error::new(std::io::ErrorKind::InvalidInput, "invalid path") + })?; + let times = [ + libc::timespec { + tv_sec: atime_secs, + tv_nsec: atime_nsecs, + }, + libc::timespec { + tv_sec: mtime_secs, + tv_nsec: mtime_nsecs, + }, + ]; + libc::utimensat(libc::AT_FDCWD, c_path.as_ptr(), times.as_ptr(), 0); + } + Ok(()) +} + +#[cfg(not(unix))] +fn set_timestamps(_src: &Path, _dest: &Path) -> std::io::Result<()> { + Ok(()) +} + +fn cp_impl(src: &Path, dest: &Path, opts: &CpOptions) -> Result<()> { + let force = opts.force.unwrap_or(true); + let error_on_exist = opts.error_on_exist.unwrap_or(false); + let recursive = opts.recursive.unwrap_or(false); + let preserve_timestamps = opts.preserve_timestamps.unwrap_or(false); + let dereference = opts.dereference.unwrap_or(false); + let verbatim_symlinks = opts.verbatim_symlinks.unwrap_or(false); + let concurrency = opts.concurrency.unwrap_or(0); + + let meta = if dereference { + fs::metadata(src) + } else { + fs::symlink_metadata(src) + }; + + let meta = meta.map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, cp '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + + if meta.is_symlink() && !dereference { + let target = fs::read_link(src).map_err(|e| Error::from_reason(e.to_string()))?; + + let link_target = if verbatim_symlinks { + target + } else if target.is_relative() { + src + .parent() + .unwrap_or(Path::new("")) + .join(&target) + .canonicalize() + .unwrap_or(target) + } else { + target.canonicalize().unwrap_or(target) + }; + + if dest.exists() || dest.symlink_metadata().is_ok() { + if error_on_exist { + return Err(Error::from_reason(format!( + "EEXIST: file already exists, cp '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + ))); + } + if force { + let _ = fs::remove_file(dest); + } else { + return Ok(()); + } + } + + #[cfg(unix)] + std::os::unix::fs::symlink(&link_target, dest) + .map_err(|e| Error::from_reason(e.to_string()))?; + #[cfg(windows)] + { + if link_target.is_dir() { + std::os::windows::fs::symlink_dir(&link_target, dest) + .map_err(|e| Error::from_reason(e.to_string()))?; + } else { + std::os::windows::fs::symlink_file(&link_target, dest) + .map_err(|e| Error::from_reason(e.to_string()))?; + } + } + return Ok(()); + } + + if meta.is_dir() { + if !recursive { + return Err(Error::from_reason(format!( + "ERR_FS_EISDIR: Path is a directory. To copy a directory set the 'recursive' option to true, cp '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + ))); + } + + if !dest.exists() { + fs::create_dir_all(dest).map_err(|e| Error::from_reason(e.to_string()))?; + } + + let entries: Vec<_> = fs::read_dir(src) + .map_err(|e| Error::from_reason(e.to_string()))? + .collect::>() + .map_err(|e| Error::from_reason(e.to_string()))?; + + if concurrency > 1 { + entries + .par_iter() + .try_for_each(|entry| -> Result<()> { + cp_impl(&entry.path(), &dest.join(entry.file_name()), opts) + })?; + } else { + for entry in &entries { + cp_impl(&entry.path(), &dest.join(entry.file_name()), opts)?; + } + } + + if preserve_timestamps { + set_timestamps(src, dest).map_err(|e| Error::from_reason(e.to_string()))?; + } + } else { + if dest.exists() { + if error_on_exist { + return Err(Error::from_reason(format!( + "EEXIST: file already exists, cp '{}' -> '{}'", + src.to_string_lossy(), + dest.to_string_lossy() + ))); + } + if !force { + return Ok(()); + } + } + + if let Some(parent) = dest.parent() { + if !parent.exists() { + fs::create_dir_all(parent).map_err(|e| Error::from_reason(e.to_string()))?; + } + } + + fs::copy(src, dest).map_err(|e| Error::from_reason(e.to_string()))?; + + if preserve_timestamps { + set_timestamps(src, dest).map_err(|e| Error::from_reason(e.to_string()))?; + } + } + + Ok(()) +} + +fn cp_entry(src_str: String, dest_str: String, options: Option) -> Result<()> { + let src = Path::new(&src_str); + let dest = Path::new(&dest_str); + let opts = options.unwrap_or(CpOptions { + recursive: None, + force: None, + error_on_exist: None, + preserve_timestamps: None, + dereference: None, + verbatim_symlinks: None, + concurrency: None, + }); + cp_impl(src, dest, &opts) +} + +#[napi(js_name = "cpSync")] +pub fn cp_sync(src: String, dest: String, options: Option) -> Result<()> { + cp_entry(src, dest, options) +} + +// ========= async version ========= + +pub struct CpTask { + pub src: String, + pub dest: String, + pub options: Option, +} + +impl Task for CpTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + cp_entry(self.src.clone(), self.dest.clone(), self.options.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "cp")] +pub fn cp(src: String, dest: String, options: Option) -> AsyncTask { + AsyncTask::new(CpTask { src, dest, options }) +} diff --git a/src/exists.rs b/src/exists.rs new file mode 100644 index 0000000..9e5218b --- /dev/null +++ b/src/exists.rs @@ -0,0 +1,33 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +#[napi(js_name = "existsSync")] +pub fn exists_sync(path: String) -> bool { + Path::new(&path).exists() +} + +// ========= async version ========= + +pub struct ExistsTask { + pub path: String, +} + +impl Task for ExistsTask { + type Output = bool; + type JsValue = bool; + + fn compute(&mut self) -> Result { + Ok(Path::new(&self.path).exists()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "exists")] +pub fn exists(path: String) -> AsyncTask { + AsyncTask::new(ExistsTask { path }) +} diff --git a/src/glob.rs b/src/glob.rs new file mode 100644 index 0000000..758dc9d --- /dev/null +++ b/src/glob.rs @@ -0,0 +1,179 @@ +use crate::types::Dirent; +use crate::utils::get_file_type_id; +use ignore::{overrides::OverrideBuilder, WalkBuilder}; +use napi::bindgen_prelude::*; +use napi_derive::napi; +use std::path::Path; +use std::sync::{Arc, Mutex}; + +// ignore crate 会对文件按 override 白名单过滤,但目录无论是否匹配都会被遍历(以便 +// 递归进去找匹配的子条目)。因此目录需要单独用 dir_matcher 测试其路径是否符合模式, +// 只有匹配的目录才加入结果——这与 Node.js fs.globSync 的行为一致: +// - "src/*" → 返回 src/ 下的文件 AND 子目录 +// - "**/*.rs" → 只返回 .rs 文件(目录不含 .rs 扩展名,不会匹配) +// - "**" → 返回所有文件和目录(但不含 cwd 根节点自身) + +#[napi(object)] +#[derive(Clone)] +pub struct GlobOptions { + pub cwd: Option, + pub with_file_types: Option, + pub exclude: Option>, + pub concurrency: Option, + pub git_ignore: Option, +} + +#[napi(js_name = "globSync")] +pub fn glob_sync( + pattern: String, + options: Option, +) -> Result, Vec>> { + let opts = options.unwrap_or(GlobOptions { + cwd: None, + with_file_types: None, + exclude: None, + concurrency: None, + git_ignore: None, + }); + + let cwd = opts.cwd.unwrap_or_else(|| ".".to_string()); + let with_file_types = opts.with_file_types.unwrap_or(false); + let concurrency = opts.concurrency.unwrap_or(4) as usize; + + // 构建 override(白名单模式):ignore crate 利用它来过滤文件; + // 同时保留一份 dir_matcher 副本,用于判断目录自身是否匹配模式。 + let mut override_builder = OverrideBuilder::new(&cwd); + override_builder + .add(&pattern) + .map_err(|e| Error::from_reason(e.to_string()))?; + + if let Some(ref excludes) = opts.exclude { + for ex in excludes { + override_builder + .add(&format!("!{}", ex)) + .map_err(|e| Error::from_reason(e.to_string()))?; + } + } + + let overrides = override_builder + .build() + .map_err(|e| Error::from_reason(e.to_string()))?; + + // 复制一份给目录匹配用(walker 会消耗 overrides 所有权) + let dir_matcher = Arc::new(overrides.clone()); + + let mut builder = WalkBuilder::new(&cwd); + builder + .overrides(overrides) + .standard_filters(opts.git_ignore.unwrap_or(true)) + .threads(concurrency); + + // We use two vectors to avoid enum overhead in the lock if possible, but Mutex> is easier + let result_strings = Arc::new(Mutex::new(Vec::new())); + let result_dirents = Arc::new(Mutex::new(Vec::new())); + + let result_strings_clone = result_strings.clone(); + let result_dirents_clone = result_dirents.clone(); + + let root_path = Path::new(&cwd).to_path_buf(); + + builder.build_parallel().run(move || { + let result_strings = result_strings_clone.clone(); + let result_dirents = result_dirents_clone.clone(); + let root = root_path.clone(); + let dir_matcher = dir_matcher.clone(); + + Box::new(move |entry| { + let entry = match entry { + Ok(e) => e, + Err(_) => return ignore::WalkState::Continue, + }; + + // 跳过 cwd 根节点自身(depth 0) + if entry.depth() == 0 { + return ignore::WalkState::Continue; + } + + let path = entry.path(); + let relative_path = path.strip_prefix(&root).unwrap_or(path); + let relative_path_str = relative_path.to_string_lossy().to_string(); + + let is_dir = entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false); + + if is_dir { + // 目录:ignore crate 只为遍历而产出,需单独测试路径是否符合模式。 + // 与 Node.js 行为一致:模式 "src/*" 会同时返回 src/ 下的文件和子目录。 + let matched = dir_matcher.matched(relative_path, true); + if !matched.is_whitelist() { + // 目录本身不匹配模式,但仍继续遍历以便找到匹配的子条目 + return ignore::WalkState::Continue; + } + // 目录匹配模式,加入结果后继续遍历 + } + // 非目录条目:ignore crate 的 override 白名单已确保它们匹配模式 + + if with_file_types { + let mut lock = result_dirents.lock().unwrap(); + let parent_path = relative_path + .parent() + .unwrap_or(Path::new("")) + .to_string_lossy() + .to_string(); + let name = relative_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + let file_type = if let Some(ft) = entry.file_type() { + get_file_type_id(&ft) + } else { + 0 + }; + lock.push(Dirent { name, parent_path, file_type }); + } else { + let mut lock = result_strings.lock().unwrap(); + lock.push(relative_path_str); + } + + ignore::WalkState::Continue + }) + }); + + if with_file_types { + let final_results = Arc::try_unwrap(result_dirents) + .map_err(|_| Error::from_reason("Lock error"))? + .into_inner() + .map_err(|_| Error::from_reason("Mutex error"))?; + Ok(Either::B(final_results)) + } else { + let final_results = Arc::try_unwrap(result_strings) + .map_err(|_| Error::from_reason("Lock error"))? + .into_inner() + .map_err(|_| Error::from_reason("Mutex error"))?; + Ok(Either::A(final_results)) + } +} + +// ===== Async version ===== +pub struct GlobTask { + pub pattern: String, + pub options: Option, +} + +impl Task for GlobTask { + type Output = Either, Vec>; + type JsValue = Either, Vec>; + + fn compute(&mut self) -> Result { + glob_sync(self.pattern.clone(), self.options.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "glob")] +pub fn glob(pattern: String, options: Option) -> AsyncTask { + AsyncTask::new(GlobTask { pattern, options }) +} diff --git a/src/lib.rs b/src/lib.rs index 8757011..e827bef 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,18 +1,52 @@ -// #![deny(clippy::all)] -// use napi_derive::napi; - -// #[napi] -// pub fn plus_100(input: u32) -> u32 { -// input + 100 -// } - -// src/lib.rs #![deny(clippy::all)] -// define modules +pub mod access; +pub mod chmod; +pub mod chown; +pub mod copy_file; +pub mod cp; +pub mod exists; +pub mod glob; +pub mod link; +pub mod mkdir; +pub mod mkdtemp; +pub mod read_file; pub mod readdir; +pub mod readlink; +pub mod realpath; +pub mod rename; pub mod rm; +pub mod rmdir; +pub mod stat; +pub mod symlink; +pub mod truncate; +pub mod types; +pub mod unlink; +pub mod utimes; +pub mod utils; +pub mod write_file; -//export modules +pub use access::*; +pub use chmod::*; +pub use chown::*; +pub use copy_file::*; +pub use cp::*; +pub use exists::*; +pub use glob::*; +pub use link::*; +pub use mkdir::*; +pub use mkdtemp::*; +pub use read_file::*; pub use readdir::*; +pub use readlink::*; +pub use realpath::*; +pub use rename::*; pub use rm::*; +pub use rmdir::*; +pub use stat::*; +pub use symlink::*; +pub use truncate::*; +pub use types::*; +pub use unlink::*; +pub use utimes::*; +pub use write_file::*; diff --git a/src/link.rs b/src/link.rs new file mode 100644 index 0000000..e9ef345 --- /dev/null +++ b/src/link.rs @@ -0,0 +1,68 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn link_impl(existing_path: String, new_path: String) -> Result<()> { + let existing = Path::new(&existing_path); + let new = Path::new(&new_path); + + fs::hard_link(existing, new).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, link '{}' -> '{}'", + existing_path, new_path + )) + } else if e.kind() == std::io::ErrorKind::AlreadyExists { + Error::from_reason(format!( + "EEXIST: file already exists, link '{}' -> '{}'", + existing_path, new_path + )) + } else if e.kind() == std::io::ErrorKind::PermissionDenied { + Error::from_reason(format!( + "EPERM: operation not permitted, link '{}' -> '{}'", + existing_path, new_path + )) + } else { + Error::from_reason(format!( + "{}, link '{}' -> '{}'", + e, existing_path, new_path + )) + } + })?; + Ok(()) +} + +#[napi(js_name = "linkSync")] +pub fn link_sync(existing_path: String, new_path: String) -> Result<()> { + link_impl(existing_path, new_path) +} + +// ========= async version ========= + +pub struct LinkTask { + pub existing_path: String, + pub new_path: String, +} + +impl Task for LinkTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + link_impl(self.existing_path.clone(), self.new_path.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "link")] +pub fn link(existing_path: String, new_path: String) -> AsyncTask { + AsyncTask::new(LinkTask { + existing_path, + new_path, + }) +} diff --git a/src/mkdir.rs b/src/mkdir.rs new file mode 100644 index 0000000..30bc570 --- /dev/null +++ b/src/mkdir.rs @@ -0,0 +1,114 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +#[napi(object)] +#[derive(Clone)] +pub struct MkdirOptions { + pub recursive: Option, + pub mode: Option, +} + +fn mkdir_impl(path_str: String, options: Option) -> Result> { + let path = Path::new(&path_str); + let opts = options.unwrap_or(MkdirOptions { + recursive: None, + mode: None, + }); + let recursive = opts.recursive.unwrap_or(false); + + #[cfg(unix)] + let _mode = opts.mode.unwrap_or(0o777); + + if recursive { + // Node.js returns the first directory path created, or undefined if it already existed + if path.exists() { + return Ok(None); + } + + // Find the first ancestor that doesn't exist + let mut ancestors = vec![]; + let mut current = path.to_path_buf(); + while !current.exists() { + ancestors.push(current.clone()); + match current.parent() { + Some(parent) => current = parent.to_path_buf(), + None => break, + } + } + + fs::create_dir_all(path).map_err(|e| { + Error::from_reason(format!( + "ENOENT: no such file or directory, mkdir '{}'", + e + )) + })?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + for ancestor in &ancestors { + let _ = fs::set_permissions(ancestor, fs::Permissions::from_mode(_mode)); + } + } + + let first_created = ancestors.last().map(|p| p.to_string_lossy().to_string()); + Ok(first_created) + } else { + fs::create_dir(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, mkdir '{}'", + path.to_string_lossy() + )) + } else if e.kind() == std::io::ErrorKind::AlreadyExists { + Error::from_reason(format!( + "EEXIST: file already exists, mkdir '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(format!("{}", e)) + } + })?; + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let _ = fs::set_permissions(path, fs::Permissions::from_mode(_mode)); + } + + Ok(None) + } +} + +#[napi(js_name = "mkdirSync")] +pub fn mkdir_sync(path: String, options: Option) -> Result> { + mkdir_impl(path, options) +} + +// ========= async version ========= + +pub struct MkdirTask { + pub path: String, + pub options: Option, +} + +impl Task for MkdirTask { + type Output = Option; + type JsValue = Option; + + fn compute(&mut self) -> Result { + mkdir_impl(self.path.clone(), self.options.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "mkdir")] +pub fn mkdir(path: String, options: Option) -> AsyncTask { + AsyncTask::new(MkdirTask { path, options }) +} diff --git a/src/mkdtemp.rs b/src/mkdtemp.rs new file mode 100644 index 0000000..61ffdb0 --- /dev/null +++ b/src/mkdtemp.rs @@ -0,0 +1,133 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +/// Generate a cryptographically seeded random 6-char suffix using OS random bytes. +/// Falls back to time-based entropy if the OS call fails. +fn generate_random_suffix() -> String { + const CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + let mut bytes = [0u8; 6]; + + // Try OS-level random source first (getrandom syscall on Linux/macOS, BCryptGenRandom on Windows) + #[cfg(unix)] + { + use std::io::Read; + if let Ok(mut f) = fs::File::open("/dev/urandom") { + let _ = f.read_exact(&mut bytes); + } else { + fill_fallback(&mut bytes); + } + } + + #[cfg(windows)] + { + // BCryptGenRandom via Windows CNG API + extern "system" { + fn BCryptGenRandom( + h_algorithm: *mut std::ffi::c_void, + pb_buffer: *mut u8, + cb_buffer: u32, + dw_flags: u32, + ) -> i32; + } + const BCRYPT_USE_SYSTEM_PREFERRED_RNG: u32 = 0x00000002; + let ret = unsafe { + BCryptGenRandom( + std::ptr::null_mut(), + bytes.as_mut_ptr(), + bytes.len() as u32, + BCRYPT_USE_SYSTEM_PREFERRED_RNG, + ) + }; + if ret != 0 { + fill_fallback(&mut bytes); + } + } + + #[cfg(not(any(unix, windows)))] + fill_fallback(&mut bytes); + + let mut result = String::with_capacity(6); + for &b in &bytes { + result.push(CHARS[(b as usize) % CHARS.len()] as char); + } + result +} + +/// Time + address based fallback when OS random is unavailable +fn fill_fallback(bytes: &mut [u8]) { + use std::time::{SystemTime, UNIX_EPOCH}; + let stack_var: u8 = 0; + let addr = &stack_var as *const u8 as u128; + let mut seed = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_nanos() + ^ addr; + for b in bytes.iter_mut() { + seed ^= seed << 13; + seed ^= seed >> 7; + seed ^= seed << 17; + *b = (seed & 0xff) as u8; + } +} + +fn mkdtemp_impl(prefix: String) -> Result { + if let Some(parent) = Path::new(&prefix).parent() { + if !parent.as_os_str().is_empty() && !parent.exists() { + return Err(Error::from_reason(format!( + "ENOENT: no such file or directory, mkdtemp '{}'", + prefix + ))); + } + } + + // Retry up to 10 times — matches Node.js / libuv behavior + for _ in 0..10 { + let suffix = generate_random_suffix(); + let dir_path = format!("{}{}", prefix, suffix); + match fs::create_dir(&dir_path) { + Ok(()) => return Ok(dir_path), + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => continue, + Err(e) => { + return Err(Error::from_reason(format!("{}, mkdtemp '{}'", e, prefix))); + } + } + } + + Err(Error::from_reason(format!( + "EEXIST: could not create unique temporary directory after 10 attempts, mkdtemp '{}'", + prefix + ))) +} + +#[napi(js_name = "mkdtempSync")] +pub fn mkdtemp_sync(prefix: String) -> Result { + mkdtemp_impl(prefix) +} + +// ========= async version ========= + +pub struct MkdtempTask { + pub prefix: String, +} + +impl Task for MkdtempTask { + type Output = String; + type JsValue = String; + + fn compute(&mut self) -> Result { + mkdtemp_impl(self.prefix.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "mkdtemp")] +pub fn mkdtemp(prefix: String) -> AsyncTask { + AsyncTask::new(MkdtempTask { prefix }) +} diff --git a/src/read_file.rs b/src/read_file.rs new file mode 100644 index 0000000..03a977d --- /dev/null +++ b/src/read_file.rs @@ -0,0 +1,159 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn decode_data(data: Vec, encoding: Option<&str>) -> Result> { + match encoding { + Some("utf8" | "utf-8") => { + let s = String::from_utf8(data) + .map_err(|e| Error::from_reason(format!("Invalid UTF-8: {}", e)))?; + Ok(Either::A(s)) + } + Some("ascii") => { + let s: String = data.iter().map(|&b| (b & 0x7f) as char).collect(); + Ok(Either::A(s)) + } + Some("latin1" | "binary") => { + let s: String = data.iter().map(|&b| b as char).collect(); + Ok(Either::A(s)) + } + Some("base64") => { + Ok(Either::A(base64_encode(&data, false))) + } + Some("base64url") => { + Ok(Either::A(base64_encode(&data, true))) + } + Some("hex") => { + let s: String = data.iter().map(|b| format!("{:02x}", b)).collect(); + Ok(Either::A(s)) + } + Some(enc) => Err(Error::from_reason(format!( + "Unknown encoding: {}", enc + ))), + None => Ok(Either::B(Buffer::from(data))), + } +} + +fn base64_encode(data: &[u8], url_safe: bool) -> String { + const STD: &[u8; 64] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + const URL: &[u8; 64] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"; + let table = if url_safe { URL } else { STD }; + + let mut result = String::with_capacity((data.len() + 2) / 3 * 4); + let chunks = data.chunks(3); + for chunk in chunks { + let b0 = chunk[0] as u32; + let b1 = if chunk.len() > 1 { chunk[1] as u32 } else { 0 }; + let b2 = if chunk.len() > 2 { chunk[2] as u32 } else { 0 }; + let triple = (b0 << 16) | (b1 << 8) | b2; + + result.push(table[((triple >> 18) & 0x3F) as usize] as char); + result.push(table[((triple >> 12) & 0x3F) as usize] as char); + if chunk.len() > 1 { + result.push(table[((triple >> 6) & 0x3F) as usize] as char); + } else if !url_safe { + result.push('='); + } + if chunk.len() > 2 { + result.push(table[(triple & 0x3F) as usize] as char); + } else if !url_safe { + result.push('='); + } + } + result +} + +#[napi(object)] +#[derive(Clone)] +pub struct ReadFileOptions { + pub encoding: Option, + pub flag: Option, +} + +fn read_file_impl( + path_str: String, + options: Option, +) -> Result> { + let path = Path::new(&path_str); + let opts = options.unwrap_or(ReadFileOptions { + encoding: None, + flag: None, + }); + + let flag = opts.flag.as_deref().unwrap_or("r"); + + let mut open_opts = fs::OpenOptions::new(); + match flag { + "r" => { open_opts.read(true); } + "rs" | "sr" => { open_opts.read(true); } + "r+" => { open_opts.read(true).write(true); } + "rs+" | "sr+" => { open_opts.read(true).write(true); } + "a+" => { open_opts.read(true).append(true).create(true); } + "ax+" | "xa+" => { open_opts.read(true).append(true).create_new(true); } + "w+" => { open_opts.read(true).write(true).create(true).truncate(true); } + "wx+" | "xw+" => { open_opts.read(true).write(true).create_new(true); } + _ => { open_opts.read(true); } + } + + let mut file = open_opts.open(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, open '{}'", + path.to_string_lossy() + )) + } else if e.kind() == std::io::ErrorKind::PermissionDenied { + Error::from_reason(format!( + "EACCES: permission denied, open '{}'", + path.to_string_lossy() + )) + } else if e.kind() == std::io::ErrorKind::AlreadyExists { + Error::from_reason(format!( + "EEXIST: file already exists, open '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + + use std::io::Read; + let mut data = Vec::new(); + file.read_to_end(&mut data).map_err(|e| Error::from_reason(e.to_string()))?; + + decode_data(data, opts.encoding.as_deref()) +} + +#[napi(js_name = "readFileSync")] +pub fn read_file_sync( + path: String, + options: Option, +) -> Result> { + read_file_impl(path, options) +} + +// ========= async version ========= + +pub struct ReadFileTask { + pub path: String, + pub options: Option, +} + +impl Task for ReadFileTask { + type Output = Either; + type JsValue = Either; + + fn compute(&mut self) -> Result { + read_file_impl(self.path.clone(), self.options.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "readFile")] +pub fn read_file(path: String, options: Option) -> AsyncTask { + AsyncTask::new(ReadFileTask { path, options }) +} diff --git a/src/readdir.rs b/src/readdir.rs index 0089e15..08cd61b 100644 --- a/src/readdir.rs +++ b/src/readdir.rs @@ -1,3 +1,5 @@ +use crate::types::Dirent; +use crate::utils::get_file_type_id; use jwalk::{Parallelism, WalkDir}; use napi::bindgen_prelude::*; use napi::Task; @@ -24,20 +26,16 @@ use std::path::Path; #[napi(object)] #[derive(Clone)] pub struct ReaddirOptions { + /// File name encoding. 'utf8' (default) returns strings. + /// 'buffer' returns Buffer objects for each name. + /// Other values are treated as 'utf8'. + pub encoding: Option, pub skip_hidden: Option, pub concurrency: Option, pub recursive: Option, pub with_file_types: Option, } -#[napi(object)] // Similar to fs.Dirent -#[derive(Clone)] -pub struct Dirent { - pub name: String, - pub parent_path: String, - pub is_dir: bool, -} - // #[napi] // marco: expose the function to Node fn ls( path_str: String, @@ -52,6 +50,7 @@ fn ls( ))); } let opts = options.unwrap_or(ReaddirOptions { + encoding: None, skip_hidden: Some(false), concurrency: None, recursive: Some(false), @@ -61,6 +60,9 @@ fn ls( let skip_hidden = opts.skip_hidden.unwrap_or(false); let recursive = opts.recursive.unwrap_or(false); let with_file_types = opts.with_file_types.unwrap_or(false); + // 'buffer' encoding is not supported in hyper-fs (we always return String). + // All other encoding values are treated as 'utf8'. + let _encoding = opts.encoding.as_deref().unwrap_or("utf8"); if !recursive { let parent_path_val = search_path_str.to_string(); @@ -89,7 +91,7 @@ fn ls( list.push(Dirent { name: name_str.to_string(), parent_path: parent_path_val.clone(), - is_dir: entry.file_type().map(|t| t.is_dir()).unwrap_or(false), + file_type: entry.file_type().map(|t| get_file_type_id(&t)).unwrap_or(0), }); } else if let Some(ref mut list) = result_files { list.push(name_str.to_string()); @@ -128,7 +130,7 @@ fn ls( Dirent { name: e.file_name().to_string_lossy().to_string(), parent_path: parent, - is_dir: e.file_type().is_dir(), + file_type: get_file_type_id(&e.file_type()), } }) .collect(); diff --git a/src/readlink.rs b/src/readlink.rs new file mode 100644 index 0000000..943ac64 --- /dev/null +++ b/src/readlink.rs @@ -0,0 +1,52 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn readlink_impl(path_str: String) -> Result { + let path = Path::new(&path_str); + let target = fs::read_link(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, readlink '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(format!( + "EINVAL: invalid argument, readlink '{}'", + path.to_string_lossy() + )) + } + })?; + Ok(target.to_string_lossy().to_string()) +} + +#[napi(js_name = "readlinkSync")] +pub fn readlink_sync(path: String) -> Result { + readlink_impl(path) +} + +// ========= async version ========= + +pub struct ReadlinkTask { + pub path: String, +} + +impl Task for ReadlinkTask { + type Output = String; + type JsValue = String; + + fn compute(&mut self) -> Result { + readlink_impl(self.path.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "readlink")] +pub fn readlink(path: String) -> AsyncTask { + AsyncTask::new(ReadlinkTask { path }) +} diff --git a/src/realpath.rs b/src/realpath.rs new file mode 100644 index 0000000..e480c5c --- /dev/null +++ b/src/realpath.rs @@ -0,0 +1,49 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn realpath_impl(path_str: String) -> Result { + let path = Path::new(&path_str); + let resolved = fs::canonicalize(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, realpath '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + Ok(resolved.to_string_lossy().to_string()) +} + +#[napi(js_name = "realpathSync")] +pub fn realpath_sync(path: String) -> Result { + realpath_impl(path) +} + +// ========= async version ========= + +pub struct RealpathTask { + pub path: String, +} + +impl Task for RealpathTask { + type Output = String; + type JsValue = String; + + fn compute(&mut self) -> Result { + realpath_impl(self.path.clone()) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "realpath")] +pub fn realpath(path: String) -> AsyncTask { + AsyncTask::new(RealpathTask { path }) +} diff --git a/src/rename.rs b/src/rename.rs new file mode 100644 index 0000000..05048a4 --- /dev/null +++ b/src/rename.rs @@ -0,0 +1,58 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn rename_impl(old_path_str: String, new_path_str: String) -> Result<()> { + let old_path = Path::new(&old_path_str); + let new_path = Path::new(&new_path_str); + + fs::rename(old_path, new_path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, rename '{}' -> '{}'", + old_path.to_string_lossy(), + new_path.to_string_lossy() + )) + } else { + Error::from_reason(format!( + "{}, rename '{}' -> '{}'", + e, + old_path.to_string_lossy(), + new_path.to_string_lossy() + )) + } + })?; + Ok(()) +} + +#[napi(js_name = "renameSync")] +pub fn rename_sync(old_path: String, new_path: String) -> Result<()> { + rename_impl(old_path, new_path) +} + +// ========= async version ========= + +pub struct RenameTask { + pub old_path: String, + pub new_path: String, +} + +impl Task for RenameTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + rename_impl(self.old_path.clone(), self.new_path.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "rename")] +pub fn rename(old_path: String, new_path: String) -> AsyncTask { + AsyncTask::new(RenameTask { old_path, new_path }) +} diff --git a/src/rm.rs b/src/rm.rs index 44d3dff..de324df 100644 --- a/src/rm.rs +++ b/src/rm.rs @@ -5,20 +5,15 @@ use rayon::prelude::*; use std::fs; use std::path::Path; -// nodejs rm jsdoc: -/** - * Asynchronously removes files and - * directories (modeled on the standard POSIX `rm` utility). - * @param {string | Buffer | URL} path - * @param {{ - * force?: boolean; - * maxRetries?: number; - * recursive?: boolean; - * retryDelay?: number; - * }} [options] - * @param {(err?: Error) => any} callback - * @returns {void} - */ +/// Removes files and directories (modeled on the standard POSIX `rm` utility). +/// +/// - `force`: When true, silently ignore errors when path does not exist. +/// - `recursive`: When true, remove directory and all its contents. +/// - `maxRetries`: If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or `EPERM` error is +/// encountered, Node.js retries the operation with a linear backoff of `retryDelay` ms longer on +/// each try. This option represents the number of retries. +/// - `retryDelay`: The amount of time in milliseconds to wait between retries (default 100ms). +/// - `concurrency` (hyper-fs extension): Number of parallel threads for recursive removal. #[napi(object)] #[derive(Clone)] @@ -72,6 +67,23 @@ fn remove_recursive(path: &Path, opts: &RmOptions) -> Result<()> { Ok(()) } +fn remove_with_retry(path: &Path, opts: &RmOptions) -> Result<()> { + let max_retries = opts.max_retries.unwrap_or(0) as usize; + let retry_delay = opts.retry_delay.unwrap_or(100) as u64; + + let mut last_err = None; + for attempt in 0..=max_retries { + if attempt > 0 { + std::thread::sleep(std::time::Duration::from_millis(retry_delay)); + } + match remove_recursive(path, opts) { + Ok(()) => return Ok(()), + Err(e) => last_err = Some(e), + } + } + Err(last_err.unwrap()) +} + fn remove(path_str: String, options: Option) -> Result<()> { let path = Path::new(&path_str); @@ -86,7 +98,6 @@ fn remove(path_str: String, options: Option) -> Result<()> { if !path.exists() { if force { - // If force is true, silently succeed when path doesn't exist return Ok(()); } return Err(Error::from_reason(format!( @@ -95,7 +106,12 @@ fn remove(path_str: String, options: Option) -> Result<()> { ))); } - remove_recursive(path, &opts) + let max_retries = opts.max_retries.unwrap_or(0); + if max_retries > 0 { + remove_with_retry(path, &opts) + } else { + remove_recursive(path, &opts) + } } // ========= async version ========= diff --git a/src/rmdir.rs b/src/rmdir.rs new file mode 100644 index 0000000..ad22011 --- /dev/null +++ b/src/rmdir.rs @@ -0,0 +1,61 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn rmdir_impl(path_str: String) -> Result<()> { + let path = Path::new(&path_str); + + fs::remove_dir(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, rmdir '{}'", + path.to_string_lossy() + )) + } else if e.to_string().contains("not empty") + || e.kind() == std::io::ErrorKind::AlreadyExists + { + Error::from_reason(format!( + "ENOTEMPTY: directory not empty, rmdir '{}'", + path.to_string_lossy() + )) + } else if e.to_string().contains("Not a directory") { + Error::from_reason(format!( + "ENOTDIR: not a directory, rmdir '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + }) +} + +#[napi(js_name = "rmdirSync")] +pub fn rmdir_sync(path: String) -> Result<()> { + rmdir_impl(path) +} + +// ========= async version ========= + +pub struct RmdirTask { + pub path: String, +} + +impl Task for RmdirTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + rmdir_impl(self.path.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "rmdir")] +pub fn rmdir(path: String) -> AsyncTask { + AsyncTask::new(RmdirTask { path }) +} diff --git a/src/stat.rs b/src/stat.rs new file mode 100644 index 0000000..7e8c770 --- /dev/null +++ b/src/stat.rs @@ -0,0 +1,160 @@ +use crate::types::Stats; +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +#[cfg(unix)] +use std::os::unix::fs::MetadataExt; + +fn metadata_to_stats(meta: &fs::Metadata) -> Stats { + #[cfg(unix)] + { + use std::time::UNIX_EPOCH; + let atime_ms = meta + .accessed() + .ok() + .and_then(|t| t.duration_since(UNIX_EPOCH).ok()) + .map(|d| d.as_secs_f64() * 1000.0) + .unwrap_or(0.0); + let mtime_ms = meta + .modified() + .ok() + .and_then(|t| t.duration_since(UNIX_EPOCH).ok()) + .map(|d| d.as_secs_f64() * 1000.0) + .unwrap_or(0.0); + let ctime_ms = (meta.ctime() as f64) * 1000.0 + (meta.ctime_nsec() as f64) / 1_000_000.0; + let birthtime_ms = meta + .created() + .ok() + .and_then(|t| t.duration_since(UNIX_EPOCH).ok()) + .map(|d| d.as_secs_f64() * 1000.0) + .unwrap_or(0.0); + + Stats { + dev: meta.dev() as f64, + mode: meta.mode(), + nlink: meta.nlink() as f64, + uid: meta.uid(), + gid: meta.gid(), + rdev: meta.rdev() as f64, + blksize: meta.blksize() as f64, + ino: meta.ino() as f64, + size: meta.size() as f64, + blocks: meta.blocks() as f64, + atime_ms, + mtime_ms, + ctime_ms, + birthtime_ms, + } + } + + #[cfg(not(unix))] + { + use std::time::UNIX_EPOCH; + let to_ms = |t: std::io::Result| -> f64 { + t.ok() + .and_then(|t| t.duration_since(UNIX_EPOCH).ok()) + .map(|d| d.as_secs_f64() * 1000.0) + .unwrap_or(0.0) + }; + let atime_ms = to_ms(meta.accessed()); + let mtime_ms = to_ms(meta.modified()); + let birthtime_ms = to_ms(meta.created()); + + let mode = if meta.is_dir() { + 0o040000u32 + } else if meta.is_symlink() { + 0o120000u32 + } else { + 0o100000u32 + }; + + Stats { + dev: 0.0, + mode, + nlink: 1.0, + uid: 0, + gid: 0, + rdev: 0.0, + blksize: 4096.0, + ino: 0.0, + size: meta.len() as f64, + blocks: 0.0, + atime_ms, + mtime_ms, + ctime_ms: mtime_ms, + birthtime_ms, + } + } +} + +fn stat_impl(path_str: String, follow_symlinks: bool) -> Result { + let path = Path::new(&path_str); + let meta = if follow_symlinks { + fs::metadata(path) + } else { + fs::symlink_metadata(path) + }; + let meta = meta.map_err(|e| { + if e.kind() == std::io::ErrorKind::PermissionDenied { + Error::from_reason(format!( + "EACCES: permission denied, stat '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(format!( + "ENOENT: no such file or directory, stat '{}'", + path.to_string_lossy() + )) + } + })?; + Ok(metadata_to_stats(&meta)) +} + +#[napi(js_name = "statSync")] +pub fn stat_sync(path: String) -> Result { + stat_impl(path, true) +} + +#[napi(js_name = "lstatSync")] +pub fn lstat_sync(path: String) -> Result { + stat_impl(path, false) +} + +// ========= async versions ========= + +pub struct StatTask { + pub path: String, + pub follow_symlinks: bool, +} + +impl Task for StatTask { + type Output = Stats; + type JsValue = Stats; + + fn compute(&mut self) -> Result { + stat_impl(self.path.clone(), self.follow_symlinks) + } + + fn resolve(&mut self, _env: Env, output: Self::Output) -> Result { + Ok(output) + } +} + +#[napi(js_name = "stat")] +pub fn stat(path: String) -> AsyncTask { + AsyncTask::new(StatTask { + path, + follow_symlinks: true, + }) +} + +#[napi(js_name = "lstat")] +pub fn lstat(path: String) -> AsyncTask { + AsyncTask::new(StatTask { + path, + follow_symlinks: false, + }) +} diff --git a/src/symlink.rs b/src/symlink.rs new file mode 100644 index 0000000..ee5e346 --- /dev/null +++ b/src/symlink.rs @@ -0,0 +1,90 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +/// On Windows, `symlink_type` controls whether a file or directory symlink +/// (or junction) is created. Valid values: 'file' | 'dir' | 'junction'. +/// On Unix this parameter is ignored. +fn symlink_impl(target: String, path_str: String, symlink_type: Option) -> Result<()> { + let path = Path::new(&path_str); + let target_path = Path::new(&target); + + if path.exists() || path.symlink_metadata().is_ok() { + return Err(Error::from_reason(format!( + "EEXIST: file already exists, symlink '{}' -> '{}'", + target, path_str + ))); + } + + #[cfg(unix)] + { + let _ = symlink_type; // unused on Unix + std::os::unix::fs::symlink(target_path, path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, symlink '{}' -> '{}'", + target, path_str + )) + } else { + Error::from_reason(format!("{}, symlink '{}' -> '{}'", e, target, path_str)) + } + })?; + } + + #[cfg(windows)] + { + let ty = symlink_type.as_deref().unwrap_or("file"); + match ty { + "junction" => { + // Junction only works for directories; use symlink_dir as fallback + std::os::windows::fs::symlink_dir(target_path, path) + } + "dir" => std::os::windows::fs::symlink_dir(target_path, path), + _ => std::os::windows::fs::symlink_file(target_path, path), + } + .map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, symlink '{}' -> '{}'", + target, path_str + )) + } else { + Error::from_reason(format!("{}, symlink '{}' -> '{}'", e, target, path_str)) + } + })?; + } + + Ok(()) +} + +#[napi(js_name = "symlinkSync")] +pub fn symlink_sync(target: String, path: String, symlink_type: Option) -> Result<()> { + symlink_impl(target, path, symlink_type) +} + +// ========= async version ========= + +pub struct SymlinkTask { + pub target: String, + pub path: String, + pub symlink_type: Option, +} + +impl Task for SymlinkTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + symlink_impl(self.target.clone(), self.path.clone(), self.symlink_type.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "symlink")] +pub fn symlink(target: String, path: String, symlink_type: Option) -> AsyncTask { + AsyncTask::new(SymlinkTask { target, path, symlink_type }) +} diff --git a/src/truncate.rs b/src/truncate.rs new file mode 100644 index 0000000..aae18d9 --- /dev/null +++ b/src/truncate.rs @@ -0,0 +1,54 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs::OpenOptions; +use std::path::Path; + +fn truncate_impl(path_str: String, len: Option) -> Result<()> { + let path = Path::new(&path_str); + let len = len.unwrap_or(0).max(0) as u64; + + let file = OpenOptions::new().write(true).open(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, open '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + + file.set_len(len).map_err(|e| Error::from_reason(e.to_string()))?; + Ok(()) +} + +#[napi(js_name = "truncateSync")] +pub fn truncate_sync(path: String, len: Option) -> Result<()> { + truncate_impl(path, len) +} + +// ========= async version ========= + +pub struct TruncateTask { + pub path: String, + pub len: Option, +} + +impl Task for TruncateTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + truncate_impl(self.path.clone(), self.len) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "truncate")] +pub fn truncate(path: String, len: Option) -> AsyncTask { + AsyncTask::new(TruncateTask { path, len }) +} diff --git a/src/types.rs b/src/types.rs new file mode 100644 index 0000000..18af83d --- /dev/null +++ b/src/types.rs @@ -0,0 +1,174 @@ +#![allow(dead_code)] + +use chrono::{DateTime, Local, TimeZone}; +use napi_derive::napi; + +#[napi] +#[derive(Clone)] +pub struct Dirent { + #[napi(readonly)] + pub name: String, + #[napi(readonly, js_name = "parentPath")] + pub parent_path: String, + // 1: file, 2: dir, 3: symlink, 4: block, 5: char, 6: fifo, 7: socket, 0: unknown + pub(crate) file_type: u8, +} + +#[napi] +impl Dirent { + #[napi(js_name = "isFile")] + pub fn is_file(&self) -> bool { + self.file_type == 1 + } + + #[napi(js_name = "isDirectory")] + pub fn is_directory(&self) -> bool { + self.file_type == 2 + } + + #[napi(js_name = "isSymbolicLink")] + pub fn is_symbolic_link(&self) -> bool { + self.file_type == 3 + } + + #[napi(js_name = "isBlockDevice")] + pub fn is_block_device(&self) -> bool { + self.file_type == 4 + } + + #[napi(js_name = "isCharacterDevice")] + pub fn is_character_device(&self) -> bool { + self.file_type == 5 + } + + #[napi(js_name = "isFIFO")] + pub fn is_fifo(&self) -> bool { + self.file_type == 6 + } + + #[napi(js_name = "isSocket")] + pub fn is_socket(&self) -> bool { + self.file_type == 7 + } + + // Deprecated alias + #[napi(getter)] + pub fn path(&self) -> String { + self.parent_path.clone() + } +} + +// S_IFMT mask and mode constants (matching Node.js / POSIX) +const S_IFMT: u32 = 0o170000; +const S_IFREG: u32 = 0o100000; +const S_IFDIR: u32 = 0o040000; +const S_IFLNK: u32 = 0o120000; +const S_IFBLK: u32 = 0o060000; +const S_IFCHR: u32 = 0o020000; +const S_IFIFO: u32 = 0o010000; +const S_IFSOCK: u32 = 0o140000; + +#[napi] +pub struct Stats { + #[napi(readonly)] + pub dev: f64, + #[napi(readonly)] + pub mode: u32, + #[napi(readonly)] + pub nlink: f64, + #[napi(readonly)] + pub uid: u32, + #[napi(readonly)] + pub gid: u32, + #[napi(readonly)] + pub rdev: f64, + #[napi(readonly)] + pub blksize: f64, + #[napi(readonly)] + pub ino: f64, + #[napi(readonly)] + pub size: f64, + #[napi(readonly)] + pub blocks: f64, + #[napi(readonly, js_name = "atimeMs")] + pub atime_ms: f64, + #[napi(readonly, js_name = "mtimeMs")] + pub mtime_ms: f64, + #[napi(readonly, js_name = "ctimeMs")] + pub ctime_ms: f64, + #[napi(readonly, js_name = "birthtimeMs")] + pub birthtime_ms: f64, +} + +#[napi] +impl Stats { + #[napi(js_name = "isFile")] + pub fn is_file(&self) -> bool { + (self.mode & S_IFMT) == S_IFREG + } + + #[napi(js_name = "isDirectory")] + pub fn is_directory(&self) -> bool { + (self.mode & S_IFMT) == S_IFDIR + } + + #[napi(js_name = "isSymbolicLink")] + pub fn is_symbolic_link(&self) -> bool { + (self.mode & S_IFMT) == S_IFLNK + } + + #[napi(js_name = "isBlockDevice")] + pub fn is_block_device(&self) -> bool { + (self.mode & S_IFMT) == S_IFBLK + } + + #[napi(js_name = "isCharacterDevice")] + pub fn is_character_device(&self) -> bool { + (self.mode & S_IFMT) == S_IFCHR + } + + #[napi(js_name = "isFIFO")] + pub fn is_fifo(&self) -> bool { + (self.mode & S_IFMT) == S_IFIFO + } + + #[napi(js_name = "isSocket")] + pub fn is_socket(&self) -> bool { + (self.mode & S_IFMT) == S_IFSOCK + } + + /// Returns atime as a Date object (Node.js compatible) + #[napi(getter)] + pub fn atime(&self) -> DateTime { + ms_to_datetime(self.atime_ms) + } + + /// Returns mtime as a Date object (Node.js compatible) + #[napi(getter)] + pub fn mtime(&self) -> DateTime { + ms_to_datetime(self.mtime_ms) + } + + /// Returns ctime as a Date object (Node.js compatible) + #[napi(getter)] + pub fn ctime(&self) -> DateTime { + ms_to_datetime(self.ctime_ms) + } + + /// Returns birthtime as a Date object (Node.js compatible) + #[napi(getter)] + pub fn birthtime(&self) -> DateTime { + ms_to_datetime(self.birthtime_ms) + } +} + +fn ms_to_datetime(ms: f64) -> DateTime { + // 先换算到纳秒整数,再用 Euclidean 除法拆分,保证 nsecs 始终非负 + let total_ns = (ms * 1_000_000.0).round() as i64; + let secs = total_ns.div_euclid(1_000_000_000); + let nsecs = total_ns.rem_euclid(1_000_000_000) as u32; + Local + .timestamp_opt(secs, nsecs) + .single() + .unwrap_or_else(|| Local.timestamp_opt(0, 0).unwrap()) +} diff --git a/src/unlink.rs b/src/unlink.rs new file mode 100644 index 0000000..68645ff --- /dev/null +++ b/src/unlink.rs @@ -0,0 +1,56 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs; +use std::path::Path; + +fn unlink_impl(path_str: String) -> Result<()> { + let path = Path::new(&path_str); + + // Node.js unlink only removes files and symlinks, not directories + let meta = fs::symlink_metadata(path).map_err(|_| { + Error::from_reason(format!( + "ENOENT: no such file or directory, unlink '{}'", + path.to_string_lossy() + )) + })?; + + if meta.is_dir() { + return Err(Error::from_reason(format!( + "EPERM: operation not permitted, unlink '{}'", + path.to_string_lossy() + ))); + } + + fs::remove_file(path).map_err(|e| Error::from_reason(e.to_string()))?; + Ok(()) +} + +#[napi(js_name = "unlinkSync")] +pub fn unlink_sync(path: String) -> Result<()> { + unlink_impl(path) +} + +// ========= async version ========= + +pub struct UnlinkTask { + pub path: String, +} + +impl Task for UnlinkTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + unlink_impl(self.path.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "unlink")] +pub fn unlink(path: String) -> AsyncTask { + AsyncTask::new(UnlinkTask { path }) +} diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..cf17bd7 --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,22 @@ +#[cfg(unix)] +use std::os::unix::fs::FileTypeExt; + +pub fn get_file_type_id(ft: &std::fs::FileType) -> u8 { + if ft.is_file() { + 1 + } else if ft.is_dir() { + 2 + } else if ft.is_symlink() { + 3 + } else if cfg!(unix) && ft.is_block_device() { + 4 + } else if cfg!(unix) && ft.is_char_device() { + 5 + } else if cfg!(unix) && ft.is_fifo() { + 6 + } else if cfg!(unix) && ft.is_socket() { + 7 + } else { + 0 + } +} diff --git a/src/utimes.rs b/src/utimes.rs new file mode 100644 index 0000000..3e82119 --- /dev/null +++ b/src/utimes.rs @@ -0,0 +1,109 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::path::Path; + +#[cfg(not(unix))] +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +#[cfg(not(unix))] +fn to_system_time(time_secs: f64) -> SystemTime { + if time_secs >= 0.0 { + UNIX_EPOCH + Duration::from_secs_f64(time_secs) + } else { + UNIX_EPOCH - Duration::from_secs_f64(-time_secs) + } +} + +fn utimes_impl(path_str: String, atime: f64, mtime: f64) -> Result<()> { + let path = Path::new(&path_str); + + if !path.exists() { + return Err(Error::from_reason(format!( + "ENOENT: no such file or directory, utimes '{}'", + path.to_string_lossy() + ))); + } + + #[cfg(unix)] + { + use std::ffi::CString; + + let c_path = CString::new(path.to_string_lossy().as_bytes()) + .map_err(|_| Error::from_reason("Invalid path"))?; + + let atime_sec = atime as i64; + let atime_nsec = ((atime - atime as i64 as f64) * 1_000_000_000.0) as i64; + let mtime_sec = mtime as i64; + let mtime_nsec = ((mtime - mtime as i64 as f64) * 1_000_000_000.0) as i64; + + let times = [ + libc::timespec { + tv_sec: atime_sec, + tv_nsec: atime_nsec, + }, + libc::timespec { + tv_sec: mtime_sec, + tv_nsec: mtime_nsec, + }, + ]; + + let ret = unsafe { libc::utimensat(libc::AT_FDCWD, c_path.as_ptr(), times.as_ptr(), 0) }; + if ret != 0 { + let e = std::io::Error::last_os_error(); + return Err(Error::from_reason(format!( + "{}, utimes '{}'", + e, + path.to_string_lossy() + ))); + } + } + + #[cfg(not(unix))] + { + use std::fs; + let atime_sys = to_system_time(atime); + let mtime_sys = to_system_time(mtime); + let file = fs::OpenOptions::new() + .write(true) + .open(path) + .map_err(|e| Error::from_reason(e.to_string()))?; + file + .set_modified(mtime_sys) + .map_err(|e| Error::from_reason(e.to_string()))?; + let _ = atime_sys; // Windows doesn't easily support setting atime via std + } + + Ok(()) +} + +#[napi(js_name = "utimesSync")] +pub fn utimes_sync(path: String, atime: f64, mtime: f64) -> Result<()> { + utimes_impl(path, atime, mtime) +} + +// ========= async version ========= + +pub struct UtimesTask { + pub path: String, + pub atime: f64, + pub mtime: f64, +} + +impl Task for UtimesTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + utimes_impl(self.path.clone(), self.atime, self.mtime) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "utimes")] +pub fn utimes(path: String, atime: f64, mtime: f64) -> AsyncTask { + AsyncTask::new(UtimesTask { path, atime, mtime }) +} diff --git a/src/write_file.rs b/src/write_file.rs new file mode 100644 index 0000000..ba2e625 --- /dev/null +++ b/src/write_file.rs @@ -0,0 +1,260 @@ +use napi::bindgen_prelude::*; +use napi::Task; +use napi_derive::napi; +use std::fs::{self, OpenOptions}; +use std::io::Write; +use std::path::Path; + +fn encode_string(s: &str, encoding: Option<&str>) -> Result> { + match encoding { + None | Some("utf8" | "utf-8") => Ok(s.as_bytes().to_vec()), + Some("ascii") => Ok(s.bytes().map(|b| b & 0x7f).collect()), + Some("latin1" | "binary") => Ok(s.chars().map(|c| c as u8).collect()), + Some("base64") => base64_decode(s, false), + Some("base64url") => base64_decode(s, true), + Some("hex") => hex_decode(s), + Some(enc) => Err(Error::from_reason(format!("Unknown encoding: {}", enc))), + } +} + +fn base64_decode(s: &str, url_safe: bool) -> Result> { + let mut buf = Vec::with_capacity(s.len() * 3 / 4); + let mut acc: u32 = 0; + let mut bits: u32 = 0; + for c in s.chars() { + let val = if url_safe { + match c { + 'A'..='Z' => c as u32 - 'A' as u32, + 'a'..='z' => c as u32 - 'a' as u32 + 26, + '0'..='9' => c as u32 - '0' as u32 + 52, + '-' => 62, + '_' => 63, + '=' => continue, + _ => continue, + } + } else { + match c { + 'A'..='Z' => c as u32 - 'A' as u32, + 'a'..='z' => c as u32 - 'a' as u32 + 26, + '0'..='9' => c as u32 - '0' as u32 + 52, + '+' => 62, + '/' => 63, + '=' => continue, + _ => continue, + } + }; + acc = (acc << 6) | val; + bits += 6; + if bits >= 8 { + bits -= 8; + buf.push((acc >> bits) as u8); + acc &= (1 << bits) - 1; + } + } + Ok(buf) +} + +fn hex_decode(s: &str) -> Result> { + let s = s.trim(); + if s.len() % 2 != 0 { + return Err(Error::from_reason("Invalid hex string".to_string())); + } + let mut buf = Vec::with_capacity(s.len() / 2); + let bytes = s.as_bytes(); + for i in (0..bytes.len()).step_by(2) { + let hi = hex_val(bytes[i])?; + let lo = hex_val(bytes[i + 1])?; + buf.push((hi << 4) | lo); + } + Ok(buf) +} + +fn hex_val(b: u8) -> Result { + match b { + b'0'..=b'9' => Ok(b - b'0'), + b'a'..=b'f' => Ok(b - b'a' + 10), + b'A'..=b'F' => Ok(b - b'A' + 10), + _ => Err(Error::from_reason(format!("Invalid hex character: {}", b as char))), + } +} + +#[napi(object)] +#[derive(Clone)] +pub struct WriteFileOptions { + pub encoding: Option, + pub mode: Option, + pub flag: Option, +} + +fn write_file_impl(path_str: String, data: Either, options: Option) -> Result<()> { + let path = Path::new(&path_str); + let opts = options.unwrap_or(WriteFileOptions { + encoding: None, + mode: None, + flag: None, + }); + + let flag = opts.flag.as_deref().unwrap_or("w"); + let encoding = opts.encoding.as_deref(); + let bytes: Vec = match &data { + Either::A(s) => encode_string(s, encoding)?, + Either::B(b) => b.to_vec(), + }; + + let mut open_opts = OpenOptions::new(); + match flag { + "w" => { open_opts.write(true).create(true).truncate(true); } + "wx" | "xw" => { open_opts.write(true).create_new(true); } + "a" => { open_opts.append(true).create(true); } + "ax" | "xa" => { open_opts.append(true).create_new(true); } + _ => { open_opts.write(true).create(true).truncate(true); } + } + + let mut file = open_opts.open(path).map_err(|e| { + if e.kind() == std::io::ErrorKind::NotFound { + Error::from_reason(format!( + "ENOENT: no such file or directory, open '{}'", + path.to_string_lossy() + )) + } else if e.kind() == std::io::ErrorKind::AlreadyExists { + Error::from_reason(format!( + "EEXIST: file already exists, open '{}'", + path.to_string_lossy() + )) + } else { + Error::from_reason(e.to_string()) + } + })?; + + file.write_all(&bytes).map_err(|e| Error::from_reason(e.to_string()))?; + + #[cfg(unix)] + if let Some(mode) = opts.mode { + use std::os::unix::fs::PermissionsExt; + let _ = fs::set_permissions(path, fs::Permissions::from_mode(mode)); + } + + Ok(()) +} + +#[napi(js_name = "writeFileSync")] +pub fn write_file_sync( + path: String, + data: Either, + options: Option, +) -> Result<()> { + write_file_impl(path, data, options) +} + +// ========= async version ========= + +pub struct WriteFileTask { + pub path: String, + pub string_data: Option, + pub bytes_data: Option>, + pub options: Option, +} + +impl Task for WriteFileTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + let data = if let Some(s) = self.string_data.take() { + Either::A(s) + } else { + Either::B(Buffer::from(self.bytes_data.take().unwrap_or_default())) + }; + write_file_impl(self.path.clone(), data, self.options.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "writeFile")] +pub fn write_file( + path: String, + data: Either, + options: Option, +) -> AsyncTask { + let (string_data, bytes_data) = match data { + Either::A(s) => (Some(s), None), + Either::B(b) => (None, Some(b.to_vec())), + }; + AsyncTask::new(WriteFileTask { + path, + string_data, + bytes_data, + options, + }) +} + +// appendFile is writeFile with flag='a' + +fn append_file_impl(path_str: String, data: Either, options: Option) -> Result<()> { + let opts = options.unwrap_or(WriteFileOptions { + encoding: None, + mode: None, + flag: None, + }); + let merged = WriteFileOptions { + encoding: opts.encoding, + mode: opts.mode, + flag: Some(opts.flag.unwrap_or_else(|| "a".to_string())), + }; + write_file_impl(path_str, data, Some(merged)) +} + +#[napi(js_name = "appendFileSync")] +pub fn append_file_sync( + path: String, + data: Either, + options: Option, +) -> Result<()> { + append_file_impl(path, data, options) +} + +pub struct AppendFileTask { + pub path: String, + pub string_data: Option, + pub bytes_data: Option>, + pub options: Option, +} + +impl Task for AppendFileTask { + type Output = (); + type JsValue = (); + + fn compute(&mut self) -> Result { + let data = if let Some(s) = self.string_data.take() { + Either::A(s) + } else { + Either::B(Buffer::from(self.bytes_data.take().unwrap_or_default())) + }; + append_file_impl(self.path.clone(), data, self.options.clone()) + } + + fn resolve(&mut self, _env: Env, _output: Self::Output) -> Result { + Ok(()) + } +} + +#[napi(js_name = "appendFile")] +pub fn append_file( + path: String, + data: Either, + options: Option, +) -> AsyncTask { + let (string_data, bytes_data) = match data { + Either::A(s) => (Some(s), None), + Either::B(b) => (None, Some(b.to_vec())), + }; + AsyncTask::new(AppendFileTask { + path, + string_data, + bytes_data, + options, + }) +}