Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
81 commits
Select commit Hold shift + click to select a range
6f3e323
feat: unified config schema — createConfig replaces createDdbConfig/c…
brunozoric May 11, 2026
494d40f
feat: loadConfig uses unified schema, drops storage guard
brunozoric May 11, 2026
cdf1fb8
feat: bootstrap registers all processors always; OS features conditio…
brunozoric May 11, 2026
3d2b76b
feat: replace config.storage guards with opensearch null-checks in pr…
brunozoric May 11, 2026
dfa021f
feat: processSegment receives preset via --preset CLI argument
brunozoric May 11, 2026
0c99d18
feat: preset discovery + simplified configDiscovery (config.ts only)
brunozoric May 11, 2026
4370ea3
feat: TransferWizard adds preset selection step, returns WizardResult
brunozoric May 11, 2026
9bf270f
feat: run/handler accepts presetName param, passes --preset to workers
brunozoric May 11, 2026
a0aac35
feat: public API exports createConfig; unified config template; proje…
brunozoric May 11, 2026
6198638
chore: update example templates and docs to use unified createConfig API
brunozoric May 11, 2026
772ec56
chore: raise coverage thresholds to current levels (84/83/74/84)
brunozoric May 11, 2026
b99b063
chore: prettier
brunozoric May 11, 2026
fd6ca64
fix: use raw DynamoDBClient for table creation in integration test
brunozoric May 11, 2026
0a83143
test: add coverage for Result, ResultAsync, and BaseError
brunozoric May 11, 2026
19f306c
docs: update README and AGENTS for unified createConfig
brunozoric May 11, 2026
4a512cf
feat: show preset description in wizard selection
brunozoric May 11, 2026
208fb8f
chore: prettier
brunozoric May 11, 2026
128bea9
feat: ask user to repopulate or reuse .env when JSON files are present
brunozoric May 11, 2026
e3c5cbe
feat: warn when source and target are in different AWS accounts
brunozoric May 11, 2026
2ba50a6
fix: restore accountId in merged webiny+pulumi path; add coverage for…
brunozoric May 11, 2026
ab2caf1
feat: add dry-run mode — reads source, skips all target writes
brunozoric May 11, 2026
dce4b66
chore: fix formatting
brunozoric May 11, 2026
fedb98a
docs: update AGENTS and README for wizard UX features
brunozoric May 11, 2026
ac31a58
chore: update dependencies
brunozoric May 11, 2026
5116b87
feat: populate TARGET_AUDIT_LOGS_TABLE from webiny/pulumi output
brunozoric May 11, 2026
9e5f967
feat: add source audit log table support
brunozoric May 11, 2026
99203a5
style: add ANSI color to cross-account warning in wizard
brunozoric May 11, 2026
3688a85
chore: remove staged JSON files and lock down projects/ gitignore
brunozoric May 11, 2026
8851386
feat: fromEnv supports null default for optional env vars
brunozoric May 11, 2026
df0dbfc
fix: lazy-resolve OpenSearchClient in OsProcessor to allow DDB-only runs
brunozoric May 11, 2026
892d88d
chore: default log level to debug
brunozoric May 11, 2026
4ecd8bb
feat: blackhole Form Builder records in v5-to-v6-ddb preset
brunozoric May 11, 2026
821b165
feat: blackhole Form Builder records in v5-to-v6-ddb preset
brunozoric May 11, 2026
fc4c7d3
fix: flush log streams immediately so piped output is not buffered
brunozoric May 11, 2026
574be1d
chore: fix formatting in PinoLogger
brunozoric May 11, 2026
dd7f571
fix: use SonicBoom periodicFlush instead of sync writes for log file
brunozoric May 11, 2026
f14b09d
feat: debug-level throttle logging for DDB, S3, and OpenSearch clients
brunozoric May 11, 2026
701144c
feat: write log file by default, opt out with logFile: false
brunozoric May 11, 2026
10c2ed2
docs: spec for periodic shard flush (flushEvery)
brunozoric May 11, 2026
f95ce69
feat: transform long text field
brunozoric May 11, 2026
5ca4fdb
docs: implementation plan for periodic shard flush
brunozoric May 11, 2026
ebbd18f
feat: transform long text field
brunozoric May 11, 2026
d01510d
feat: add tuning.flushEvery to schema
brunozoric May 11, 2026
e9c3a76
feat: periodic shard flush via tuning.flushEvery
brunozoric May 11, 2026
fafc6e9
test: cover zero-record shard path in periodic flush tests
brunozoric May 11, 2026
933cf81
feat: wire FLUSH_EVERY env var in config templates
brunozoric May 11, 2026
ba34ece
docs: document flushEvery periodic flush in AGENTS, README, and skills
brunozoric May 11, 2026
28216f3
fix: copy files preset
brunozoric May 11, 2026
3137f29
fix: suppress spurious "Set logLevel: debug" hint when no log level i…
brunozoric May 11, 2026
167b1ac
feat: processor guard warnings with cross-account S3 check
brunozoric May 11, 2026
49babaa
fix: fail fast on Access Denied S3 errors; revert unused putObject
brunozoric May 11, 2026
be72396
feat: add required checkAccess() to Processor.Interface with stub imp…
brunozoric May 11, 2026
4981283
feat: implement DdbProcessor.checkAccess() via DescribeTable
brunozoric May 11, 2026
8d2d7a4
fix: treat ResourceNotFoundException as denied; destroy DDB client af…
brunozoric May 11, 2026
9e8a2de
feat: implement AuditLogProcessor.checkAccess() via DescribeTable
brunozoric May 11, 2026
5e264f9
feat: add 'missing' AccessCheck status for ResourceNotFoundException
brunozoric May 11, 2026
9760ed0
feat: implement S3Processor.checkAccess() via HeadBucket
brunozoric May 11, 2026
aae80d7
test: reset S3 mock between describes in S3Processor tests
brunozoric May 11, 2026
8e447a5
feat: implement OsProcessor.checkAccess() via listIndexes
brunozoric May 11, 2026
8d53702
feat: add AccessChecker aggregator service
brunozoric May 11, 2026
5a9d3d8
test: use named StubProcessor type in AccessChecker test helper
brunozoric May 11, 2026
55781a3
feat: wire AccessChecker into run handler; abort on denied or missing…
brunozoric May 11, 2026
fd0dc2e
fix: use Promise.allSettled in AccessChecker; document checkAccess co…
brunozoric May 11, 2026
9bed20f
fix: throw AccessCheckError instead of process.exit in access check f…
brunozoric May 11, 2026
64a5ff0
test: fix mock reset, constructor resolver, and stale container in pr…
brunozoric May 11, 2026
96566d5
refactor: use named AwsErrorLike/OpenSearchErrorLike types in access …
brunozoric May 11, 2026
1326583
docs: add access checker implementation plan
brunozoric May 11, 2026
d8165dd
refactor: remove STS; persist account IDs from ARN into env and config
brunozoric May 11, 2026
af84894
fix: correct DynamoDB import path and fix never type in checkAccess t…
brunozoric May 11, 2026
a4bd5c9
test: cover missing branches to restore 71% branch threshold
brunozoric May 11, 2026
311b550
refactor: remove redundant type casts verified by ts-check
brunozoric May 11, 2026
0910a8e
fix: replace stale @/src import with ~/features alias; drop unused @/…
brunozoric May 11, 2026
70b1f1b
docs: remove init-project from quick start; yarn transfer is the only…
brunozoric May 11, 2026
21ff5b1
docs: add models/ note next to JSON file instructions in Quick start
brunozoric May 11, 2026
ea0e036
docs: remove OS-preset restriction from model exports note
brunozoric May 11, 2026
34b68dd
chore: prettier
brunozoric May 11, 2026
f430d3c
docs: add models/ note to init-project post-scaffold output
brunozoric May 11, 2026
fbfa9c3
feat: add models/ hint to wizard file-placement instructions
brunozoric May 11, 2026
2d46cf7
test: cover fieldUtils, fieldVisitor, copyFileToTarget, hook composit…
brunozoric May 11, 2026
b8fedaa
fix: remove unused vitest imports flagged by oxlint
brunozoric May 11, 2026
15dc45d
chore: prettier
brunozoric May 11, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .claude/skills/writing-data-transfer-config/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,7 @@ Post-run inspection: `cat .transfer/<runId>/logs/*.log | pino-pretty`. Default p

```ts
tuning: {
flushEvery: numberFromEnv("FLUSH_EVERY", 500), // records per shard flush — bounds peak memory
ddb: { maxRetries: 3, initialBackoffMs: 100 },
s3: { concurrency: 10, maxRetries: 3, initialBackoffMs: 100 },
os: {
Expand All @@ -194,6 +195,8 @@ tuning: {

All optional; absent = built-in defaults. AWS SDK `retryMode: "adaptive"` is always on for DDB + S3 — it self-tunes backoff based on real throttle signals, so you usually don't need to tune these.

**`flushEvery`** caps peak per-shard memory. The runner calls `processor.execute()` every N records and resets the pending-commands buffer. Default 500 (≈ 5 MB at a 10 KB average record). Lower to 100 for tables with very large records (approaching the 400 KB DDB max).

## Running it

From the user project root:
Expand Down
8 changes: 4 additions & 4 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,11 @@ logs.txt
!.yarn/sdks
!.yarn/versions
.pnp.*
projects/**/.env
projects/v5-to-v6/models/
# User project directories — not committed; v5-to-v6 is the committed example
projects/*/
# Projects — only the two reference files below are committed; everything else is local
projects/**
!projects/v5-to-v6/
!projects/v5-to-v6/config.ts
!projects/v5-to-v6/.env.example

CLAUDE.*.md
# Ignore .claude/ except the two shipped skills — config/preset writing
Expand Down
71 changes: 44 additions & 27 deletions AGENTS.md

Large diffs are not rendered by default.

113 changes: 45 additions & 68 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,34 @@ A generic data-transfer tool for Webiny environments. Copies DynamoDB + S3 (or O
- **Prod → dev seeding** — zero transformers, just copy.
- **Custom transfers** — write your own transformers + pipelines + preset for bespoke data moves.

The package ships two built-in presets (`v5-to-v6-ddb`, `v5-to-v6-os`) plus full authoring support for your own.
The package ships four built-in presets (`v5-to-v6-ddb`, `v5-to-v6-os`, `copy-ddb`, `copy-files`) plus full authoring support for your own.

## Quick start

```bash
git clone git@github.com:webiny/v5-to-v6.git
cd v5-to-v6
git clone git@github.com:webiny/data-transfer.git
cd data-transfer
yarn install
yarn transfer init-project my-transfer
# then run the guided setup:
yarn transfer
```

`yarn transfer` (no `--config`) launches the **guided setup wizard**. It walks you through selecting your project, collecting your Webiny output or Pulumi state JSON files, and automatically writing your `.env`. After writing the `.env` it exits — review the file and run `yarn transfer` again to start the transfer.
`yarn transfer` (no `--config`) launches the **guided setup wizard**. It walks you through:

To scaffold a new project folder:
1. Selecting (or creating) a project folder under `projects/`
2. Collecting your Webiny output or Pulumi state JSON files and writing `.env`
3. Selecting a preset and optional dry-run mode, then starting the transfer

```bash
yarn transfer init-project <name>
# e.g.
yarn transfer init-project my-client-prod
```
**First run (no `.env` yet):** the wizard extracts values from your JSON files, writes `.env`, and exits so you can review it before anything runs. Run `yarn transfer` again to continue.

**Subsequent runs (`.env` exists, no JSON files):** the wizard skips env setup entirely and goes straight to preset selection.

**`.env` exists AND JSON files present:** the wizard asks whether to repopulate `.env` from the JSON files or keep the existing values. Choose "repopulate" to refresh after deploying a new environment; choose "use existing" to skip to preset selection.

**Account ID warning:** the wizard extracts the AWS account ID from `primaryDynamodbTableArn` in the JSON files. If source and target accounts differ, it warns you to set `SOURCE_PROFILE` and `TARGET_PROFILE` in `.env` so the right credentials are used for each side.

This creates `projects/<name>/` with `ddb.transfer.config.ts`, `os.transfer.config.ts`, `README.md`, `.env.example`, `models/`, and `presets/` already wired up.
**Preset selection:** each preset is listed with its one-line description (`v5-to-v6-ddb — Full DDB migration`). User-supplied presets in `presetsDir` appear alongside built-ins.

**Dry-run mode:** after selecting a preset the wizard asks "Dry run?" (default: No). In dry-run mode the tool scans and transforms records normally but skips all writes to the target (DynamoDB, S3, OpenSearch). Useful for validating your pipeline and transformer chain before committing a full transfer.

New project folders are **gitignored** by default — credentials and env files stay local. Only `projects/v5-to-v6/` is committed as the reference example.

Expand All @@ -58,76 +62,40 @@ cp /path/to/target-project/state.json projects/<name>/target.pulumi.json

Mixed formats are allowed (e.g. `source.webiny.json` + `target.pulumi.json`).

## Storage modes

The config builder determines which AWS storage the transfer reads from and writes to:

- **`createDdbConfig(...)`** — DynamoDB primary table (+ S3 files). Handles all record types: CMS entries + models, security, file manager, folder permissions, mailer settings.
- **`createOsConfig(...)`** — OpenSearch companion DynamoDB table. Reads gzipped records, unzips, transforms, zips, writes to target OS DDB table.

Run DDB transfer first, then OS transfer with a separate config file. They don't share state.
**CMS model exports (optional):** drop your exported model definitions into `projects/<name>/models/`. Export them from the Webiny Admin CMS → Models → Export, then copy the file there. See [`modelsDir`](#modelsdir) for accepted formats.

## Config reference

### DDB config

```typescript
import {
loadEnv,
createDdbConfig,
fromAwsProfile,
fromEnv,
numberFromEnv
} from "@webiny/data-transfer";

loadEnv(import.meta.url);

export default createDdbConfig({
source: {
region: fromEnv("SOURCE_REGION", "us-east-1"),
credentials: fromAwsProfile({ profile: fromEnv("SOURCE_PROFILE", "default") }),
dynamodb: { tableName: fromEnv("SOURCE_DDB_TABLE") },
s3: { bucket: fromEnv("SOURCE_S3_BUCKET") }
},
target: {
region: fromEnv("TARGET_REGION", "us-east-1"),
credentials: fromAwsProfile({ profile: fromEnv("TARGET_PROFILE", "default") }),
dynamodb: { tableName: fromEnv("TARGET_DDB_TABLE") },
s3: { bucket: fromEnv("TARGET_S3_BUCKET") }
},
pipeline: {
preset: "./presets/my-preset.ts",
segments: numberFromEnv("SEGMENTS", 4),
modelsDir: "./models" // optional
}
});
```

`loadEnv(import.meta.url)` loads the `.env` file sitting next to this config file. Each project folder should have its own `.env` so credentials stay isolated between projects.

### OS config
One `config.ts` file covers all storage types. DynamoDB and S3 are required; OpenSearch is optional — omit or set to `null` if your environment doesn't use it. The preset you select at runtime determines which storage operations actually run.

```typescript
import {
loadEnv,
createOsConfig,
createConfig,
fromAwsProfile,
fromEnv,
numberFromEnv
} from "@webiny/data-transfer";

loadEnv(import.meta.url);

export default createOsConfig({
export default createConfig({
source: {
region: fromEnv("SOURCE_REGION", "us-east-1"),
region: fromEnv("SOURCE_REGION", "eu-central-1"),
credentials: fromAwsProfile({ profile: fromEnv("SOURCE_PROFILE", "default") }),
dynamodb: { tableName: fromEnv("SOURCE_DDB_TABLE") },
s3: { bucket: fromEnv("SOURCE_S3_BUCKET") },
// Remove or set to null if your source has no OpenSearch:
opensearch: { tableName: fromEnv("SOURCE_OS_TABLE") }
},
target: {
region: fromEnv("TARGET_REGION", "us-east-1"),
region: fromEnv("TARGET_REGION", "eu-central-1"),
credentials: fromAwsProfile({ profile: fromEnv("TARGET_PROFILE", "default") }),
dynamodb: { tableName: fromEnv("TARGET_DDB_TABLE") },
s3: { bucket: fromEnv("TARGET_S3_BUCKET") },
// Set tableName to null or omit the block to skip the audit log:
auditLog: { dynamodb: { tableName: fromEnv("TARGET_AUDIT_LOGS_TABLE") } },
// Remove or set to null if your target has no OpenSearch:
opensearch: {
endpoint: fromEnv("TARGET_OS_ENDPOINT"),
tableName: fromEnv("TARGET_OS_TABLE"),
Expand All @@ -136,14 +104,17 @@ export default createOsConfig({
}
},
pipeline: {
preset: "v5-to-v6-os",
segments: numberFromEnv("SEGMENTS", 4),
modelsDir: fromEnv("MODELS_DIR", "./models")
modelsDir: fromEnv("MODELS_DIR", "./models"),
// Optional: point at your own preset files (alongside built-ins):
presetsDir: "./presets"
}
});
```

**Index management** (OS mode): the tool disables `refresh_interval` just-in-time when it first writes to each index, and restores the original value after the transfer completes. Missing indexes are created with the Webiny base mapping. Only touched indexes are affected.
`loadEnv(import.meta.url)` loads the `.env` file sitting next to this config file. Each project folder should have its own `.env` so credentials stay isolated between projects.

**Index management** (OpenSearch): the tool disables `refresh_interval` just-in-time when it first writes to each index, and restores the original value after the transfer completes. Missing indexes are created with the Webiny base mapping. Only touched indexes are affected.

### Env helpers

Expand Down Expand Up @@ -186,6 +157,7 @@ JSON models override DB-loaded models when both exist.

```typescript
tuning: {
flushEvery: numberFromEnv("FLUSH_EVERY", 500), // records per shard flush — bounds peak memory
ddb: { maxRetries: 3, initialBackoffMs: 100 },
s3: { concurrency: 10, maxRetries: 3, initialBackoffMs: 100 },
os: { maxRetries: 3, retryScheduleMs: [5000, 10000, 20000], gzipConcurrency: 16 }
Expand All @@ -194,6 +166,8 @@ tuning: {

All fields are optional; absent = built-in defaults. `BATCH_SIZE` for DynamoDB is NOT tunable (AWS enforces 25 items per `BatchWriteItem`). DDB and S3 clients run in AWS SDK `adaptive` retry mode — `tuning.{ddb,s3}.maxRetries` caps the outer retry envelope on top of the SDK's own self-tuning backoff.

**`tuning.flushEvery`** controls how often accumulated write commands are flushed during a shard scan. The runner calls `processor.execute()` every N records and resets the buffer, so peak memory stays at `flushEvery × avg_record_size` regardless of table size. Default 500 (≈ 5 MB at a 10 KB average). Lower to 100 for tables with very large records.

### Debug options

Add a `debug` block to your config to opt into diagnostics:
Expand Down Expand Up @@ -269,7 +243,7 @@ export default createTransferPreset({
});
```

Point `config.pipeline.preset` at the file path (relative to the config): `"./presets/my-preset.ts"`. Or use a built-in name like `"v5-to-v6-ddb"`.
Drop the file in your `projects/<name>/presets/` directory. The wizard will offer it by name alongside built-ins.

### `pipelineBuilderFactory.create({ name, scanner, processors })`

Expand Down Expand Up @@ -380,12 +354,14 @@ export default createTransferPreset({

### Built-in presets

Pass by name in `config.pipeline.preset`:
Select by name when the wizard asks "Which preset do you want to run?":

- **`"v5-to-v6-ddb"`** — full Webiny v5 → v6 migration of the primary DynamoDB table (CMS entries, file manager, security, mailer, folder permissions, etc.).
- **`"v5-to-v6-os"`** — migration of the OpenSearch companion DynamoDB table. Run **after** `v5-to-v6-ddb`.
- **`"copy-ddb"`** — verbatim DynamoDB-only copy (no transformations).
- **`"copy-files"`** — verbatim DynamoDB + S3 file copy.

Custom presets are path-resolved from your config file's directory.
Custom presets placed in your `presetsDir` are listed alongside built-ins.

---

Expand Down Expand Up @@ -552,6 +528,7 @@ The CLI picks it up automatically and runs it **before** loading your preset, so

## Troubleshooting

- **Out-of-memory on large tables** — each worker buffers write commands between flushes. Reduce `tuning.flushEvery` (default 500) to a smaller value (e.g. `FLUSH_EVERY=100`) so each flush covers fewer records and peak memory stays manageable.
- **AWS throttling** — the SDK self-tunes via `retryMode: "adaptive"`. If you still hit the outer cap, bump `tuning.ddb.maxRetries` / `tuning.s3.maxRetries`; lower `tuning.s3.concurrency` for S3-heavy transfers.
- **OS indexes not creating** — the transfer aborts if index prep exhausts retries. Tune `tuning.os.maxRetries` and `tuning.os.retryScheduleMs`, or fix the underlying mapping error surfaced in the logs.
- **Missing env vars** — run `yarn transfer` (no `--config`) to launch the guided setup wizard, which writes your `.env` automatically. Or copy `.env.example` manually and fill it in. Config files use `loadEnv(import.meta.url)` to load the sibling `.env`.
Expand Down
50 changes: 50 additions & 0 deletions __tests__/base/BaseError.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import { describe, it, expect } from "vitest";
import { BaseError } from "~/base/BaseError.ts";

/**
 * Concrete subclass exercising the typed-data path: BaseError<{ field: string }>
 * carries a structured payload alongside the message.
 */
class TestError extends BaseError<{ field: string }> {
    public readonly code = "TEST_ERROR";

    public constructor(message: string, field: string) {
        super({ message, data: { field } });
    }
}

/**
 * Concrete subclass exercising the void-data path plus the optional
 * custom-stack second constructor argument.
 */
class VoidTestError extends BaseError {
    public readonly code = "VOID_ERROR";

    public constructor(message: string, stack?: string) {
        super({ message }, { stack });
    }
}

describe("BaseError", () => {
    it("sets message from input", () => {
        const error = new TestError("something broke", "name");
        expect(error.message).toBe("something broke");
    });

    it("sets data from input", () => {
        const error = new TestError("oops", "email");
        expect(error.data).toEqual({ field: "email" });
    });

    it("exposes code on the instance", () => {
        const error = new TestError("x", "y");
        expect(error.code).toBe("TEST_ERROR");
    });

    it("is an instance of Error", () => {
        const error = new TestError("x", "y");
        expect(error).toBeInstanceOf(Error);
    });

    it("stores custom stack when provided", () => {
        const error = new VoidTestError("test", "custom stack");
        expect(error.stack).toBe("custom stack");
    });

    it("data is undefined for void data type", () => {
        const error = new VoidTestError("no data");
        expect(error.data).toBeUndefined();
    });
});
98 changes: 98 additions & 0 deletions __tests__/base/Result.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import { describe, it, expect, vi } from "vitest";
import { Result } from "~/base/Result.ts";

describe("Result", () => {
    describe("ok", () => {
        it("creates a successful result with a value", () => {
            const result = Result.ok(42);
            expect(result.isOk()).toBe(true);
            expect(result.isFail()).toBe(false);
            expect(result.value).toBe(42);
        });

        it("creates a successful result with no value", () => {
            const result = Result.ok();
            expect(result.isOk()).toBe(true);
            expect(result.value).toBeUndefined();
        });
    });

    describe("fail", () => {
        it("creates a failed result with an error", () => {
            const result = Result.fail("oops");
            expect(result.isFail()).toBe(true);
            expect(result.isOk()).toBe(false);
            expect(result.error).toBe("oops");
        });
    });

    describe("value getter", () => {
        it("throws when accessed on a failed result", () => {
            const failed = Result.fail("err");
            // Accessing .value on a failure must raise, not return undefined.
            expect(() => failed.value).toThrow("Tried to get value from a failed Result.");
        });
    });

    describe("error getter", () => {
        it("throws when accessed on a successful result", () => {
            const succeeded = Result.ok(1);
            // Symmetric guard: .error is only valid on failures.
            expect(() => succeeded.error).toThrow("Tried to get error from a successful Result.");
        });
    });

    describe("map", () => {
        it("transforms the value on success", () => {
            const mapped = Result.ok(2).map(v => v * 3);
            expect(mapped.value).toBe(6);
        });

        it("passes through the error on failure", () => {
            const failed = Result.fail<string>("e");
            const mapped = failed.map((v: never) => v);
            expect(mapped.error).toBe("e");
        });
    });

    describe("mapError", () => {
        it("transforms the error on failure", () => {
            const wrapped = Result.fail("raw").mapError(e => `wrapped:${e}`);
            expect(wrapped.error).toBe("wrapped:raw");
        });

        it("passes through the value on success", () => {
            const untouched = Result.ok(7).mapError(() => "x");
            expect(untouched.value).toBe(7);
        });
    });

    describe("flatMap", () => {
        it("chains a new Result on success", () => {
            const chained = Result.ok(5).flatMap(v => Result.ok(v + 1));
            expect(chained.value).toBe(6);
        });

        it("short-circuits on failure", () => {
            const spy = vi.fn();
            const failed = Result.fail<string>("e").flatMap(spy);
            // The continuation must never run when the chain already failed.
            expect(spy).not.toHaveBeenCalled();
            expect(failed.error).toBe("e");
        });
    });

    describe("match", () => {
        it("calls ok handler on success", () => {
            const outcome = Result.ok("hi").match({
                ok: v => `ok:${v}`,
                fail: () => "fail"
            });
            expect(outcome).toBe("ok:hi");
        });

        it("calls fail handler on failure", () => {
            const outcome = Result.fail("boom").match({
                ok: () => "ok",
                fail: e => `fail:${e}`
            });
            expect(outcome).toBe("fail:boom");
        });
    });
});
Loading
Loading