diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 12b1507e..cf91f07f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -93,7 +93,7 @@ jobs: workspaces: | . # Intentionally test only warp-core under MUSL; warp-wasm targets wasm32 - # (wasm-bindgen/js-sys) and warp-ffi has separate cross-compilation concerns. + # (wasm-bindgen/js-sys) and has separate cross-compilation concerns. - name: cargo test (warp-core, musl) run: cargo test -p warp-core --target x86_64-unknown-linux-musl @@ -344,9 +344,6 @@ jobs: run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-core --no-deps - name: rustdoc warnings gate (warp-geom) run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-geom --no-deps - - name: rustdoc warnings gate (warp-ffi) - run: | - if [ -f crates/warp-ffi/Cargo.toml ]; then RUSTDOCFLAGS="-D warnings" cargo doc -p warp-ffi --no-deps; fi - name: rustdoc warnings gate (warp-wasm) run: | if [ -f crates/warp-wasm/Cargo.toml ]; then RUSTDOCFLAGS="-D warnings" cargo doc -p warp-wasm --no-deps; fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 5705097f..7d2371db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,122 @@ ## Unreleased +### Fixed — Developer CLI (`echo-cli`) + +- **Bench Filter:** `echo-cli bench --filter ` now passes the filter + as a Criterion regex (`-- `) instead of a `--bench` cargo target + selector. Previous behavior would look for a bench _target_ named after the + pattern rather than filtering benchmarks by regex. +- **Verify Expected Hash:** `--expected` now correctly reports "unchecked" for + warps 1+ instead of silently claiming "pass". Emits a stderr warning when + `--expected` is used with multi-warp snapshots. Text and JSON output now + use consistent lowercase status values. +- **Unused Dependency:** Removed `colored = "2"` from `warp-cli` (declared but + never imported). +- **Output Hardening:** `emit()` no longer panics on JSON serialization failure; + falls back to stderr. 
Bench exit status now reports Unix signal numbers + instead of a misleading `-1`. +- **Error Handling:** `collect_criterion_results` now logs a warning on + unparseable `estimates.json` instead of silently skipping. `format_duration` + returns "N/A" for NaN/negative values. `att_row_to_value` warns on missing + blob data instead of silent fallback. +- **Dead Code:** Replaced blanket `#![allow(dead_code)]` on `lib.rs` with + targeted `#[allow(dead_code)]` on the `output` module only. +- **Man Page Headers:** Subcommand man pages now use prefixed names + (`echo-cli-bench`, `echo-cli-verify`, `echo-cli-inspect`) in `.TH` headers + instead of bare subcommand names. +- **Visibility:** Narrowed all non-API structs and functions from `pub` to + `pub(crate)` in bench, verify, inspect, and wsc_loader modules. Only + `cli.rs` types remain `pub` (required by xtask man page generation). +- **cargo-deny:** Fixed wildcard dependency error for `warp-cli` in + `xtask/Cargo.toml` by adding explicit `version = "0.1.0"` alongside + the path override. +- **Man Page Cleanup:** `cargo xtask man-pages` now removes stale + `echo-cli*.1` files before regeneration so the output directory is an + exact snapshot. + +### Fixed — Code Review (PR #289, Round 2) + +- **Inspect Tree Warp Identity:** Multi-warp snapshots now label each tree + section with its warp index (`Tree (warp 0):`, `Tree (warp 1):`) instead of + flattening all trees into a single unlabeled `Tree:` section. +- **WSC Loader Attachment Checks:** Replaced `debug_assert!` with runtime + warnings for attachment multiplicity violations. Previously, release builds + silently dropped extra attachments; now emits a warning to stderr. +- **Test Naming:** Renamed `tampered_wsc_fails` to `tampered_wsc_does_not_panic` + to accurately reflect the test's behavior (no assertion, just no-panic guard). 
+- **Test Coverage:** Added `roundtrip_with_edge_attachments` and + `roundtrip_with_descend_attachment` tests to `wsc_loader.rs`, covering + previously untested code paths. +- **SPEC-0005 `global_tick` Invariant:** Reworded from `patches[i].global_tick == i` + to correctly state contiguity relative to the payload's start tick, since + payloads can begin at any absolute tick via `from_store(store, wl, 5..10)`. +- **SPEC-0005 BTR Verification:** Fixed step 5 of the verification algorithm + to reference the actual hash formula from §5.4 instead of a nonexistent + `parents` field. +- **SPEC-0005 Derivation Algorithm:** Fixed backward-cone traversal that dropped + transitive dependencies. The original filter checked the root query slot at + every hop; now accepts all frontier nodes unconditionally (they are already + known-causal) and traces all `in_slots` backward. +- **Stale `warp-ffi` References:** Removed dead `warp-ffi` entry from + `det-policy.yaml`, C ABI text from `phase1-plan.md`, and stale CLI names + from `rust-rhai-ts-division.md`. + +### Fixed — Docs & CI + +- **TASKS-DAG Spec Path:** `SPEC-PROVENANCE-PAYLOAD.md` → + `SPEC-0005-provenance-payload.md` in sub-task title and AC1 (two + occurrences). Same stale path fixed in ROADMAP backlog `security.md`. +- **SPEC-0005 Byte Counts:** Domain separation tag sizes corrected: + `echo:provenance_payload:v1\0` = 27 bytes (was 28), + `echo:provenance_edge:v1\0` = 24 bytes (was 25). +- **Project Tour:** Updated `warp-cli` description from "Placeholder CLI home" + to list actual subcommands (verify, bench, inspect). +- **CI Formatting:** Removed stray blank line between warp-geom and warp-wasm + rustdoc steps in `ci.yml`. + +### Added — Developer CLI (`echo-cli`) + +- **CLI Scaffold (`warp-cli`):** Replaced placeholder with full `clap` 4 derive + subcommand dispatch. Three subcommands: `verify`, `bench`, `inspect`. Global + `--format text|json` flag for machine-readable output. 
+- **Verify Subcommand:** `echo-cli verify ` loads a WSC snapshot, + validates structural integrity via `validate_wsc`, reconstructs the in-memory + `GraphStore` from columnar data, and computes the state root hash. Optional + `--expected ` flag compares against a known hash. +- **WSC Loader:** New `wsc_loader` module bridges WSC columnar format to + `GraphStore` — the inverse of `warp_core::wsc::build_one_warp_input`. + Reconstructs nodes, edges, and attachments from `WarpView`. +- **Bench Subcommand:** `echo-cli bench [--filter ]` shells out to + `cargo bench -p warp-benches`, parses Criterion JSON from + `target/criterion/*/new/estimates.json`, and renders an ASCII table via + `comfy-table`. Supports `--format json` for CI integration. +- **Inspect Subcommand:** `echo-cli inspect [--tree]` displays + WSC metadata (tick, schema hash, warp count), graph statistics (node/edge + counts, type breakdown, connected components via BFS), and optional ASCII + tree rendering depth-limited to 5 levels. +- **Man Pages:** Added `clap_mangen`-based man page generation to `xtask`. + `cargo xtask man-pages` generates `docs/man/echo-cli.1`, + `echo-cli-verify.1`, `echo-cli-bench.1`, `echo-cli-inspect.1`. + +### Removed + +- **`warp-ffi` crate deleted:** The C ABI integration path (`crates/warp-ffi`) + has been removed. The C ABI approach was abandoned in favor of Rust plugin + extension via `RewriteRule` trait registration and Rhai scripting. See + TASKS-DAG.md #26 (Graveyard). This is a **BREAKING CHANGE** for any + downstream code that depended on the C FFI surface. + +### Added — Provenance Payload Spec (PP-1) + +- **SPEC-0005:** Published `docs/spec/SPEC-0005-provenance-payload.md` mapping + Paper III (AION Foundations) formalism to concrete Echo types. 
Defines four + new types (`ProvenancePayload`, `BoundaryTransitionRecord`, `ProvenanceNode`, + `DerivationGraph`), wire format with CBOR encoding and domain separation tags, + two worked examples (3-tick accumulator, branching fork), bridge to existing + `ProvenanceStore`/`PlaybackCursor` APIs, and attestation envelope with SLSA + alignment. + ### Fixed (CI) - **Evidence Derivation:** Replaced artifact-directory-presence check for `DET-001` with diff --git a/Cargo.lock b/Cargo.lock index a822d129..9b307429 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -220,6 +220,21 @@ dependencies = [ "libloading", ] +[[package]] +name = "assert_cmd" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c5bcfa8749ac45dd12cb11055aeeb6b27a3895560d60d71e3c23bf979e60514" +dependencies = [ + "anstyle", + "bstr", + "libc", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "async-lock" version = "3.4.2" @@ -465,6 +480,17 @@ dependencies = [ "objc2 0.5.2", ] +[[package]] +name = "bstr" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" +dependencies = [ + "memchr", + "regex-automata", + "serde", +] + [[package]] name = "bumpalo" version = "3.19.1" @@ -672,6 +698,16 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" +[[package]] +name = "clap_mangen" +version = "0.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ea63a92086df93893164221ad4f24142086d535b3a0957b9b9bea2dc86301" +dependencies = [ + "clap", + "roff", +] + [[package]] name = "clipboard-win" version = "5.4.1" @@ -734,6 +770,17 @@ dependencies = [ "memchr", ] +[[package]] +name = "comfy-table" +version = "7.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" +dependencies = [ + "crossterm", + "unicode-segmentation", + "unicode-width", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -969,6 +1016,29 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "crossterm" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.10.0", + "crossterm_winapi", + "document-features", + "parking_lot", + "rustix 1.1.3", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + [[package]] name = "crunchy" version = "0.2.4" @@ -1028,6 +1098,12 @@ dependencies = [ "syn", ] +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.10.7" @@ -1652,6 +1728,15 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +[[package]] +name = "float-cmp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" +dependencies = [ + "num-traits", +] + [[package]] name = "fnv" version = "1.0.7" @@ -2873,6 +2958,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" +[[package]] +name = 
"normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + [[package]] name = "nu-ansi-term" version = "0.50.3" @@ -3493,6 +3584,36 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "predicates" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ada8f2932f28a27ee7b70dd6c1c39ea0675c55a36879ab92f3a715eaa1e63cfe" +dependencies = [ + "anstyle", + "difflib", + "float-cmp 0.10.0", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad38746f3166b4031b1a0d39ad9f954dd291e7854fcc0eed52ee41a0b50d144" + +[[package]] +name = "predicates-tree" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0de1b847b39c8131db0467e9df1ff60e6d0562ab8e9a16e568ad0fdb372e2f2" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "presser" version = "0.3.1" @@ -3929,6 +4050,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "roff" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3" + [[package]] name = "roxmltree" version = "0.20.0" @@ -4477,7 +4604,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731" dependencies = [ - "float-cmp", + "float-cmp 0.9.0", ] [[package]] @@ -4598,6 +4725,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "termtree" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + [[package]] name = "thiserror" version = 
"1.0.69" @@ -5178,6 +5311,19 @@ dependencies = [ [[package]] name = "warp-cli" version = "0.1.0" +dependencies = [ + "anyhow", + "assert_cmd", + "bytes", + "clap", + "comfy-table", + "hex", + "predicates", + "serde", + "serde_json", + "tempfile", + "warp-core", +] [[package]] name = "warp-core" @@ -5198,13 +5344,6 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "warp-ffi" -version = "0.1.0" -dependencies = [ - "warp-core", -] - [[package]] name = "warp-geom" version = "0.1.0" @@ -5693,6 +5832,22 @@ dependencies = [ "web-sys", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + [[package]] name = "winapi-util" version = "0.1.11" @@ -5702,6 +5857,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows" version = "0.58.0" @@ -6217,8 +6378,10 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", + "clap_mangen", "serde", "serde_json", + "warp-cli", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 73f35d7f..ab3c971f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ [workspace] members = [ "crates/warp-core", - "crates/warp-ffi", + "crates/warp-wasm", "crates/warp-cli", "crates/warp-geom", diff --git a/TASKS-DAG.md b/TASKS-DAG.md index 9710184f..12806e95 100644 --- a/TASKS-DAG.md +++ b/TASKS-DAG.md @@ -34,22 +34,20 @@ This living list documents open issues and the 
inferred dependencies contributor - Confidence: strong - Evidence: Inferred: Epic completion depends on constituent task -## [#21: Spec: Security Contexts (FFI/WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) +## [#21: Spec: Security Contexts (WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) - Status: Open - Blocked by: - [#37: Draft security contexts spec](https://github.com/flyingrobots/echo/issues/37) - Confidence: strong - Evidence: Inferred: Epic completion depends on Draft Spec task - - [#38: FFI limits and validation](https://github.com/flyingrobots/echo/issues/38) + - ~~[#38: FFI limits and validation](https://github.com/flyingrobots/echo/issues/38)~~ — Closed (Graveyard: C ABI abandoned for determinism) + - [#39: WASM input validation](https://github.com/flyingrobots/echo/issues/39) — Completed - Confidence: strong - - Evidence: Inferred: Epic completion depends on constituent task - - [#39: WASM input validation](https://github.com/flyingrobots/echo/issues/39) - - Confidence: strong - - Evidence: Inferred: Epic completion depends on constituent task + - Evidence: `crates/warp-wasm/src/lib.rs` implements `validate_object_against_args` with 4 test cases. - [#40: Unit tests for denials](https://github.com/flyingrobots/echo/issues/40) - Confidence: strong - - Evidence: Inferred: Epic completion depends on constituent task + - Evidence: Inferred: Epic completion depends on constituent task (scoped to WASM/CLI denials) ## [#22: Benchmarks & CI Regression Gates](https://github.com/flyingrobots/echo/issues/22) @@ -74,7 +72,7 @@ This living list documents open issues and the inferred dependencies contributor ## [#26: Plugin ABI (C) v0](https://github.com/flyingrobots/echo/issues/26) -- Status: In Progress +- Status: Closed (Graveyard: C ABI abandoned for determinism — C's UB is incompatible with Echo's determinism guarantees. Rust plugin extension via RewriteRule trait registration and Rhai scripting replace this path.) 
- (No detected dependencies) ## [#27: Add golden test vectors (encoder/decoder)](https://github.com/flyingrobots/echo/issues/27) @@ -150,19 +148,17 @@ This living list documents open issues and the inferred dependencies contributor ## [#38: FFI limits and validation](https://github.com/flyingrobots/echo/issues/38) -- Status: In Progress +- Status: Closed (Graveyard: C ABI abandoned for determinism — warp-ffi crate deleted) - Blocks: - - [#21: Spec: Security Contexts (FFI/WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) - - Confidence: strong - - Evidence: Inferred: Epic completion depends on constituent task + - [#21: Spec: Security Contexts (WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) — no longer blocking (FFI path removed) ## [#39: WASM input validation](https://github.com/flyingrobots/echo/issues/39) -- Status: In Progress +- Status: Completed - Blocks: - - [#21: Spec: Security Contexts (FFI/WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) + - [#21: Spec: Security Contexts (WASM/CLI)](https://github.com/flyingrobots/echo/issues/21) - Confidence: strong - - Evidence: `crates/warp-wasm/src/lib.rs` implements `validate_object_against_args` for schema checks. + - Evidence: `crates/warp-wasm/src/lib.rs` implements `validate_object_against_args` with full schema validation + 4 test cases. GitHub issue closed. 
## [#40: Unit tests for denials](https://github.com/flyingrobots/echo/issues/40) @@ -259,22 +255,22 @@ This living list documents open issues and the inferred dependencies contributor ## [#86: C header + host loader](https://github.com/flyingrobots/echo/issues/86) -- Status: In Progress +- Status: Closed (Graveyard: C ABI abandoned for determinism) - (No detected dependencies) ## [#87: Version negotiation](https://github.com/flyingrobots/echo/issues/87) -- Status: Open +- Status: Closed (Graveyard: C ABI abandoned for determinism) - (No detected dependencies) ## [#88: Capability tokens](https://github.com/flyingrobots/echo/issues/88) -- Status: Open +- Status: Closed (Graveyard: C ABI abandoned for determinism) - (No detected dependencies) ## [#89: Example plugin + tests](https://github.com/flyingrobots/echo/issues/89) -- Status: Open +- Status: Closed (Graveyard: C ABI abandoned for determinism) - (No detected dependencies) ## [#103: Policy: Require PR↔Issue linkage and 'Closes #…' in PRs](https://github.com/flyingrobots/echo/issues/103) @@ -410,10 +406,179 @@ This living list documents open issues and the inferred dependencies contributor - Confidence: weak - Evidence: Inferred: TT3 task depends on TT2 MVP -## [#202: Spec: Provenance Payload (PP) v1 (canonical envelope for artifact lineage + signatures)](https://github.com/flyingrobots/echo/issues/202) +## [#202: Provenance Payload (PP) v1 — spec + implementation](https://github.com/flyingrobots/echo/issues/202) -- Status: In Progress -- (No detected dependencies) +- Status: Open — ACTIVE (prerequisite for time travel debugging) +- Evidence: Paper III (AION Foundations) provides full formal spec: Provenance Payloads, Boundary Transition Records (BTRs), payload monoid, slicing, wormholes. Lower-level infrastructure exists in `warp-core` (ProvenanceStore, WorldlineTickPatchV1, HashTriplet, AtomWrite) but the Paper III formalism is not yet connected. 
+- Blocks: + - [#170: TT1: StreamsFrame inspector support](https://github.com/flyingrobots/echo/issues/170) + - Confidence: strong + - Evidence: Time travel debugging requires provenance payloads for replay, slicing, and causal cone analysis. + +### Sub-tasks + +#### PP-1: Write SPEC-0005-provenance-payload.md + +Translate Paper III (AION Foundations) into a concrete engineering spec with wire format. + +**Requirements:** + +- R1: Define TickPatch record (rule-pack ID, accepted match keys, attachment deltas, commit flag, optional trace ρ) +- R2: Define ProvenancePayload as ordered sequence P = (μ₀, …, μₙ₋₁) with monoid structure +- R3: Define BoundaryEncoding B = (U₀, P) — initial state + payload +- R4: Define BTR envelope: (h_in, h_out, U₀, P, t, κ) with content-addressed hashing and authentication tag +- R5: Define In(μ)/Out(μ) — declared inputs/outputs per patch — and the provenance graph 𝕡 induced by them +- R6: Map to W3C PROV vocabulary (tick patch = Activity, values = Entity) +- R7: Specify canonical serialization format (deterministic CBOR or canonical JSON) + +**Acceptance Criteria:** + +- [ ] AC1: Spec document exists at `docs/spec/SPEC-0005-provenance-payload.md` +- [ ] AC2: All Paper III definitions (Def 3.1–3.9) have concrete field-level wire format +- [ ] AC3: Two worked examples: (a) 3-tick accumulator (Paper III §A), (b) branching fork +- [ ] AC4: Patch sufficiency checklist from Paper III Remark 3.3 is reproduced with Echo-specific field names +- [ ] AC5: Security posture section (tamper-evidence, not tamper-proof; hash + auth binding) + +**Est. Hours:** 6h + +--- + +#### PP-2: TickPatch type + Apply wiring + +Define the core TickPatch record in Rust and wire it to the existing engine tick logic. 
+ +**Requirements:** + +- R1: `TickPatch` struct capturing: rule-pack hash, accepted matches (content-addressed keys), attachment deltas (TickDelta), commit flag, optional trace +- R2: `Apply(state, patch) -> state` function that replays a single tick deterministically +- R3: Integrate with existing `WorldlineTickPatchV1` — either replace or bridge + +**Acceptance Criteria:** + +- [ ] AC1: `TickPatch` type defined in `warp-core` +- [ ] AC2: `Apply` function produces identical state to live engine execution for the same tick +- [ ] AC3: Round-trip test: run engine tick → extract TickPatch → Apply from prior state → assert identical post-state + +**Test Plan:** + +- **Goldens:** Bit-exact patch bytes for the motion demo rule (3 ticks) +- **Failures:** Corrupt patch (wrong rule-pack hash, missing match key, truncated delta) +- **Edges:** Empty tick (no matches), single-match tick, max-conflict-resolution tick +- **Fuzz:** proptest over random graph states + random rule applications → extract patch → replay → assert convergence + +**Est. Hours:** 10h + +--- + +#### PP-3: ProvenancePayload + monoid operations + +Implement the payload sequence type with composition (concatenation) and identity. 
+ +**Requirements:** + +- R1: `ProvenancePayload` wrapping `Vec` with monoid `compose(P, Q)` = concatenation +- R2: `BoundaryEncoding` struct: (initial_state: SnapshotHash, payload: ProvenancePayload) +- R3: `Replay(B) -> Worldline` iterator that applies patches sequentially +- R4: Payload serialization/deserialization (canonical byte format) + +**Acceptance Criteria:** + +- [ ] AC1: Monoid laws hold: `compose(P, empty) == P`, `compose(empty, P) == P`, associativity +- [ ] AC2: `Replay(U₀, P·Q)` produces same final state as `Replay(Replay(U₀, P).final, Q)` +- [ ] AC3: Serialized payload round-trips bit-exactly + +**Test Plan:** + +- **Goldens:** Canonical bytes for known payloads (motion demo, 5-tick sequence) +- **Failures:** Payload with patch for wrong state (Apply should fail gracefully) +- **Edges:** Empty payload, single-patch payload, 1000-patch payload +- **Fuzz:** proptest compose random payloads → assert monoid laws + +**Est. Hours:** 6h + +--- + +#### PP-4: Boundary Transition Record (BTR) + +Implement the tamper-evident packaging format from Paper III §3.3. 
+ +**Requirements:** + +- R1: `BTR` struct: (h_in: Hash, h_out: Hash, initial_state: U₀, payload: P, counter: u64, auth_tag: Vec) +- R2: Content-addressed hashing for h_in and h_out (domain-separated, consistent with Lock the Hashes) +- R3: Authentication tag computation (HMAC-SHA256 or Ed25519 signature binding all fields) +- R4: BTR verification: recompute h_out from replay and compare + +**Acceptance Criteria:** + +- [ ] AC1: BTR creation from a completed worldline segment +- [ ] AC2: BTR verification succeeds for valid records +- [ ] AC3: BTR verification fails for any single-bit mutation in any field +- [ ] AC4: BTR indexable by h_in and h_out for content-addressed storage + +**Test Plan:** + +- **Goldens:** Known BTR bytes for motion demo (3-tick worldline) +- **Failures:** Tampered h_out, tampered payload, tampered auth_tag, swapped h_in/h_out +- **Edges:** Zero-tick BTR (h_in == h_out), single-tick BTR, BTR at counter=u64::MAX +- **Fuzz:** proptest mutate random byte positions in serialized BTR → assert verification fails + +**Est. Hours:** 8h + +--- + +#### PP-5: Provenance graph + derivation graph D(v) + +Build the backward causal cone data structure from Paper III §3.4–3.5. 
+ +**Requirements:** + +- R1: Track In(μ)/Out(μ) per TickPatch during replay +- R2: Build provenance graph 𝕡 = (V, E) from patch inputs/outputs +- R3: Compute derivation graph D(v) — backward reachable subgraph for any value v +- R4: Assert finiteness and acyclicity (Paper III Prop 3.4) + +**Acceptance Criteria:** + +- [ ] AC1: Provenance graph correctly captures all data-flow edges +- [ ] AC2: D(v) for a known value matches hand-computed expected cone +- [ ] AC3: Acyclicity assertion never fires for valid worldlines + +**Test Plan:** + +- **Goldens:** Hand-traced provenance graph for 3-tick accumulator example (Paper III §A) +- **Failures:** Malformed patch with cyclic In/Out declarations → assert acyclicity violation +- **Edges:** Value with no dependencies (initial state), value depending on all ticks +- **Fuzz:** proptest random worldlines → build provenance graph → assert acyclicity + backward completeness + +**Est. Hours:** 8h + +--- + +#### PP-6: Slice payloads (partial materialization) + +Implement causal-cone slicing from Paper III §4. + +**Requirements:** + +- R1: Given target value v and full payload P, compute slice payload P|D(v) +- R2: Replaying P|D(v) from U₀ reconstructs v with the same value as full replay +- R3: Slice is minimal: no patch in P|D(v) can be removed without breaking reconstruction + +**Acceptance Criteria:** + +- [ ] AC1: Slice payload for accumulator example matches Paper III worked example +- [ ] AC2: Slice replay produces identical target value to full replay +- [ ] AC3: Slice is strictly smaller than or equal to full payload + +**Test Plan:** + +- **Goldens:** Slice bytes for known target values in motion demo +- **Failures:** Slice with removed patch → assert replay diverges or fails +- **Edges:** Target value that depends on all patches (slice == full), target in initial state (slice == empty) +- **Fuzz:** proptest random worldlines + random target values → slice → replay → assert value match + +**Est. 
Hours:** 6h ## [#203: TT1: Constraint Lens panel (admission/scheduler explain-why + counterfactual sliders)](https://github.com/flyingrobots/echo/issues/203) @@ -497,7 +662,7 @@ This living list documents open issues and the inferred dependencies contributor ## [#231: Demo 3: Tumble Tower — Stage 0 physics (2D AABB stacking)](https://github.com/flyingrobots/echo/issues/231) -- Status: In Progress +- Status: Open (unscheduled — future milestone) - Blocks: - [#238: Demo 3: Tumble Tower — docs course (physics ladder)](https://github.com/flyingrobots/echo/issues/238) - Confidence: medium @@ -505,7 +670,7 @@ This living list documents open issues and the inferred dependencies contributor - [#232: Demo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts)](https://github.com/flyingrobots/echo/issues/232) - Confidence: strong - Evidence: Inferred: Stage 1 physics depends on Stage 0 -- Evidence: `crates/warp-geom` implements primitives (AABB, Transform), but solver logic for "stacking" is not yet visible in the top-level modules. +- Evidence: `crates/warp-geom` implements geometric primitives (AABB, Transform, broad-phase detection) but no physics simulation code exists: zero gravity, zero solver, zero contact resolution. Status corrected from "In Progress" to "Open" (2026-03-03). 
## [#232: Demo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts)](https://github.com/flyingrobots/echo/issues/232) diff --git a/crates/warp-cli/Cargo.toml b/crates/warp-cli/Cargo.toml index ed35864d..a91bae04 100644 --- a/crates/warp-cli/Cargo.toml +++ b/crates/warp-cli/Cargo.toml @@ -5,11 +5,28 @@ name = "warp-cli" version = "0.1.0" edition = "2021" rust-version = "1.90.0" -description = "Echo CLI: demos, benches, inspector launcher (future)" +description = "Echo developer CLI: verify, bench, inspect" license = "Apache-2.0" repository = "https://github.com/flyingrobots/echo" readme = "README.md" keywords = ["echo", "cli", "ecs"] categories = ["command-line-utilities"] +[[bin]] +name = "echo-cli" +path = "src/main.rs" + [dependencies] +anyhow = "1" +bytes = "1" +clap = { version = "4", features = ["derive"] } +comfy-table = "7" +hex = "0.4" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +warp-core = { workspace = true } + +[dev-dependencies] +assert_cmd = "2" +predicates = "3" +tempfile = "3" diff --git a/crates/warp-cli/README.md b/crates/warp-cli/README.md index dc41c7d4..e5356fbd 100644 --- a/crates/warp-cli/README.md +++ b/crates/warp-cli/README.md @@ -1,21 +1,80 @@ -# warp-cli -Placeholder CLI for Echo tooling. Subcommands will be added as the engine matures. +# echo-cli -See the repository root `README.md` for project context. +Developer CLI for the Echo deterministic simulation engine. -## What this crate does +## Installation -- Provides a home for command-line entrypoints into Echo tooling: - - future subcommands for running the engine, inspecting WARPs, driving the - session service, etc. -- Currently a placeholder; behavior will be fleshed out alongside engine and - tooling milestones. +```sh +cargo install --path crates/warp-cli +``` + +The binary is named `echo-cli`. + +## Subcommands + +### `echo-cli verify ` + +Validate WSC snapshot integrity. 
Loads the file, validates structure, reconstructs the graph, and computes state root hashes. + +```sh +# Verify a snapshot +echo-cli verify state.wsc + +# Verify against a known hash (warp 0 only; additional warps report "unchecked") +echo-cli verify state.wsc --expected abcd1234... + +# JSON output +echo-cli --format json verify state.wsc +``` + +### `echo-cli bench [--filter ]` + +Run Criterion benchmarks, parse JSON results, and format as an ASCII table. + +```sh +# Run all benchmarks +echo-cli bench + +# Filter by name +echo-cli bench --filter hotpath + +# JSON output for CI +echo-cli --format json bench +``` + +### `echo-cli inspect [--tree]` + +Display WSC snapshot metadata and graph statistics. + +```sh +# Show metadata and stats +echo-cli inspect state.wsc + +# Include ASCII tree of graph structure +echo-cli inspect state.wsc --tree + +# JSON output +echo-cli --format json inspect state.wsc +``` + +## Global Flags + +- `--format text|json` — Output format (default: `text`). Can appear before or after the subcommand. +- `--help` — Show help. +- `--version` — Show version. + +## Man Pages + +Generate man pages via xtask: + +```sh +cargo xtask man-pages +# Output: docs/man/echo-cli.1, echo-cli-verify.1, etc. +``` ## Documentation -- For now, see the root `README.md` and the Echo book (`docs/book/echo/`) for - the overall architecture and planned CLI roles (runtime control, debugging, - inspection). +See the root `README.md` and `docs/spec/` for architecture context. diff --git a/crates/warp-cli/src/bench.rs b/crates/warp-cli/src/bench.rs new file mode 100644 index 00000000..e9eae988 --- /dev/null +++ b/crates/warp-cli/src/bench.rs @@ -0,0 +1,373 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! `echo-cli bench` — run benchmarks and format results. +//! +//! Shells out to `cargo bench -p warp-benches`, parses Criterion JSON from +//! `target/criterion/*/new/estimates.json`, and renders an ASCII table or +//! JSON array. 
+ +use std::path::Path; +use std::process::Command; + +use anyhow::{bail, Context, Result}; +use comfy_table::{ContentArrangement, Table}; +use serde::{Deserialize, Serialize}; + +use crate::cli::OutputFormat; +use crate::output::emit; + +/// Parsed benchmark result from Criterion's `estimates.json`. +#[derive(Debug, Clone, Serialize)] +pub(crate) struct BenchResult { + pub(crate) name: String, + pub(crate) mean_ns: f64, + pub(crate) median_ns: f64, + pub(crate) stddev_ns: f64, +} + +/// Raw Criterion estimates JSON structure. +#[derive(Debug, Deserialize)] +pub(crate) struct CriterionEstimates { + pub(crate) mean: Estimate, + pub(crate) median: Estimate, + pub(crate) std_dev: Estimate, +} + +/// A single Criterion estimate. +#[derive(Debug, Deserialize)] +pub(crate) struct Estimate { + pub(crate) point_estimate: f64, +} + +/// Describes a process exit caused by a signal (Unix) or unknown termination. +fn format_signal(status: &std::process::ExitStatus) -> String { + #[cfg(unix)] + { + use std::os::unix::process::ExitStatusExt; + match status.signal() { + Some(sig) => format!("killed by signal {sig}"), + None => "unknown termination".to_string(), + } + } + #[cfg(not(unix))] + { + let _ = status; + "unknown termination".to_string() + } +} + +/// Builds the `cargo bench` command with optional Criterion regex filter. +pub(crate) fn build_bench_command(filter: Option<&str>) -> Command { + let mut cmd = Command::new("cargo"); + cmd.args(["bench", "-p", "warp-benches"]); + + if let Some(f) = filter { + cmd.args(["--", f]); + } + + // Inherit stdout/stderr so Criterion progress is visible. + cmd.stdout(std::process::Stdio::inherit()); + cmd.stderr(std::process::Stdio::inherit()); + + cmd +} + +/// Runs the bench subcommand. +pub(crate) fn run(filter: Option<&str>, format: &OutputFormat) -> Result<()> { + // 1. Shell out to cargo bench. 
+ let mut cmd = build_bench_command(filter); + + let status = cmd + .status() + .context("failed to run cargo bench (is cargo available?)")?; + + if !status.success() { + let code_desc = match status.code() { + Some(code) => format!("exit code {code}"), + None => format_signal(&status), + }; + bail!("cargo bench failed: {code_desc}"); + } + + // 2. Parse Criterion JSON results. + let results = collect_criterion_results(Path::new("target/criterion"), filter)?; + + if results.is_empty() { + let text = "No benchmark results found.\n"; + let json = serde_json::json!({ "benchmarks": [], "message": "no results found" }); + emit(format, text, &json); + return Ok(()); + } + + // 3. Format output. + let text = format_table(&results); + let json = serde_json::to_value(&results).context("failed to serialize bench results")?; + let json = serde_json::json!({ "benchmarks": json }); + + emit(format, &text, &json); + Ok(()) +} + +/// Scans `target/criterion/*/new/estimates.json` for benchmark results. +pub(crate) fn collect_criterion_results( + criterion_dir: &Path, + filter: Option<&str>, +) -> Result> { + let mut results = Vec::new(); + + if !criterion_dir.is_dir() { + return Ok(results); + } + + let entries = std::fs::read_dir(criterion_dir) + .with_context(|| format!("failed to read {}", criterion_dir.display()))?; + + for entry in entries { + let entry = entry?; + let path = entry.path(); + + if !path.is_dir() { + continue; + } + + let bench_name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("") + .to_string(); + + // Skip Criterion metadata directories. + if bench_name.starts_with('.') || bench_name == "report" { + continue; + } + + // Apply filter if specified. 
+ if let Some(f) = filter { + if !bench_name.contains(f) { + continue; + } + } + + let estimates_path = path.join("new").join("estimates.json"); + if !estimates_path.is_file() { + continue; + } + + match parse_estimates(&bench_name, &estimates_path) { + Ok(result) => results.push(result), + Err(e) => eprintln!("warning: skipping {bench_name}: {e:#}"), + } + } + + results.sort_by(|a, b| a.name.cmp(&b.name)); + Ok(results) +} + +/// Parses a single `estimates.json` file into a `BenchResult`. +pub(crate) fn parse_estimates(name: &str, path: &Path) -> Result { + let content = std::fs::read_to_string(path) + .with_context(|| format!("failed to read {}", path.display()))?; + let estimates: CriterionEstimates = serde_json::from_str(&content) + .with_context(|| format!("failed to parse {}", path.display()))?; + + Ok(BenchResult { + name: name.to_string(), + mean_ns: estimates.mean.point_estimate, + median_ns: estimates.median.point_estimate, + stddev_ns: estimates.std_dev.point_estimate, + }) +} + +/// Formats benchmark results as an ASCII table. +pub(crate) fn format_table(results: &[BenchResult]) -> String { + let mut table = Table::new(); + table.set_content_arrangement(ContentArrangement::Dynamic); + table.set_header(vec!["Benchmark", "Mean", "Median", "Std Dev"]); + + for r in results { + table.add_row(vec![ + r.name.clone(), + format_duration(r.mean_ns), + format_duration(r.median_ns), + format_duration(r.stddev_ns), + ]); + } + + format!("{table}\n") +} + +/// Formats nanosecond durations in human-readable form. 
+fn format_duration(ns: f64) -> String { + if ns.is_nan() || ns < 0.0 { + return "N/A".to_string(); + } + if ns >= 1_000_000_000.0 { + format!("{:.2} s", ns / 1_000_000_000.0) + } else if ns >= 1_000_000.0 { + format!("{:.2} ms", ns / 1_000_000.0) + } else if ns >= 1_000.0 { + #[allow(clippy::unicode_not_nfc)] + { + format!("{:.2} \u{00b5}s", ns / 1_000.0) + } + } else { + format!("{:.2} ns", ns) + } +} + +#[cfg(test)] +#[allow(clippy::expect_used, clippy::unwrap_used)] +mod tests { + use super::*; + use std::fs; + + fn make_estimates_json(mean: f64, median: f64, stddev: f64) -> String { + serde_json::json!({ + "mean": { "confidence_interval": { "confidence_level": 0.95, "lower_bound": mean - 10.0, "upper_bound": mean + 10.0 }, "point_estimate": mean, "standard_error": 1.0 }, + "median": { "confidence_interval": { "confidence_level": 0.95, "lower_bound": median - 10.0, "upper_bound": median + 10.0 }, "point_estimate": median, "standard_error": 1.0 }, + "std_dev": { "confidence_interval": { "confidence_level": 0.95, "lower_bound": stddev - 1.0, "upper_bound": stddev + 1.0 }, "point_estimate": stddev, "standard_error": 0.5 }, + "median_abs_dev": { "confidence_interval": { "confidence_level": 0.95, "lower_bound": 0.0, "upper_bound": 10.0 }, "point_estimate": 5.0, "standard_error": 1.0 }, + "slope": null + }) + .to_string() + } + + #[test] + fn parse_mock_criterion_json() { + let dir = tempfile::tempdir().unwrap(); + let bench_dir = dir.path().join("my_bench").join("new"); + fs::create_dir_all(&bench_dir).unwrap(); + + let estimates = make_estimates_json(1_234_567.0, 1_200_000.0, 50_000.0); + fs::write(bench_dir.join("estimates.json"), &estimates).unwrap(); + + let results = collect_criterion_results(dir.path(), None).unwrap(); + assert_eq!(results.len(), 1); + assert_eq!(results[0].name, "my_bench"); + assert!((results[0].mean_ns - 1_234_567.0).abs() < 0.01); + assert!((results[0].median_ns - 1_200_000.0).abs() < 0.01); + assert!((results[0].stddev_ns - 
50_000.0).abs() < 0.01); + } + + #[test] + fn table_formatter_produces_expected_output() { + let results = vec![ + BenchResult { + name: "tick_pipeline".to_string(), + mean_ns: 1_230_000.0, + median_ns: 1_210_000.0, + stddev_ns: 120_000.0, + }, + BenchResult { + name: "materialize".to_string(), + mean_ns: 456_700.0, + median_ns: 450_200.0, + stddev_ns: 32_100.0, + }, + ]; + + let table = format_table(&results); + assert!( + table.contains("tick_pipeline"), + "table should contain bench name" + ); + assert!( + table.contains("1.23 ms"), + "table should contain formatted mean" + ); + assert!(table.contains("Benchmark"), "table should have header"); + } + + #[test] + fn json_output_is_valid_json() { + let results = vec![BenchResult { + name: "test".to_string(), + mean_ns: 100.0, + median_ns: 95.0, + stddev_ns: 5.0, + }]; + + let json = serde_json::to_value(&results).unwrap(); + assert!(json.is_array()); + assert_eq!(json.as_array().unwrap().len(), 1); + assert_eq!(json[0]["name"], "test"); + } + + #[test] + fn filter_applies_correctly() { + let dir = tempfile::tempdir().unwrap(); + + for name in &["alpha_bench", "beta_bench", "gamma_bench"] { + let bench_dir = dir.path().join(name).join("new"); + fs::create_dir_all(&bench_dir).unwrap(); + let est = make_estimates_json(1000.0, 1000.0, 10.0); + fs::write(bench_dir.join("estimates.json"), &est).unwrap(); + } + + let results = collect_criterion_results(dir.path(), Some("beta")).unwrap(); + assert_eq!(results.len(), 1); + assert_eq!(results[0].name, "beta_bench"); + } + + #[test] + fn no_results_returns_empty_vec() { + let dir = tempfile::tempdir().unwrap(); + let results = collect_criterion_results(dir.path(), None).unwrap(); + assert!(results.is_empty()); + } + + #[test] + fn format_duration_scales() { + assert_eq!(format_duration(500.0), "500.00 ns"); + assert_eq!(format_duration(1_500.0), "1.50 \u{00b5}s"); + assert_eq!(format_duration(1_500_000.0), "1.50 ms"); + assert_eq!(format_duration(1_500_000_000.0), "1.50 s"); 
+ } + + #[test] + fn format_duration_nan_returns_na() { + assert_eq!(format_duration(f64::NAN), "N/A"); + } + + #[test] + fn format_duration_negative_returns_na() { + assert_eq!(format_duration(-1.0), "N/A"); + } + + #[test] + fn nonexistent_criterion_dir_returns_empty() { + let results = collect_criterion_results(Path::new("/nonexistent/criterion"), None).unwrap(); + assert!(results.is_empty()); + } + + #[test] + fn build_bench_command_with_filter_passes_criterion_regex() { + let cmd = build_bench_command(Some("hotpath")); + let args: Vec<&std::ffi::OsStr> = cmd.get_args().collect(); + // Filter should appear after "--" (Criterion regex), not "--bench" (cargo target). + assert!( + args.contains(&std::ffi::OsStr::new("--")), + "command should contain '--' separator" + ); + assert!( + args.contains(&std::ffi::OsStr::new("hotpath")), + "command should contain filter pattern" + ); + // Ensure "--bench" is NOT used for filter. + let bench_pos = args.iter().position(|a| *a == "--bench"); + assert!( + bench_pos.is_none(), + "command should not use --bench for filter" + ); + } + + #[test] + fn build_bench_command_without_filter_omits_separator() { + let cmd = build_bench_command(None); + let args: Vec<&std::ffi::OsStr> = cmd.get_args().collect(); + assert!( + !args.contains(&std::ffi::OsStr::new("--")), + "command without filter should not contain '--'" + ); + } +} diff --git a/crates/warp-cli/src/cli.rs b/crates/warp-cli/src/cli.rs new file mode 100644 index 00000000..d0c795a8 --- /dev/null +++ b/crates/warp-cli/src/cli.rs @@ -0,0 +1,179 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! CLI type definitions for `echo-cli`. +//! +//! Extracted into a separate module for testability — `try_parse_from` lets +//! us verify argument parsing without spawning processes. + +use std::path::PathBuf; + +use clap::{Parser, Subcommand, ValueEnum}; + +/// Echo developer CLI. 
+#[derive(Parser, Debug)] +#[command( + name = "echo-cli", + about = "Echo developer CLI", + version, + disable_help_subcommand = true +)] +pub struct Cli { + /// Output format (text or json). + #[arg(long, global = true, default_value = "text", value_enum)] + pub format: OutputFormat, + + /// Subcommand to execute. + #[command(subcommand)] + pub command: Commands, +} + +/// Available subcommands. +#[derive(Subcommand, Debug)] +pub enum Commands { + /// Verify hash integrity of a WSC snapshot. + Verify { + /// Path to WSC snapshot file. + snapshot: PathBuf, + + /// Expected state root hash (hex) for warp 0 only; additional warps + /// report "unchecked". + #[arg(long)] + expected: Option, + }, + + /// Run benchmarks and format results. + Bench { + /// Filter benchmarks by pattern. + #[arg(long)] + filter: Option, + }, + + /// Inspect a WSC snapshot. + Inspect { + /// Path to WSC snapshot file. + snapshot: PathBuf, + + /// Show ASCII tree of graph structure. + #[arg(long)] + tree: bool, + }, +} + +/// Output format selector. +#[derive(Clone, Debug, Default, PartialEq, Eq, ValueEnum)] +pub enum OutputFormat { + /// Human-readable text output. + #[default] + Text, + /// Machine-readable JSON output. + Json, +} + +#[cfg(test)] +#[allow(clippy::expect_used, clippy::unwrap_used)] +mod tests { + use super::*; + + #[test] + fn parse_verify_with_snapshot_path() { + let cli = Cli::try_parse_from(["echo-cli", "verify", "state.wsc"]).unwrap(); + match cli.command { + Commands::Verify { + ref snapshot, + ref expected, + } => { + assert_eq!(snapshot, &PathBuf::from("state.wsc")); + assert!(expected.is_none()); + } + _ => panic!("expected Verify command"), + } + assert_eq!(cli.format, OutputFormat::Text); + } + + #[test] + fn parse_verify_with_expected_hash() { + let cli = + Cli::try_parse_from(["echo-cli", "verify", "state.wsc", "--expected", "abcd1234"]) + .unwrap(); + match cli.command { + Commands::Verify { ref expected, .. 
} => { + assert_eq!(expected.as_deref(), Some("abcd1234")); + } + _ => panic!("expected Verify command"), + } + } + + #[test] + fn format_json_before_subcommand() { + let cli = + Cli::try_parse_from(["echo-cli", "--format", "json", "verify", "test.wsc"]).unwrap(); + assert_eq!(cli.format, OutputFormat::Json); + assert!(matches!(cli.command, Commands::Verify { .. })); + } + + #[test] + fn format_json_after_subcommand() { + let cli = + Cli::try_parse_from(["echo-cli", "verify", "test.wsc", "--format", "json"]).unwrap(); + assert_eq!(cli.format, OutputFormat::Json); + } + + #[test] + fn parse_bench_no_filter() { + let cli = Cli::try_parse_from(["echo-cli", "bench"]).unwrap(); + match cli.command { + Commands::Bench { ref filter } => assert!(filter.is_none()), + _ => panic!("expected Bench command"), + } + } + + #[test] + fn parse_bench_with_filter() { + let cli = Cli::try_parse_from(["echo-cli", "bench", "--filter", "hotpath"]).unwrap(); + match cli.command { + Commands::Bench { ref filter } => { + assert_eq!(filter.as_deref(), Some("hotpath")); + } + _ => panic!("expected Bench command"), + } + } + + #[test] + fn parse_inspect_basic() { + let cli = Cli::try_parse_from(["echo-cli", "inspect", "state.wsc"]).unwrap(); + match cli.command { + Commands::Inspect { ref snapshot, tree } => { + assert_eq!(snapshot, &PathBuf::from("state.wsc")); + assert!(!tree); + } + _ => panic!("expected Inspect command"), + } + } + + #[test] + fn parse_inspect_with_tree() { + let cli = Cli::try_parse_from(["echo-cli", "inspect", "state.wsc", "--tree"]).unwrap(); + match cli.command { + Commands::Inspect { tree, .. 
} => assert!(tree), + _ => panic!("expected Inspect command"), + } + } + + #[test] + fn unknown_subcommand_is_error() { + let result = Cli::try_parse_from(["echo-cli", "bogus"]); + assert!(result.is_err()); + } + + #[test] + fn no_subcommand_is_error() { + let result = Cli::try_parse_from(["echo-cli"]); + assert!(result.is_err()); + } + + #[test] + fn default_format_is_text() { + let cli = Cli::try_parse_from(["echo-cli", "bench"]).unwrap(); + assert_eq!(cli.format, OutputFormat::Text); + } +} diff --git a/crates/warp-cli/src/inspect.rs b/crates/warp-cli/src/inspect.rs new file mode 100644 index 00000000..aed1162c --- /dev/null +++ b/crates/warp-cli/src/inspect.rs @@ -0,0 +1,517 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! `echo-cli inspect` — display WSC snapshot metadata and graph statistics. +//! +//! Prints metadata (tick count, schema hash, warp count), graph statistics +//! (node/edge counts, type breakdown, connected components), and an optional +//! ASCII tree rendering of the graph structure. + +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}; +use std::path::Path; + +use anyhow::{Context, Result}; +use serde::Serialize; + +use warp_core::wsc::view::WarpView; +use warp_core::wsc::{validate_wsc, WscFile}; + +use crate::cli::OutputFormat; +use crate::output::{emit, hex_hash, short_hex}; +use crate::wsc_loader::graph_store_from_warp_view; + +/// Metadata section of the inspect report. +#[derive(Debug, Serialize)] +pub(crate) struct Metadata { + pub(crate) file: String, + pub(crate) tick: u64, + pub(crate) schema_hash: String, + pub(crate) warp_count: usize, +} + +/// Per-warp statistics. 
+#[derive(Debug, Serialize)] +pub(crate) struct WarpStats { + pub(crate) warp_id: String, + pub(crate) root_node_id: String, + pub(crate) state_root: String, + pub(crate) total_nodes: usize, + pub(crate) total_edges: usize, + pub(crate) node_types: BTreeMap, + pub(crate) edge_types: BTreeMap, + pub(crate) connected_components: usize, +} + +/// Full inspect report. +#[derive(Debug, Serialize)] +pub(crate) struct InspectReport { + pub(crate) metadata: Metadata, + pub(crate) warps: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) tree: Option>, +} + +/// A node in the ASCII tree rendering. +#[derive(Debug, Serialize)] +pub(crate) struct TreeNode { + pub(crate) warp_index: usize, + pub(crate) depth: usize, + pub(crate) node_id: String, + pub(crate) node_type: String, + pub(crate) children: Vec, +} + +/// Maximum depth for ASCII tree rendering. +const TREE_MAX_DEPTH: usize = 5; + +/// Runs the inspect subcommand. +pub(crate) fn run(snapshot: &Path, show_tree: bool, format: &OutputFormat) -> Result<()> { + let file = WscFile::open(snapshot) + .with_context(|| format!("failed to open WSC file: {}", snapshot.display()))?; + + validate_wsc(&file) + .with_context(|| format!("WSC validation failed: {}", snapshot.display()))?; + + let metadata = Metadata { + file: snapshot.display().to_string(), + tick: file.tick(), + schema_hash: hex_hash(file.schema_hash()), + warp_count: file.warp_count(), + }; + + let mut warp_stats = Vec::with_capacity(file.warp_count()); + let mut trees = if show_tree { Some(Vec::new()) } else { None }; + + for i in 0..file.warp_count() { + let view = file + .warp_view(i) + .with_context(|| format!("failed to read warp {i}"))?; + + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root); + warp_stats.push(stats); + + if let Some(ref mut tree_list) = trees { + let tree = build_tree(&view, i, TREE_MAX_DEPTH); + tree_list.push(tree); + } + } + + 
let report = InspectReport { + metadata, + warps: warp_stats, + tree: trees.map(|t| t.into_iter().flatten().collect()), + }; + + let text = format_text_report(&report); + let json = serde_json::to_value(&report).context("failed to serialize inspect report")?; + emit(format, &text, &json); + + Ok(()) +} + +fn compute_stats(view: &WarpView<'_>, state_root: &[u8; 32]) -> WarpStats { + let nodes = view.nodes(); + let edges = view.edges(); + + // Type breakdown. + let mut node_types: BTreeMap = BTreeMap::new(); + for n in nodes { + *node_types.entry(short_hex(&n.node_type)).or_default() += 1; + } + + let mut edge_types: BTreeMap = BTreeMap::new(); + for e in edges { + *edge_types.entry(short_hex(&e.edge_type)).or_default() += 1; + } + + // Connected components via BFS. + let connected_components = count_connected_components(view); + + WarpStats { + warp_id: hex_hash(view.warp_id()), + root_node_id: hex_hash(view.root_node_id()), + state_root: hex_hash(state_root), + total_nodes: nodes.len(), + total_edges: edges.len(), + node_types, + edge_types, + connected_components, + } +} + +/// Counts connected components using BFS on the undirected graph. +fn count_connected_components(view: &WarpView<'_>) -> usize { + let nodes = view.nodes(); + if nodes.is_empty() { + return 0; + } + + // Build adjacency from edges (undirected). + // HashMap/HashSet: this is CLI-only code, not the deterministic engine. + let mut adjacency: HashMap<[u8; 32], HashSet<[u8; 32]>> = HashMap::new(); + for n in nodes { + adjacency.entry(n.node_id).or_default(); + } + for e in view.edges() { + adjacency + .entry(e.from_node_id) + .or_default() + .insert(e.to_node_id); + adjacency + .entry(e.to_node_id) + .or_default() + .insert(e.from_node_id); + } + + let mut visited: HashSet<[u8; 32]> = HashSet::new(); + let mut components = 0; + + for node in nodes { + if visited.contains(&node.node_id) { + continue; + } + + // BFS from this node. 
+ let mut queue = VecDeque::new(); + queue.push_back(node.node_id); + visited.insert(node.node_id); + + while let Some(current) = queue.pop_front() { + if let Some(neighbors) = adjacency.get(¤t) { + for &neighbor in neighbors { + if visited.insert(neighbor) { + queue.push_back(neighbor); + } + } + } + } + + components += 1; + } + + components +} + +/// Builds an ASCII tree from the root node, depth-limited. +fn build_tree(view: &WarpView<'_>, warp_index: usize, max_depth: usize) -> Vec { + let root_id = *view.root_node_id(); + let root_ix = match view.node_ix(&root_id) { + Some(ix) => ix, + None => return vec![], + }; + + let root_node = &view.nodes()[root_ix]; + let mut visited = BTreeSet::new(); + visited.insert(root_id); + + vec![build_tree_node( + view, + warp_index, + &root_id, + &root_node.node_type, + 0, + max_depth, + &mut visited, + )] +} + +fn build_tree_node( + view: &WarpView<'_>, + warp_index: usize, + node_id: &[u8; 32], + node_type: &[u8; 32], + depth: usize, + max_depth: usize, + visited: &mut BTreeSet<[u8; 32]>, +) -> TreeNode { + let mut children = Vec::new(); + + if depth < max_depth { + if let Some(node_ix) = view.node_ix(node_id) { + let out_edges = view.out_edges_for_node(node_ix); + for out_edge in out_edges { + let edge_ix = out_edge.edge_ix() as usize; + if edge_ix < view.edges().len() { + let edge = &view.edges()[edge_ix]; + let to_id = edge.to_node_id; + + if visited.insert(to_id) { + if let Some(to_ix) = view.node_ix(&to_id) { + let to_node = &view.nodes()[to_ix]; + children.push(build_tree_node( + view, + warp_index, + &to_id, + &to_node.node_type, + depth + 1, + max_depth, + visited, + )); + } + } + } + } + } + } + + TreeNode { + warp_index, + depth, + node_id: short_hex(node_id), + node_type: short_hex(node_type), + children, + } +} + +fn format_text_report(report: &InspectReport) -> String { + use std::fmt::Write; + + let mut out = String::new(); + let _ = writeln!(out, "echo-cli inspect"); + let _ = writeln!(out, " File: {}", 
report.metadata.file); + let _ = writeln!(out, " Tick: {}", report.metadata.tick); + let _ = writeln!(out, " Schema: {}", report.metadata.schema_hash); + let _ = writeln!(out, " Warps: {}", report.metadata.warp_count); + let _ = writeln!(out); + + for (i, w) in report.warps.iter().enumerate() { + let _ = writeln!(out, " Warp {i}:"); + let _ = writeln!(out, " ID: {}", w.warp_id); + let _ = writeln!(out, " Root node: {}", w.root_node_id); + let _ = writeln!(out, " State root: {}", w.state_root); + let _ = writeln!(out, " Nodes: {}", w.total_nodes); + let _ = writeln!(out, " Edges: {}", w.total_edges); + let _ = writeln!(out, " Components: {}", w.connected_components); + + if !w.node_types.is_empty() { + let _ = writeln!(out, " Node types:"); + for (ty, count) in &w.node_types { + let _ = writeln!(out, " {ty}: {count}"); + } + } + + if !w.edge_types.is_empty() { + let _ = writeln!(out, " Edge types:"); + for (ty, count) in &w.edge_types { + let _ = writeln!(out, " {ty}: {count}"); + } + } + let _ = writeln!(out); + } + + if let Some(ref tree) = report.tree { + let multi_warp = report.metadata.warp_count > 1; + let mut current_warp: Option = None; + for node in tree { + if multi_warp && (current_warp != Some(node.warp_index)) { + let _ = writeln!(out, " Tree (warp {}):", node.warp_index); + current_warp = Some(node.warp_index); + } else if !multi_warp && current_warp.is_none() { + let _ = writeln!(out, " Tree:"); + current_warp = Some(0); + } + format_tree_node(&mut out, node, "", true); + } + let _ = writeln!(out); + } + + out +} + +fn format_tree_node(out: &mut String, node: &TreeNode, prefix: &str, is_last: bool) { + use std::fmt::Write; + + let connector = if node.depth == 0 { + "" + } else if is_last { + "\u{2514}\u{2500}\u{2500} " + } else { + "\u{251c}\u{2500}\u{2500} " + }; + + let _ = writeln!( + out, + " {prefix}{connector}[{}] type={}", + node.node_id, node.node_type + ); + + let child_prefix = if node.depth == 0 { + String::new() + } else if is_last { + 
format!("{prefix} ") + } else { + format!("{prefix}\u{2502} ") + }; + + for (i, child) in node.children.iter().enumerate() { + let last = i == node.children.len() - 1; + format_tree_node(out, child, &child_prefix, last); + } +} + +#[cfg(test)] +#[allow(clippy::expect_used, clippy::unwrap_used)] +mod tests { + use super::*; + use std::io::Write as IoWrite; + use tempfile::NamedTempFile; + use warp_core::wsc::build::build_one_warp_input; + use warp_core::wsc::write::write_wsc_one_warp; + use warp_core::{ + make_edge_id, make_node_id, make_type_id, make_warp_id, EdgeRecord, GraphStore, NodeRecord, + }; + + fn make_test_graph() -> (GraphStore, warp_core::NodeId) { + let warp = make_warp_id("test"); + let node_ty = make_type_id("Actor"); + let child_ty = make_type_id("Item"); + let edge_ty = make_type_id("HasItem"); + let root = make_node_id("root"); + let child1 = make_node_id("child1"); + let child2 = make_node_id("child2"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.insert_node(child1, NodeRecord { ty: child_ty }); + store.insert_node(child2, NodeRecord { ty: child_ty }); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child1"), + from: root, + to: child1, + ty: edge_ty, + }, + ); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child2"), + from: root, + to: child2, + ty: edge_ty, + }, + ); + + (store, root) + } + + fn make_test_wsc() -> Vec { + let (store, root) = make_test_graph(); + let input = build_one_warp_input(&store, root); + write_wsc_one_warp(&input, [0u8; 32], 42).expect("WSC write") + } + + fn write_temp_wsc(data: &[u8]) -> NamedTempFile { + let mut f = NamedTempFile::new().expect("tempfile"); + f.write_all(data).expect("write"); + f.flush().expect("flush"); + f + } + + #[test] + fn metadata_fields_present() { + let wsc = make_test_wsc(); + let f = write_temp_wsc(&wsc); + let result = run(f.path(), false, &OutputFormat::Text); + 
assert!(result.is_ok()); + } + + #[test] + fn type_breakdown_sums_to_total() { + let wsc = make_test_wsc(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root); + + let node_type_sum: usize = stats.node_types.values().sum(); + assert_eq!(node_type_sum, stats.total_nodes); + + let edge_type_sum: usize = stats.edge_types.values().sum(); + assert_eq!(edge_type_sum, stats.total_edges); + } + + #[test] + fn tree_shows_root_at_depth_zero() { + let wsc = make_test_wsc(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + + let tree = build_tree(&view, 0, 5); + assert!(!tree.is_empty()); + assert_eq!(tree[0].depth, 0); + assert_eq!(tree[0].warp_index, 0); + } + + #[test] + fn tree_shows_children_indented() { + let wsc = make_test_wsc(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + + let tree = build_tree(&view, 0, 5); + assert!(!tree.is_empty()); + // Root should have children from edges. + assert!(!tree[0].children.is_empty(), "root should have children"); + for child in &tree[0].children { + assert_eq!(child.depth, 1); + } + } + + #[test] + fn json_includes_metadata_and_stats() { + let wsc = make_test_wsc(); + let f = write_temp_wsc(&wsc); + // Verify JSON mode doesn't panic. 
+ let result = run(f.path(), false, &OutputFormat::Json); + assert!(result.is_ok()); + } + + #[test] + fn connected_components_single_graph() { + let wsc = make_test_wsc(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let components = count_connected_components(&view); + assert_eq!( + components, 1, + "single connected graph should have 1 component" + ); + } + + #[test] + fn connected_components_empty_graph() { + let warp = make_warp_id("test"); + let store = GraphStore::new(warp); + let zero_root = warp_core::NodeId([0u8; 32]); + let input = build_one_warp_input(&store, zero_root); + let wsc = write_wsc_one_warp(&input, [0u8; 32], 0).unwrap(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + assert_eq!(count_connected_components(&view), 0); + } + + #[test] + fn connected_components_disconnected_nodes() { + let warp = make_warp_id("test"); + let node_ty = make_type_id("Node"); + let a = make_node_id("a"); + let b = make_node_id("b"); + + let mut store = GraphStore::new(warp); + store.insert_node(a, NodeRecord { ty: node_ty }); + store.insert_node(b, NodeRecord { ty: node_ty }); + // No edges — two disconnected nodes. + + let input = build_one_warp_input(&store, a); + let wsc = write_wsc_one_warp(&input, [0u8; 32], 0).unwrap(); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + assert_eq!(count_connected_components(&view), 2); + } +} diff --git a/crates/warp-cli/src/lib.rs b/crates/warp-cli/src/lib.rs new file mode 100644 index 00000000..744723d0 --- /dev/null +++ b/crates/warp-cli/src/lib.rs @@ -0,0 +1,10 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Echo CLI library — re-exports CLI types for man page generation. +//! +//! The library target exists solely to let `xtask` import the `Cli` struct +//! for `clap_mangen` man page generation. The output module is included for +//! 
completeness but its functions are only called by the binary target. +pub mod cli; +#[allow(dead_code)] +pub(crate) mod output; diff --git a/crates/warp-cli/src/main.rs b/crates/warp-cli/src/main.rs index 4be91b78..5aefed2f 100644 --- a/crates/warp-cli/src/main.rs +++ b/crates/warp-cli/src/main.rs @@ -2,19 +2,38 @@ // © James Ross Ω FLYING•ROBOTS //! Echo CLI entrypoint. //! -//! Provides developer-facing commands for working with Echo projects. *Planned* -//! subcommands include `echo demo` (run deterministic demo suites), `echo -//! bench` (execute Criterion benchmarks), and `echo inspect` (open the -//! inspector tooling). +//! Provides developer-facing commands for working with Echo snapshots: +//! +//! - `echo-cli verify ` — validate WSC snapshot integrity +//! - `echo-cli bench [--filter ]` — run and format benchmarks +//! - `echo-cli inspect [--tree]` — display snapshot metadata //! //! # Usage //! ```text -//! echo [options] +//! echo-cli [--format text|json] [options] //! ``` -//! -//! The CLI exits with code `0` on success and non-zero on error. Until the -//! subcommands are implemented the binary simply prints a placeholder message. -fn main() { - println!("Hello, world!"); +use anyhow::Result; +use clap::Parser; + +mod bench; +mod cli; +mod inspect; +mod output; +mod verify; +mod wsc_loader; + +use cli::{Cli, Commands}; + +fn main() -> Result<()> { + let cli = Cli::parse(); + + match cli.command { + Commands::Verify { + ref snapshot, + ref expected, + } => verify::run(snapshot, expected.as_deref(), &cli.format), + Commands::Bench { ref filter } => bench::run(filter.as_deref(), &cli.format), + Commands::Inspect { ref snapshot, tree } => inspect::run(snapshot, tree, &cli.format), + } } diff --git a/crates/warp-cli/src/output.rs b/crates/warp-cli/src/output.rs new file mode 100644 index 00000000..146a1bdf --- /dev/null +++ b/crates/warp-cli/src/output.rs @@ -0,0 +1,50 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! 
Shared output formatting for text and JSON modes. + +use crate::cli::OutputFormat; + +/// Emits output in the selected format. +/// +/// - `Text` mode prints `text` as-is (caller includes newlines). +/// - `Json` mode pretty-prints `json` with a trailing newline. +pub fn emit(format: &OutputFormat, text: &str, json: &serde_json::Value) { + match format { + OutputFormat::Text => print!("{text}"), + OutputFormat::Json => match serde_json::to_string_pretty(json) { + Ok(s) => println!("{s}"), + Err(e) => eprintln!("error: failed to serialize JSON output: {e}"), + }, + } +} + +/// Formats a 32-byte hash as lowercase hex. +pub fn hex_hash(hash: &[u8; 32]) -> String { + hex::encode(hash) +} + +/// Formats a hash as a short hex prefix (first 8 chars). +pub fn short_hex(hash: &[u8; 32]) -> String { + hex::encode(&hash[..4]) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn hex_hash_produces_64_chars() { + let hash = [0xAB; 32]; + let hex = hex_hash(&hash); + assert_eq!(hex.len(), 64); + assert_eq!(&hex[..4], "abab"); + } + + #[test] + fn short_hex_produces_8_chars() { + let hash = [0xCD; 32]; + let short = short_hex(&hash); + assert_eq!(short.len(), 8); + assert_eq!(short, "cdcdcdcd"); + } +} diff --git a/crates/warp-cli/src/verify.rs b/crates/warp-cli/src/verify.rs new file mode 100644 index 00000000..d272066c --- /dev/null +++ b/crates/warp-cli/src/verify.rs @@ -0,0 +1,322 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! `echo-cli verify` — validate WSC snapshot integrity. +//! +//! Loads a WSC file, validates its structure, reconstructs the graph for +//! each warp, and computes state root hashes. Optionally compares against +//! an expected hash. 
+ +use std::path::Path; + +use anyhow::{bail, Context, Result}; +use serde::Serialize; + +use warp_core::wsc::{validate_wsc, WscFile}; + +use crate::cli::OutputFormat; +use crate::output::{emit, hex_hash}; +use crate::wsc_loader::graph_store_from_warp_view; + +/// Result of verifying a single warp instance within a WSC file. +#[derive(Debug, Serialize)] +pub(crate) struct WarpVerifyResult { + pub(crate) warp_id: String, + pub(crate) root_node_id: String, + pub(crate) nodes: usize, + pub(crate) edges: usize, + pub(crate) state_root: String, + pub(crate) status: String, +} + +/// Result of the full verify operation. +#[derive(Debug, Serialize)] +pub(crate) struct VerifyReport { + pub(crate) file: String, + pub(crate) tick: u64, + pub(crate) schema_hash: String, + pub(crate) warp_count: usize, + pub(crate) warps: Vec, + pub(crate) result: String, +} + +/// Runs the verify subcommand. +pub(crate) fn run(snapshot: &Path, expected: Option<&str>, format: &OutputFormat) -> Result<()> { + // 1. Load WSC file. + let file = WscFile::open(snapshot) + .with_context(|| format!("failed to open WSC file: {}", snapshot.display()))?; + + // 2. Structural validation. + validate_wsc(&file) + .with_context(|| format!("WSC validation failed: {}", snapshot.display()))?; + + let tick = file.tick(); + let schema_hash = hex_hash(file.schema_hash()); + let warp_count = file.warp_count(); + + let mut warp_results = Vec::with_capacity(warp_count); + let mut all_pass = true; + + if expected.is_some() && warp_count > 1 { + eprintln!( + "warning: --expected only applies to warp 0; {} additional warp(s) will report 'unchecked'", + warp_count - 1 + ); + } + + // 3. For each warp: reconstruct graph, compute state root. 
+ for i in 0..warp_count { + let view = file + .warp_view(i) + .with_context(|| format!("failed to read warp {i}"))?; + + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + let state_root_hex = hex_hash(&state_root); + + // Check against expected hash (if provided, applies to first warp). + let status = if let Some(exp) = expected { + if i == 0 { + if state_root_hex == exp { + "pass".to_string() + } else { + all_pass = false; + format!("MISMATCH (expected {exp})") + } + } else { + "unchecked".to_string() + } + } else { + "pass".to_string() + }; + + warp_results.push(WarpVerifyResult { + warp_id: hex_hash(view.warp_id()), + root_node_id: hex_hash(view.root_node_id()), + nodes: view.nodes().len(), + edges: view.edges().len(), + state_root: state_root_hex, + status, + }); + } + + let report = VerifyReport { + file: snapshot.display().to_string(), + tick, + schema_hash, + warp_count, + warps: warp_results, + result: if all_pass { + "pass".to_string() + } else { + "fail".to_string() + }, + }; + + // 4. Output. 
+ let text = format_text_report(&report); + let json = serde_json::to_value(&report).context("failed to serialize verify report")?; + + emit(format, &text, &json); + + if !all_pass { + bail!("verification failed"); + } + Ok(()) +} + +fn format_text_report(report: &VerifyReport) -> String { + use std::fmt::Write; + + let mut out = String::new(); + let _ = writeln!(out, "echo-cli verify"); + let _ = writeln!(out, " File: {}", report.file); + let _ = writeln!(out, " Tick: {}", report.tick); + let _ = writeln!(out, " Schema: {}", report.schema_hash); + let _ = writeln!(out, " Warps: {}", report.warp_count); + let _ = writeln!(out); + + for (i, w) in report.warps.iter().enumerate() { + let _ = writeln!(out, " Warp {i}:"); + let _ = writeln!(out, " ID: {}", w.warp_id); + let _ = writeln!(out, " Root node: {}", w.root_node_id); + let _ = writeln!(out, " Nodes: {}", w.nodes); + let _ = writeln!(out, " Edges: {}", w.edges); + let _ = writeln!(out, " State root: {}", w.state_root); + let _ = writeln!(out, " Status: {}", w.status); + let _ = writeln!(out); + } + + let _ = writeln!(out, " Result: {}", report.result); + out +} + +#[cfg(test)] +#[allow(clippy::expect_used, clippy::unwrap_used)] +mod tests { + use super::*; + use std::io::Write as IoWrite; + use tempfile::NamedTempFile; + use warp_core::wsc::build::build_one_warp_input; + use warp_core::wsc::write::write_wsc_one_warp; + use warp_core::{ + make_edge_id, make_node_id, make_type_id, make_warp_id, EdgeRecord, GraphStore, Hash, + NodeRecord, + }; + + fn make_test_wsc() -> (Vec, Hash) { + let warp = make_warp_id("test"); + let node_ty = make_type_id("TestNode"); + let edge_ty = make_type_id("TestEdge"); + let root = make_node_id("root"); + let child = make_node_id("child"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.insert_node(child, NodeRecord { ty: node_ty }); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child"), + from: root, + 
to: child, + ty: edge_ty, + }, + ); + + let state_root = store.canonical_state_hash(); + let input = build_one_warp_input(&store, root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 42).expect("WSC write"); + (wsc_bytes, state_root) + } + + fn write_temp_wsc(data: &[u8]) -> NamedTempFile { + let mut f = NamedTempFile::new().expect("tempfile"); + f.write_all(data).expect("write"); + f.flush().expect("flush"); + f + } + + #[test] + fn valid_snapshot_passes() { + let (wsc_bytes, _) = make_test_wsc(); + let f = write_temp_wsc(&wsc_bytes); + let result = run(f.path(), None, &OutputFormat::Text); + assert!(result.is_ok(), "valid snapshot should pass: {result:?}"); + } + + #[test] + fn valid_snapshot_with_matching_expected_hash() { + let (wsc_bytes, state_root) = make_test_wsc(); + let expected_hex = hex_hash(&state_root); + let f = write_temp_wsc(&wsc_bytes); + let result = run(f.path(), Some(&expected_hex), &OutputFormat::Text); + assert!( + result.is_ok(), + "matching expected hash should pass: {result:?}" + ); + } + + #[test] + fn mismatched_expected_hash_fails() { + let (wsc_bytes, _) = make_test_wsc(); + let f = write_temp_wsc(&wsc_bytes); + let result = run( + f.path(), + Some("0000000000000000000000000000000000000000000000000000000000000000"), + &OutputFormat::Text, + ); + assert!(result.is_err(), "mismatched hash should fail"); + } + + #[test] + fn tampered_wsc_does_not_panic() { + let (mut wsc_bytes, _) = make_test_wsc(); + // Flip a byte in the node data (well past the header). + let flip_pos = wsc_bytes.len() / 2; + wsc_bytes[flip_pos] ^= 0xFF; + let f = write_temp_wsc(&wsc_bytes); + // Tampered files may fail at structural validation or produce a + // different state root — the outcome depends on which byte was + // flipped. We intentionally allow both Ok and Err here; the point + // is that the loader never panics on corrupted input. 
+ let _result = run(f.path(), None, &OutputFormat::Text); + } + + #[test] + fn json_output_is_valid() { + let (wsc_bytes, _) = make_test_wsc(); + let f = write_temp_wsc(&wsc_bytes); + // Just verify it doesn't panic in JSON mode. + let result = run(f.path(), None, &OutputFormat::Json); + assert!(result.is_ok()); + } + + #[test] + fn missing_file_gives_clean_error() { + let result = run( + Path::new("/nonexistent/path/state.wsc"), + None, + &OutputFormat::Text, + ); + assert!(result.is_err()); + let err_msg = format!("{:#}", result.unwrap_err()); + assert!( + err_msg.contains("failed to open WSC file"), + "error should mention file open failure: {err_msg}" + ); + } + + #[test] + fn text_report_shows_unchecked_for_extra_warps() { + let report = VerifyReport { + file: "test.wsc".to_string(), + tick: 1, + schema_hash: "abcd".to_string(), + warp_count: 2, + warps: vec![ + WarpVerifyResult { + warp_id: "0000".to_string(), + root_node_id: "1111".to_string(), + nodes: 3, + edges: 2, + state_root: "aaaa".to_string(), + status: "pass".to_string(), + }, + WarpVerifyResult { + warp_id: "2222".to_string(), + root_node_id: "3333".to_string(), + nodes: 1, + edges: 0, + state_root: "bbbb".to_string(), + status: "unchecked".to_string(), + }, + ], + result: "pass".to_string(), + }; + + let text = format_text_report(&report); + assert!( + text.contains("unchecked"), + "multi-warp report should show 'unchecked' for warps 1+: {text}" + ); + // Result line should be lowercase (no .to_uppercase()). 
+ assert!( + text.contains("Result: pass"), + "result should be lowercase 'pass': {text}" + ); + } + + #[test] + fn empty_graph_passes() { + let warp = make_warp_id("test"); + let store = GraphStore::new(warp); + let zero_root = warp_core::NodeId([0u8; 32]); + + let input = build_one_warp_input(&store, zero_root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 0).expect("WSC write"); + let f = write_temp_wsc(&wsc_bytes); + + let result = run(f.path(), None, &OutputFormat::Text); + assert!(result.is_ok(), "empty graph should pass: {result:?}"); + } +} diff --git a/crates/warp-cli/src/wsc_loader.rs b/crates/warp-cli/src/wsc_loader.rs new file mode 100644 index 00000000..4ca07f4f --- /dev/null +++ b/crates/warp-cli/src/wsc_loader.rs @@ -0,0 +1,288 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! WSC → GraphStore reconstruction. +//! +//! Bridges the gap between the on-disk WSC columnar format and the in-memory +//! `GraphStore` used by warp-core's hash computation APIs. This is the inverse +//! of `warp_core::wsc::build_one_warp_input`. + +use bytes::Bytes; + +use warp_core::wsc::types::{AttRow, EdgeRow, NodeRow}; +use warp_core::wsc::view::WarpView; +use warp_core::{ + AtomPayload, AttachmentValue, EdgeId, EdgeRecord, GraphStore, NodeId, NodeRecord, TypeId, + WarpId, +}; + +/// Reconstructs a `GraphStore` from a `WarpView`. +/// +/// Iterates the columnar WSC data (nodes, edges, attachments) and populates +/// an in-memory `GraphStore` suitable for hash recomputation via +/// `GraphStore::canonical_state_hash()`. +pub(crate) fn graph_store_from_warp_view(view: &WarpView<'_>) -> GraphStore { + let warp_id = WarpId(*view.warp_id()); + let mut store = GraphStore::new(warp_id); + + // 1. Insert all nodes. + for node_row in view.nodes() { + let (node_id, record) = node_row_to_record(node_row); + store.insert_node(node_id, record); + } + + // 2. Insert all edges. 
+ for edge_row in view.edges() { + let (from, record) = edge_row_to_record(edge_row); + store.insert_edge(from, record); + } + + // 3. Reconstruct node attachments. + for (node_ix, node_row) in view.nodes().iter().enumerate() { + let node_id = NodeId(node_row.node_id); + let atts = view.node_attachments(node_ix); + // WSC stores at most one attachment per node (alpha plane). + if atts.len() > 1 { + eprintln!( + "warning: node {node_ix} has {} attachments (expected ≤1); using first", + atts.len() + ); + } + if let Some(att) = atts.first() { + let value = att_row_to_value(att, view); + store.set_node_attachment(node_id, Some(value)); + } + } + + // 4. Reconstruct edge attachments. + for (edge_ix, edge_row) in view.edges().iter().enumerate() { + let edge_id = EdgeId(edge_row.edge_id); + let atts = view.edge_attachments(edge_ix); + // WSC stores at most one attachment per edge (beta plane). + if atts.len() > 1 { + eprintln!( + "warning: edge {edge_ix} has {} attachments (expected ≤1); using first", + atts.len() + ); + } + if let Some(att) = atts.first() { + let value = att_row_to_value(att, view); + store.set_edge_attachment(edge_id, Some(value)); + } + } + + store +} + +fn node_row_to_record(row: &NodeRow) -> (NodeId, NodeRecord) { + ( + NodeId(row.node_id), + NodeRecord { + ty: TypeId(row.node_type), + }, + ) +} + +fn edge_row_to_record(row: &EdgeRow) -> (NodeId, EdgeRecord) { + let from = NodeId(row.from_node_id); + let record = EdgeRecord { + id: EdgeId(row.edge_id), + from, + to: NodeId(row.to_node_id), + ty: TypeId(row.edge_type), + }; + (from, record) +} + +fn att_row_to_value(att: &AttRow, view: &WarpView<'_>) -> AttachmentValue { + if att.is_atom() { + let blob = match view.blob_for_attachment(att) { + Some(b) => b, + None => { + eprintln!("warning: missing blob for atom attachment; using empty payload"); + &[] + } + }; + AttachmentValue::Atom(AtomPayload::new( + TypeId(att.type_or_warp), + Bytes::copy_from_slice(blob), + )) + } else { + 
AttachmentValue::Descend(WarpId(att.type_or_warp)) + } +} + +#[cfg(test)] +#[allow(clippy::expect_used, clippy::unwrap_used)] +mod tests { + use super::*; + use warp_core::wsc::build::build_one_warp_input; + use warp_core::wsc::write::write_wsc_one_warp; + use warp_core::wsc::WscFile; + use warp_core::{make_edge_id, make_node_id, make_type_id, make_warp_id, Hash}; + + /// Creates a simple graph, serializes to WSC, reconstructs, and verifies + /// the state root hash matches the original. + #[test] + fn roundtrip_state_root_matches() { + let warp = make_warp_id("test"); + let node_ty = make_type_id("TestNode"); + let edge_ty = make_type_id("TestEdge"); + let root = make_node_id("root"); + let child = make_node_id("child"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.insert_node(child, NodeRecord { ty: node_ty }); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child"), + from: root, + to: child, + ty: edge_ty, + }, + ); + + let original_hash = store.canonical_state_hash(); + + // Serialize to WSC + let input = build_one_warp_input(&store, root); + let schema: Hash = [0u8; 32]; + let wsc_bytes = write_wsc_one_warp(&input, schema, 1).expect("WSC write failed"); + + // Reconstruct from WSC + let file = WscFile::from_bytes(wsc_bytes).expect("WSC load failed"); + let view = file.warp_view(0).expect("warp_view failed"); + let reconstructed = graph_store_from_warp_view(&view); + + let reconstructed_hash = reconstructed.canonical_state_hash(); + assert_eq!( + original_hash, reconstructed_hash, + "state root must survive WSC roundtrip" + ); + } + + /// Verifies that attachments survive the WSC roundtrip. 
+ #[test] + fn roundtrip_with_attachments() { + let warp = make_warp_id("test"); + let node_ty = make_type_id("TestNode"); + let payload_ty = make_type_id("Payload"); + let root = make_node_id("root"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.set_node_attachment( + root, + Some(AttachmentValue::Atom(AtomPayload::new( + payload_ty, + Bytes::from_static(&[1, 2, 3, 4, 5, 6, 7, 8]), + ))), + ); + + let original_hash = store.canonical_state_hash(); + + let input = build_one_warp_input(&store, root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 0).expect("WSC write failed"); + + let file = WscFile::from_bytes(wsc_bytes).expect("WSC load failed"); + let view = file.warp_view(0).expect("warp_view failed"); + let reconstructed = graph_store_from_warp_view(&view); + + assert_eq!(original_hash, reconstructed.canonical_state_hash()); + } + + /// Verifies that edge attachments survive the WSC roundtrip. + #[test] + fn roundtrip_with_edge_attachments() { + let warp = make_warp_id("test"); + let node_ty = make_type_id("TestNode"); + let edge_ty = make_type_id("TestEdge"); + let payload_ty = make_type_id("EdgePayload"); + let root = make_node_id("root"); + let child = make_node_id("child"); + let edge_id = make_edge_id("root->child"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.insert_node(child, NodeRecord { ty: node_ty }); + store.insert_edge( + root, + EdgeRecord { + id: edge_id, + from: root, + to: child, + ty: edge_ty, + }, + ); + store.set_edge_attachment( + edge_id, + Some(AttachmentValue::Atom(AtomPayload::new( + payload_ty, + Bytes::from_static(&[10, 20, 30]), + ))), + ); + + let original_hash = store.canonical_state_hash(); + + let input = build_one_warp_input(&store, root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 0).expect("WSC write failed"); + + let file = WscFile::from_bytes(wsc_bytes).expect("WSC load failed"); + 
let view = file.warp_view(0).expect("warp_view failed"); + let reconstructed = graph_store_from_warp_view(&view); + + assert_eq!( + original_hash, + reconstructed.canonical_state_hash(), + "state root must survive edge-attachment roundtrip" + ); + } + + /// Verifies that Descend (cross-warp reference) attachments survive roundtrip. + #[test] + fn roundtrip_with_descend_attachment() { + let warp = make_warp_id("test"); + let child_warp = make_warp_id("child_warp"); + let node_ty = make_type_id("TestNode"); + let root = make_node_id("root"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.set_node_attachment(root, Some(AttachmentValue::Descend(child_warp))); + + let original_hash = store.canonical_state_hash(); + + let input = build_one_warp_input(&store, root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 0).expect("WSC write failed"); + + let file = WscFile::from_bytes(wsc_bytes).expect("WSC load failed"); + let view = file.warp_view(0).expect("warp_view failed"); + let reconstructed = graph_store_from_warp_view(&view); + + assert_eq!( + original_hash, + reconstructed.canonical_state_hash(), + "state root must survive Descend-attachment roundtrip" + ); + } + + /// Empty graph (0 nodes) roundtrips successfully. 
+ #[test] + fn roundtrip_empty_graph() { + let warp = make_warp_id("test"); + let store = GraphStore::new(warp); + let zero_root = NodeId([0u8; 32]); + + let original_hash = store.canonical_state_hash(); + + let input = build_one_warp_input(&store, zero_root); + let wsc_bytes = write_wsc_one_warp(&input, [0u8; 32], 0).expect("WSC write failed"); + + let file = WscFile::from_bytes(wsc_bytes).expect("WSC load failed"); + let view = file.warp_view(0).expect("warp_view failed"); + let reconstructed = graph_store_from_warp_view(&view); + + assert_eq!(original_hash, reconstructed.canonical_state_hash()); + } +} diff --git a/crates/warp-cli/tests/cli_integration.rs b/crates/warp-cli/tests/cli_integration.rs new file mode 100644 index 00000000..34ecca52 --- /dev/null +++ b/crates/warp-cli/tests/cli_integration.rs @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Integration tests for `echo-cli` binary. +//! +//! These tests run the actual binary via `assert_cmd` and verify exit codes, +//! help output, and error messages. 
+ +#![allow(deprecated)] // assert_cmd::cargo::cargo_bin deprecation — no stable replacement in v2.x + +use assert_cmd::cargo::cargo_bin; +use predicates::prelude::*; + +fn echo_cli() -> assert_cmd::Command { + assert_cmd::Command::new(cargo_bin("echo-cli")) +} + +#[test] +fn help_shows_all_subcommands() { + echo_cli() + .arg("--help") + .assert() + .success() + .stdout(predicate::str::contains("Echo developer CLI")) + .stdout(predicate::str::contains("verify")) + .stdout(predicate::str::contains("bench")) + .stdout(predicate::str::contains("inspect")); +} + +#[test] +fn verify_help_lists_snapshot_arg() { + echo_cli() + .args(["verify", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("snapshot")); +} + +#[test] +fn bench_help_lists_filter() { + echo_cli() + .args(["bench", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("filter")); +} + +#[test] +fn inspect_help_lists_tree_flag() { + echo_cli() + .args(["inspect", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("tree")); +} + +#[test] +fn unknown_subcommand_exits_2() { + echo_cli().arg("bogus").assert().code(2); +} + +#[test] +fn no_subcommand_exits_2() { + echo_cli().assert().code(2); +} + +#[test] +fn verify_missing_file_exits_nonzero() { + echo_cli() + .args(["verify", "/nonexistent/path/state.wsc"]) + .assert() + .failure() + .stderr(predicate::str::contains("failed to open WSC file")); +} + +#[test] +fn format_flag_is_global() { + // --format should work before and after the subcommand. 
+ echo_cli() + .args(["--format", "json", "verify", "--help"]) + .assert() + .success(); +} diff --git a/crates/warp-ffi/Cargo.toml b/crates/warp-ffi/Cargo.toml deleted file mode 100644 index 6eac886d..00000000 --- a/crates/warp-ffi/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# © James Ross Ω FLYING•ROBOTS -[package] -name = "warp-ffi" -version = "0.1.0" -edition = "2021" -rust-version = "1.90.0" -description = "Echo FFI: C ABI for host integrations (Rhai in-process; C/others via ABI)" -license = "Apache-2.0" -repository = "https://github.com/flyingrobots/echo" -readme = "README.md" -keywords = ["echo", "ffi", "ecs", "deterministic"] -categories = ["external-ffi-bindings", "game-engines"] - -[lib] -crate-type = ["rlib", "cdylib", "staticlib"] - -[dependencies] -warp-core = { workspace = true } diff --git a/crates/warp-ffi/README.md b/crates/warp-ffi/README.md deleted file mode 100644 index 8c273ead..00000000 --- a/crates/warp-ffi/README.md +++ /dev/null @@ -1,78 +0,0 @@ - - -# warp-ffi - -Thin C ABI bindings for Echo’s deterministic engine (`warp-core`). - -This crate produces a C-callable library for embedding Echo’s core in other runtimes (C/C++, host modules alongside Rhai, etc.). - -Today, the exposed surface is intentionally small and focused on the **motion rewrite spike** (a concrete, deterministic end-to-end example). As the engine hardens, this crate can grow toward a broader “register rules by name, apply/commit, snapshot” ABI. - -## Platforms and Toolchain - -- Rust toolchain is pinned by the repository `rust-toolchain.toml`. -- MSRV policy is tracked by CI (when enabled) and the root docs. -- Targets: macOS (aarch64/x86_64), Linux (x86_64). Windows support is planned. 
- -## Building - -Build static and shared libraries: - -``` -cargo build -p warp-ffi --release -``` - -Artifacts (platform-dependent): - -- `target/release/libwarp_ffi.a` (static) -- `target/release/libwarp_ffi.dylib` or `libwarp_ffi.so` (shared) - -## Linking - -Example (clang): - -``` -clang -o demo demo.c -L target/release -lwarp_ffi -Wl,-rpath,@executable_path/../lib -``` - -Ensure the library search path includes `target/release` (or install path) at runtime. - -## API Overview - -Headers are generated in a follow-up task. The currently-exported ABI is motion-demo focused: - -- `warp_engine_new() -> warp_engine*` -- `warp_engine_free(warp_engine*)` -- `warp_engine_spawn_motion_entity(warp_engine*, const char* label, ... , warp_node_id* out)` -- `warp_engine_begin(warp_engine*) -> warp_tx_id` -- `warp_engine_apply_motion(warp_engine*, warp_tx_id, const warp_node_id*) -> int` (`0`/`1` as bool) -- `warp_engine_commit(warp_engine*, warp_tx_id, warp_snapshot* out) -> int` (`0`/`1` as bool) -- `warp_engine_read_motion(warp_engine*, const warp_node_id*, float* out_pos3, float* out_vel3) -> int` - -Snapshots currently expose a 32-byte BLAKE3 hash. See `docs/spec-mwmr-concurrency.md` for determinism rules. - -## Quick Start (Pseudo‑C) - -```c -warp_engine* eng = warp_engine_new(); -warp_node_id entity; -warp_engine_spawn_motion_entity(eng, "entity-1", /* pos */ 0,0,0, /* vel */ 0,0,0, &entity); -warp_tx_id tx = warp_engine_begin(eng); -warp_engine_apply_motion(eng, tx, &entity); -warp_snapshot snap; -warp_engine_commit(eng, tx, &snap); -warp_engine_free(eng); -``` - -## Troubleshooting - -- Undefined symbols at link: verify `-L` and `-l` flags and that `cargo build --release` produced the library. -- Snapshot hashes differ across runs: confirm identical state and rule registrations; see determinism invariants in `docs/determinism-invariants.md`. - -## More Documentation - -- Root docs: see repository `README.md` for the architecture and links. 
-- Engine surface: `crates/warp-core/src/lib.rs` (re‑exports) and rustdoc. -- Engine design details: Core booklet (`docs/book/echo/booklet-02-core.tex`) - and ECS/scheduler specs in `docs/` (`spec-ecs-storage.md`, - `spec-scheduler.md`, etc.). diff --git a/crates/warp-ffi/src/lib.rs b/crates/warp-ffi/src/lib.rs deleted file mode 100644 index 5e9cf0d6..00000000 --- a/crates/warp-ffi/src/lib.rs +++ /dev/null @@ -1,94 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -//! C-compatible bindings for the warp-core engine. -//! -//! This module exposes a minimal ABI that higher-level languages (Rhai host modules, Python, -//! etc.) can use to interact with the deterministic engine without knowing the -//! internal Rust types. -#![deny(missing_docs)] - -use warp_core::{Engine, TxId}; - -/// Opaque engine pointer exposed over the C ABI. -pub struct WarpEngine { - inner: Engine, -} - -/// 256-bit node identifier exposed as a raw byte array for FFI consumers. -#[repr(C)] -#[derive(Clone, Copy)] -pub struct warp_node_id { - /// Raw bytes representing the hashed node identifier. - pub bytes: [u8; 32], -} - -/// Transaction identifier mirrored on the C side. -#[repr(C)] -#[derive(Clone, Copy)] -pub struct warp_tx_id { - /// Native transaction value. - pub value: u64, -} - -/// Snapshot hash emitted after a successful commit. -#[repr(C)] -#[derive(Clone, Copy)] -pub struct warp_snapshot { - /// Canonical hash bytes for the snapshot. - pub hash: [u8; 32], -} - -/// Releases the engine allocation. -/// -/// # Safety -/// `engine` must be a pointer previously returned by an engine constructor that -/// has not already been freed. -#[no_mangle] -pub unsafe extern "C" fn warp_engine_free(engine: *mut WarpEngine) { - if engine.is_null() { - return; - } - unsafe { - drop(Box::from_raw(engine)); - } -} - -/// Starts a new transaction and returns its identifier. -/// -/// # Safety -/// `engine` must be a valid pointer to a `WarpEngine`. 
-#[no_mangle] -pub unsafe extern "C" fn warp_engine_begin(engine: *mut WarpEngine) -> warp_tx_id { - if engine.is_null() { - return warp_tx_id { value: 0 }; - } - let engine = unsafe { &mut *engine }; - let tx = engine.inner.begin(); - warp_tx_id { value: tx.value() } -} - -/// Commits the transaction and writes the resulting snapshot hash. -/// -/// # Safety -/// Pointers must be valid; `tx` must correspond to a live transaction. -#[no_mangle] -pub unsafe extern "C" fn warp_engine_commit( - engine: *mut WarpEngine, - tx: warp_tx_id, - out_snapshot: *mut warp_snapshot, -) -> bool { - if engine.is_null() || out_snapshot.is_null() || tx.value == 0 { - return false; - } - let engine = unsafe { &mut *engine }; - match engine.inner.commit(TxId::from_raw(tx.value)) { - Ok(snapshot) => { - unsafe { - (*out_snapshot).hash = snapshot.hash; - } - true - } - Err(_) => false, - } -} diff --git a/det-policy.yaml b/det-policy.yaml index 8107b9b6..9fd2fca5 100644 --- a/det-policy.yaml +++ b/det-policy.yaml @@ -33,10 +33,6 @@ crates: class: DET_CRITICAL owner_role: "Architect" paths: ["crates/warp-wasm/**"] - warp-ffi: - class: DET_CRITICAL - owner_role: "Architect" - paths: ["crates/warp-ffi/**"] echo-wasm-abi: class: DET_CRITICAL owner_role: "Architect" diff --git a/docs/ROADMAP/backlog/security.md b/docs/ROADMAP/backlog/security.md index 50fdf01f..0bf43d0b 100644 --- a/docs/ROADMAP/backlog/security.md +++ b/docs/ROADMAP/backlog/security.md @@ -193,7 +193,7 @@ Specifications and hardening for trust boundaries across FFI, WASM, and CLI surf **Acceptance Criteria:** -- [ ] AC1: Spec document exists at `docs/specs/SPEC-PROVENANCE-PAYLOAD.md` +- [ ] AC1: Spec document exists at `docs/spec/SPEC-0005-provenance-payload.md` - [ ] AC2: Envelope structure is fully defined with field-level documentation - [ ] AC3: At least two worked examples (single artifact, chained artifacts) - [ ] AC4: Relationship to SLSA levels is explicitly discussed diff --git a/docs/code-map.md b/docs/code-map.md 
index 74f512a5..68b0a85b 100644 --- a/docs/code-map.md +++ b/docs/code-map.md @@ -27,9 +27,6 @@ - Deterministic math: `crates/warp-core/src/math/*` - Tests (integration): `crates/warp-core/tests/*` -- warp-ffi — C ABI for host integrations - - `crates/warp-ffi/src/lib.rs` - - warp-wasm — wasm-bindgen bindings - `crates/warp-wasm/src/lib.rs` diff --git a/docs/man/echo-cli-bench.1 b/docs/man/echo-cli-bench.1 new file mode 100644 index 00000000..bd5e8895 --- /dev/null +++ b/docs/man/echo-cli-bench.1 @@ -0,0 +1,16 @@ +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.TH echo-cli-bench 1 "echo-cli-bench " +.SH NAME +echo\-cli\-bench \- Run benchmarks and format results +.SH SYNOPSIS +\fBecho\-cli\-bench\fR [\fB\-\-filter\fR] [\fB\-h\fR|\fB\-\-help\fR] +.SH DESCRIPTION +Run benchmarks and format results +.SH OPTIONS +.TP +\fB\-\-filter\fR \fI\fR +Filter benchmarks by pattern +.TP +\fB\-h\fR, \fB\-\-help\fR +Print help diff --git a/docs/man/echo-cli-inspect.1 b/docs/man/echo-cli-inspect.1 new file mode 100644 index 00000000..1cfef011 --- /dev/null +++ b/docs/man/echo-cli-inspect.1 @@ -0,0 +1,19 @@ +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.TH echo-cli-inspect 1 "echo-cli-inspect " +.SH NAME +echo\-cli\-inspect \- Inspect a WSC snapshot +.SH SYNOPSIS +\fBecho\-cli\-inspect\fR [\fB\-\-tree\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> +.SH DESCRIPTION +Inspect a WSC snapshot +.SH OPTIONS +.TP +\fB\-\-tree\fR +Show ASCII tree of graph structure +.TP +\fB\-h\fR, \fB\-\-help\fR +Print help +.TP +<\fISNAPSHOT\fR> +Path to WSC snapshot file diff --git a/docs/man/echo-cli-verify.1 b/docs/man/echo-cli-verify.1 new file mode 100644 index 00000000..92a2bfc9 --- /dev/null +++ b/docs/man/echo-cli-verify.1 @@ -0,0 +1,19 @@ +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.TH echo-cli-verify 1 "echo-cli-verify " +.SH NAME +echo\-cli\-verify \- Verify hash integrity of a WSC snapshot +.SH SYNOPSIS +\fBecho\-cli\-verify\fR [\fB\-\-expected\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> +.SH DESCRIPTION +Verify 
hash integrity of a WSC snapshot +.SH OPTIONS +.TP +\fB\-\-expected\fR \fI\fR +Expected state root hash (hex) for warp 0 only; additional warps report "unchecked" +.TP +\fB\-h\fR, \fB\-\-help\fR +Print help +.TP +<\fISNAPSHOT\fR> +Path to WSC snapshot file diff --git a/docs/man/echo-cli.1 b/docs/man/echo-cli.1 new file mode 100644 index 00000000..d9be7929 --- /dev/null +++ b/docs/man/echo-cli.1 @@ -0,0 +1,41 @@ +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.TH echo-cli 1 "echo-cli 0.1.0" +.SH NAME +echo\-cli \- Echo developer CLI +.SH SYNOPSIS +\fBecho\-cli\fR [\fB\-\-format\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] <\fIsubcommands\fR> +.SH DESCRIPTION +Echo developer CLI +.SH OPTIONS +.TP +\fB\-\-format\fR \fI\fR [default: text] +Output format (text or json) +.br + +.br +\fIPossible values:\fR +.RS 14 +.IP \(bu 2 +text: Human\-readable text output +.IP \(bu 2 +json: Machine\-readable JSON output +.RE +.TP +\fB\-h\fR, \fB\-\-help\fR +Print help (see a summary with \*(Aq\-h\*(Aq) +.TP +\fB\-V\fR, \fB\-\-version\fR +Print version +.SH SUBCOMMANDS +.TP +echo\-cli\-verify(1) +Verify hash integrity of a WSC snapshot +.TP +echo\-cli\-bench(1) +Run benchmarks and format results +.TP +echo\-cli\-inspect(1) +Inspect a WSC snapshot +.SH VERSION +v0.1.0 diff --git a/docs/notes/project-tour-2025-12-28.md b/docs/notes/project-tour-2025-12-28.md index 4ee35e9c..5c1519f5 100644 --- a/docs/notes/project-tour-2025-12-28.md +++ b/docs/notes/project-tour-2025-12-28.md @@ -1,5 +1,6 @@ + # Echo Project Tour (2025-12-28) This note is a fast “become dangerous” map of the repository as it exists today. @@ -21,12 +22,12 @@ Today’s repo is a Rust workspace that already contains: The stable story that matches both docs and code: -- The *state* of the world is a graph (nodes + edges + payloads). -- A *change* is a rewrite (rule applied at a scope). 
-- A *frame / tick* is a transaction: - - `begin()` → collect candidate rewrites - - `apply(...)` → match + enqueue rewrites - - `commit()` → deterministically order + execute an independent subset → emit a snapshot hash +- The _state_ of the world is a graph (nodes + edges + payloads). +- A _change_ is a rewrite (rule applied at a scope). +- A _frame / tick_ is a transaction: + - `begin()` → collect candidate rewrites + - `apply(...)` → match + enqueue rewrites + - `commit()` → deterministically order + execute an independent subset → emit a snapshot hash - Snapshots can be streamed to tools as full snapshots + gapless diffs (epoch-to-epoch). - Hashes are the checksum of truth: if peers disagree, you detect desync early. @@ -35,18 +36,18 @@ The stable story that matches both docs and code: Implemented (today): - `warp-core` rewrite engine spike: - - deterministic pending queue and deterministic drain ordering, - - footprint-based independence checks, - - reachable-only graph hashing (`state_root`) and commit header hashing (`commit_id`), - - deterministic math primitives + PRNG. + - deterministic pending queue and deterministic drain ordering, + - footprint-based independence checks, + - reachable-only graph hashing (`state_root`) and commit header hashing (`commit_id`), + - deterministic math primitives + PRNG. - Session/tooling pipeline: - - deterministic JS-ABI v1.0 framing + canonical CBOR encoding (`echo-session-proto`), - - Unix socket hub (`echo-session-service`), - - tool client + port abstraction (`echo-session-client`), - - WGPU viewer that reconstructs and validates streamed graphs (`warp-viewer`). + - deterministic JS-ABI v1.0 framing + canonical CBOR encoding (`echo-session-proto`), + - Unix socket hub (`echo-session-service`), + - tool client + port abstraction (`echo-session-client`), + - WGPU viewer that reconstructs and validates streamed graphs (`warp-viewer`). 
- Living spec scaffolding: - - Spec-000 Leptos/Trunk shell (`specs/spec-000-rewrite`), - - DTO schema (`echo-wasm-abi`) + demo kernel (`echo-wasm-bindings`). + - Spec-000 Leptos/Trunk shell (`specs/spec-000-rewrite`), + - DTO schema (`echo-wasm-abi`) + demo kernel (`echo-wasm-bindings`). Aspirational / partially specified (not fully implemented yet): @@ -60,117 +61,116 @@ Aspirational / partially specified (not fully implemented yet): ### Core engine + math - `crates/warp-core` - - Engine transaction model: `Engine::begin`, `Engine::apply`, `Engine::commit`, `Engine::snapshot` - - Deterministic scheduler: radix drain ordering + footprint independence checks - - Snapshot hashing: `state_root` and `commit_id` - - Deterministic math: `math::{Vec3, Mat4, Quat, Prng}` + - Engine transaction model: `Engine::begin`, `Engine::apply`, `Engine::commit`, `Engine::snapshot` + - Deterministic scheduler: radix drain ordering + footprint independence checks + - Snapshot hashing: `state_root` and `commit_id` + - Deterministic math: `math::{Vec3, Mat4, Quat, Prng}` - `crates/warp-geom` - - Geometry primitives (AABB, transforms, temporal helpers). + - Geometry primitives (AABB, transforms, temporal helpers). ### Tooling ports - `crates/echo-app-core` - - “tool hexagon” ports/services: config, toasts, redraw port, etc. + - “tool hexagon” ports/services: config, toasts, redraw port, etc. - `crates/echo-config-fs` - - Filesystem config adapter for tool prefs (implements the `ConfigStore` port). + - Filesystem config adapter for tool prefs (implements the `ConfigStore` port). 
### Session and streaming graph - `crates/echo-graph` - - Canonical renderable graph (`RenderGraph`) + diff ops (`WarpOp`) - - Canonical hashing via deterministic CBOR bytes (node/edge sorting before encoding) + - Canonical renderable graph (`RenderGraph`) + diff ops (`WarpOp`) + - Canonical hashing via deterministic CBOR bytes (node/edge sorting before encoding) - `crates/echo-session-proto` - - Wire types (`Message`, `OpEnvelope`, notifications, WARP stream payload) - - Deterministic CBOR canonicalization + JS-ABI v1.0 framing + BLAKE3 checksum + - Wire types (`Message`, `OpEnvelope`, notifications, WARP stream payload) + - Deterministic CBOR canonicalization + JS-ABI v1.0 framing + BLAKE3 checksum - `crates/echo-session-service` - - Hub process: handshake, monotonic `ts`, subscriptions, gapless diff enforcement, fan-out + - Hub process: handshake, monotonic `ts`, subscriptions, gapless diff enforcement, fan-out - `crates/echo-session-client` - - Client helpers + `tool::SessionPort` abstraction for UIs + - Client helpers + `tool::SessionPort` abstraction for UIs - `crates/echo-session-ws-gateway` - - WebSocket ↔ Unix-socket bridge for browser-based consumers. + - WebSocket ↔ Unix-socket bridge for browser-based consumers. ### Tools / adapters - `crates/warp-viewer` - - Native viewer: subscribes to an WARP stream, applies snapshots/diffs, verifies hashes, renders. -- `crates/warp-ffi` - - Thin C ABI surface over `warp-core` (currently focused on the motion demo rule). + - Native viewer: subscribes to a WARP stream, applies snapshots/diffs, verifies hashes, renders. - `crates/warp-wasm` - - wasm-bindgen bindings for `warp-core` (tooling/web environments). + - wasm-bindgen bindings for `warp-core` (tooling/web environments). - `crates/warp-cli` - - Placeholder CLI home. + - Developer CLI (`echo-cli`): `verify` (WSC integrity), `bench` (Criterion + runner/formatter), `inspect` (snapshot metadata + ASCII tree).
- `crates/warp-benches` - - Criterion microbenchmarks (scheduler drain, snapshot hash, etc.). + - Criterion microbenchmarks (scheduler drain, snapshot hash, etc.). ### Living specs (teaching slice) - `crates/echo-wasm-abi` - - WASM-friendly DTO schema for Spec-000 and future living specs. + - WASM-friendly DTO schema for Spec-000 and future living specs. - `crates/echo-wasm-bindings` - - Demo kernel + rewrite history (teaching slice; not the production engine). + - Demo kernel + rewrite history (teaching slice; not the production engine). - `specs/spec-000-rewrite` - - Leptos/Trunk scaffold; currently not yet wired to the demo kernel bindings. + - Leptos/Trunk scaffold; currently not yet wired to the demo kernel bindings. ## Core Determinism Invariants (Code-Backed) ### Rewrite ordering (warp-core scheduler) - Deterministic sort key: - - (`scope_hash`, `rule_id`, `nonce`) in ascending lexicographic order. + - (`scope_hash`, `rule_id`, `nonce`) in ascending lexicographic order. - Implementation detail: - - stable LSD radix sort (16-bit digits; 20 passes) for `O(n)` drain, - - tiny batches use a comparison sort fast-path. + - stable LSD radix sort (16-bit digits; 20 passes) for `O(n)` drain, + - tiny batches use a comparison sort fast-path. - Pending queue semantics: - - last-wins de-dupe on (`scope_hash`, `compact_rule_id`) within a tx queue. + - last-wins de-dupe on (`scope_hash`, `compact_rule_id`) within a tx queue. ### Independence (MWMR groundwork) - Each pending rewrite computes a `Footprint`: - - node read/write sets, edge read/write sets, boundary port sets, plus a coarse `factor_mask`. + - node read/write sets, edge read/write sets, boundary port sets, plus a coarse `factor_mask`. 
- Independence fails if any of the following intersect: - - writes vs prior reads/writes, on nodes and edges - - any overlap on boundary ports - - `factor_mask` overlap (used as a coarse “might-touch” prefilter) + - writes vs prior reads/writes, on nodes and edges + - any overlap on boundary ports + - `factor_mask` overlap (used as a coarse “might-touch” prefilter) ### Snapshot hashing (warp-core) - `state_root` is BLAKE3 over a canonical byte stream of the reachable subgraph: - - reachability: deterministic BFS from root following outbound edges - - node order: ascending `NodeId` (32-byte lexicographic) - - edge order: per source node, edges sorted by `EdgeId`, include only edges to reachable nodes - - payloads: `u64` little-endian length prefix + raw bytes + - reachability: deterministic BFS from root following outbound edges + - node order: ascending `NodeId` (32-byte lexicographic) + - edge order: per source node, edges sorted by `EdgeId`, include only edges to reachable nodes + - payloads: `u64` little-endian length prefix + raw bytes ### Commit hashing (warp-core) - `commit_id` is BLAKE3 over a commit header: - - header version `u16 = 1` - - parent commit hashes (length-prefixed) - - `state_root` + plan/decision/rewrites digests + policy id -- Empty digests for *length-prefixed list digests* use `blake3(0u64.to_le_bytes())`. + - header version `u16 = 1` + - parent commit hashes (length-prefixed) + - `state_root` + plan/decision/rewrites digests + policy id +- Empty digests for _length-prefixed list digests_ use `blake3(0u64.to_le_bytes())`. 
### Wire protocol (echo-session-proto) - JS-ABI v1.0 packet: - - `MAGIC(4) || VERSION(2) || FLAGS(2) || LENGTH(4) || PAYLOAD || CHECKSUM(32)` - - checksum = blake3(header||payload) + - `MAGIC(4) || VERSION(2) || FLAGS(2) || LENGTH(4) || PAYLOAD || CHECKSUM(32)` + - checksum = blake3(header||payload) - PAYLOAD is canonical CBOR: - - definite lengths only, no tags, minimal integer widths - - floats encoded at the smallest width that round-trips - - forbid “int as float” encodings - - map keys sorted by their CBOR byte encoding; duplicates rejected + - definite lengths only, no tags, minimal integer widths + - floats encoded at the smallest width that round-trips + - forbid “int as float” encodings + - map keys sorted by their CBOR byte encoding; duplicates rejected ## “Follow the Code” Entry Points - Engine core: - - `crates/warp-core/src/engine_impl.rs` (begin/apply/commit) - - `crates/warp-core/src/scheduler.rs` (deterministic ordering + independence) - - `crates/warp-core/src/snapshot.rs` (state_root + commit_id hashing) + - `crates/warp-core/src/engine_impl.rs` (begin/apply/commit) + - `crates/warp-core/src/scheduler.rs` (deterministic ordering + independence) + - `crates/warp-core/src/snapshot.rs` (state_root + commit_id hashing) - Wire protocol: - - `crates/echo-session-proto/src/wire.rs` (packet framing + encode/decode) - - `crates/echo-session-proto/src/canonical.rs` (canonical CBOR) + - `crates/echo-session-proto/src/wire.rs` (packet framing + encode/decode) + - `crates/echo-session-proto/src/canonical.rs` (canonical CBOR) - Hub + viewer: - - `crates/echo-session-service/src/main.rs` (hub state machine + enforcement) - - `crates/warp-viewer/src/session_logic.rs` (apply frames + hash checks) + - `crates/echo-session-service/src/main.rs` (hub state machine + enforcement) + - `crates/warp-viewer/src/session_logic.rs` (apply frames + hash checks) ## Commands (Common Workflows) @@ -187,4 +187,3 @@ Aspirational / partially specified (not fully implemented yet): - 
`docs/spec-merkle-commit.md` historically claimed empty list digests used `blake3(b"")`; the engine uses `blake3(0u64.to_le_bytes())` for length-prefixed list digests. Keep this consistent, since it affects hash identity. - diff --git a/docs/phase1-plan.md b/docs/phase1-plan.md index ea244a7a..61b663c4 100644 --- a/docs/phase1-plan.md +++ b/docs/phase1-plan.md @@ -1,5 +1,6 @@ + # Phase 1 – Core Ignition Plan Goal: deliver a deterministic Rust implementation of WARP powering the Echo runtime, with tangible demos at each milestone. This plan outlines task chains, dependencies, and expected demonstrations. @@ -12,6 +13,7 @@ Status (2025-12-30): --- ## Task Graph + ```mermaid graph TD A[1A · WARP Core Bootstrap] @@ -40,63 +42,70 @@ graph TD ## Phases & Tangible Outcomes ### 1A · WARP Core Bootstrap + - Tasks - - Scaffold crates (`warp-core`, `warp-ffi`, `warp-wasm`, `warp-cli`). - - Implement GraphStore primitives, hash utilities, scheduler skeleton. - - CI: `cargo fmt/clippy/test` baseline. -- Demonstration: *None* (foundation only). + - Scaffold crates (`warp-core`, `warp-wasm`, `warp-cli`). + - Implement GraphStore primitives, hash utilities, scheduler skeleton. + - CI: `cargo fmt/clippy/test` baseline. +- Demonstration: _None_ (foundation only). ### 1B · Rewrite Executor Spike + - Tasks - - Implement motion rule test (Position + Velocity rewrite). - - Execute deterministic ordering + snapshot hashing. - - Add minimal diff/commit log entries. + - Implement motion rule test (Position + Velocity rewrite). + - Execute deterministic ordering + snapshot hashing. + - Add minimal diff/commit log entries. - Demonstration: **Demo 2 · Toy Benchmark** - - 100 nodes, 10 rules, property tests showing stable hashes. + - 100 nodes, 10 rules, property tests showing stable hashes. ### 1C · Rhai/TS Bindings + - Tasks - - Expose C ABI for host integrations, embed Rhai with deterministic sandbox + host modules. - - Build WASM bindings for tooling. 
- - Port inspector CLI to use snapshots. + - Embed Rhai with deterministic sandbox + host modules. + - Build WASM bindings for tooling. + - Port inspector CLI to use snapshots. - Demonstration: Rhai script triggers rewrite; inspector shows matching snapshot hash. ### 1D · Echo ECS on WARP + - Tasks - - Map existing ECS system set onto rewrite rules. - - Replace Codex’s Baby event queue with rewrite intents. - - Emit frame hash HUD. + - Map existing ECS system set onto rewrite rules. + - Replace Codex’s Baby event queue with rewrite intents. + - Emit frame hash HUD. - Demonstration: **Demo 1 · Deterministic Netcode** - - Two instances, identical inputs, frame hash displayed per tick. + - Two instances, identical inputs, frame hash displayed per tick. ### 1E · Networking & Confluence MVP + - Tasks - - Implement rewrite transaction packets; replay on peers. - - Converge canonical snapshots; handle conflicts deterministically. - - Integrate rollback path (branch rewind, replay log). + - Implement rewrite transaction packets; replay on peers. + - Converge canonical snapshots; handle conflicts deterministically. + - Integrate rollback path (branch rewind, replay log). - Demonstration: **Demo 5 · Time Travel** - - Fork, edit, merge branch; show canonical outcome. + - Fork, edit, merge branch; show canonical outcome. ### 1F · Tooling Integration + - Tasks - - Echo Studio (TS + WASM) graph viewer with live updates. - - Entropy lens, paradox heatmap overlays. - - Rhai live coding pipeline (hot reload). + - Echo Studio (TS + WASM) graph viewer with live updates. + - Entropy lens, paradox heatmap overlays. + - Rhai live coding pipeline (hot reload). - Demonstrations: - - **Demo 3 · Real Benchmark** (1k nodes, 100 rules). - - **Demo 6 · Live Coding** (Rhai edit updates live graph). + - **Demo 3 · Real Benchmark** (1k nodes, 100 rules). + - **Demo 6 · Live Coding** (Rhai edit updates live graph). 
--- ## Performance / Benchmark Milestones -| Milestone | Target | Notes | -| --------- | ------ | ----- | -| Toy Benchmark | 100 nodes / 10 rules / 200 iterations < 1ms | Demo 2 | -| Real Demo | 1,000 nodes / 100 rules < 10ms rewrite checks | Demo 3 | -| Production Stretch | 10,000 nodes / 1000 rules (profiling only) | Phase 2 optimizations | +| Milestone | Target | Notes | +| ------------------ | --------------------------------------------- | --------------------- | +| Toy Benchmark | 100 nodes / 10 rules / 200 iterations < 1ms | Demo 2 | +| Real Demo | 1,000 nodes / 100 rules < 10ms rewrite checks | Demo 3 | +| Production Stretch | 10,000 nodes / 1000 rules (profiling only) | Phase 2 optimizations | Optimization roadmap once baseline is working: + 1. Incremental pattern matching. 2. Spatial indexing. 3. SIMD bitmap operations. @@ -105,15 +114,17 @@ Optimization roadmap once baseline is working: --- ## Networking Demo Targets -| Mode | Deliverable | -| ---- | ----------- | -| Lockstep | Replay identical inputs; frame hash equality per tick. | -| Rollback | Predictive input with rollback on mismatch. | + +| Mode | Deliverable | +| --------- | --------------------------------------------------------------- | +| Lockstep | Replay identical inputs; frame hash equality per tick. | +| Rollback | Predictive input with rollback on mismatch. | | Authority | Host selects canonical branch; entropy auditor rejects paradox. | --- ## Documentation Checklist + - Update `docs/warp-runtime-architecture.md` as rules/loop evolve. Phase 1 completes when Demo 6 (Live Coding) runs atop the Rust WARP runtime with inspector tooling in place, using Rhai as the scripting layer. diff --git a/docs/rust-rhai-ts-division.md b/docs/rust-rhai-ts-division.md index 0ee904e1..e59ad3c3 100644 --- a/docs/rust-rhai-ts-division.md +++ b/docs/rust-rhai-ts-division.md @@ -1,14 +1,16 @@ + # Language & Responsibility Map (Phase 1) Echo’s runtime stack is intentionally stratified. 
Rust owns the deterministic graph engine; Rhai sits on top for gameplay scripting; TypeScript powers the tooling layer via WebAssembly bindings. This document captures what lives where as we enter Phase 1 (Core Ignition). --- -## Rust (warp-core, ffi, wasm, cli) +## Rust (warp-core, wasm, cli) ### Responsibilities + - WARP engine: GraphStore, PatternGraph, RewriteRule, DeterministicScheduler, commit/Snapshot APIs. - ECS foundations: Worlds, Systems, Components expressed as rewrite rules. - Timeline & Branch tree: rewrite transactions, snapshot hashing, concurrency guard rails. @@ -17,53 +19,59 @@ Echo’s runtime stack is intentionally stratified. Rust owns the deterministic - Asset pipeline: import/export graphs, payload storage, zero-copy access. - Confluence: distributed synchronization of rewrite transactions. - Rhai engine hosting: embed Rhai with deterministic module set; expose WARP bindings. -- CLI tools: `warp` command for apply/snapshot/diff/verify. +- CLI tools: `echo-cli` with `verify`, `bench`, and `inspect` subcommands. ### Key Crates -- `warp-core` – core engine -- `warp-ffi` – C ABI for host/native consumers; Rhai binds directly in-process + +- `warp-core` – core engine; Rhai binds directly in-process - `warp-wasm` – WASM build for tooling/editor -- `warp-cli` – CLI utilities +- `warp-cli` – CLI utilities (`echo-cli` binary: verify, bench, inspect) --- ## Rhai (gameplay authoring layer) -### Responsibilities +### Rhai Responsibilities + - Gameplay systems & components (e.g., AI state machines, quests, input handling). - Component registration, entity creation/destruction via exposed APIs. - Scripting for deterministic “async” (scheduled events through Codex’s Baby). - Editor lenses and inspector overlays written in Rhai for rapid iteration. ### Constraints + - Single-threaded per branch; no OS threads. - Engine budgeted deterministically per tick. - Mutations occur through rewrite intents (`warp.apply(...)`), not raw memory access. 
### Bindings + - `warp` Rhai module providing: - - `apply(rule_name, scope, params)` - - `delay(seconds, fn)` (schedules replay-safe events) - - Query helpers (read components, iterate entities) - - Capability-guarded operations (world:rewrite, asset:import, etc.) + - `apply(rule_name, scope, params)` + - `delay(seconds, fn)` (schedules replay-safe events) + - Query helpers (read components, iterate entities) + - Capability-guarded operations (world:rewrite, asset:import, etc.) --- ## TypeScript / Web Tooling -### Responsibilities +### TypeScript Responsibilities + - Echo Studio (graph IDE) – visualizes world graph, rewrites, branch tree. - Inspector dashboards – display Codex, entropy, paradox frames. - Replay/rollback visualizers, network debugging tools. - Plugin builders and determinism test harness UI. ### Integration + - Uses `warp-wasm` to call into WARP engine from the browser. - IPC/WebSocket for live inspector feeds (`InspectorEnvelope`). - Works with JSONL logs for offline analysis. - All mutations go through bindings; tooling never mutates state outside WARP APIs. ### Tech + - Frontend frameworks: React/Svelte/Vanilla as needed. - WebGPU/WebGL for graph visualization. - TypeScript ensures type safety for tooling code. @@ -71,6 +79,7 @@ Echo’s runtime stack is intentionally stratified. Rust owns the deterministic --- ## Summary + - Rust: core deterministic runtime + binding layers. - Rhai: gameplay logic, editor lenses, deterministic script-level behavior. - TypeScript: visualization and tooling on top of WASM/IPC. diff --git a/docs/spec-warp-core.md b/docs/spec-warp-core.md index 91ebb854..b1241a5f 100644 --- a/docs/spec-warp-core.md +++ b/docs/spec-warp-core.md @@ -1,12 +1,13 @@ + # `warp-core` — WARP Core Runtime & API Tour -> + > **Background:** For a gentler introduction, see [WARP Primer](/guide/warp-primer). 
This document is a **tour of the `warp-core` crate**: the core data model, -deterministic boundary artifacts, and the runtime APIs that higher layers (`warp-ffi`, -`warp-wasm`, tools, and eventually the full Echo runtime) build on. +deterministic boundary artifacts, and the runtime APIs that higher layers (`warp-wasm`, +tools, and eventually the full Echo runtime) build on. If you only remember one thing: @@ -80,7 +81,7 @@ Key types (from `ident.rs`): - `WarpId(Hash)` — namespacing identity for Stage B1 WarpInstances (“layers”). - `TypeId(Hash)` — meaning tag for either skeleton typing (node/edge record types) or attachment atoms. -Stage B1 adds *instance-scoped keys*: +Stage B1 adds _instance-scoped keys_: - `NodeKey { warp_id: WarpId, local_id: NodeId }` - `EdgeKey { warp_id: WarpId, local_id: EdgeId }` @@ -103,15 +104,15 @@ Construction helpers: `GraphStore` is the in-memory store for one warp instance (one `warp_id`): - Skeleton plane: - - `nodes: BTreeMap` - - `edges_from: BTreeMap>` (adjacency buckets) - - `edges_to: BTreeMap>` (reverse adjacency, used for fast deletes) + - `nodes: BTreeMap` + - `edges_from: BTreeMap>` (adjacency buckets) + - `edges_to: BTreeMap>` (reverse adjacency, used for fast deletes) - Attachment plane (stored separately, but co-located in the struct): - - `node_attachments: BTreeMap` (node-attachment plane) - - `edge_attachments: BTreeMap` (edge-attachment plane) + - `node_attachments: BTreeMap` (node-attachment plane) + - `edge_attachments: BTreeMap` (edge-attachment plane) - Reverse indexes: - - `edge_index: BTreeMap` (EdgeId → from) - - `edge_to_index: BTreeMap` (EdgeId → to) + - `edge_index: BTreeMap` (EdgeId → from) + - `edge_to_index: BTreeMap` (EdgeId → to) Design intent: @@ -176,7 +177,7 @@ The engine does not decode attachments in matching/indexing. 
Typed boundaries us - `trait Codec { const TYPE_ID: TypeId; fn encode_canon(&T)->Bytes; fn decode_strict(&Bytes)->Result; }` - `AtomPayload::decode_for_match` encodes the v0 decode-failure policy: - - type mismatch or decode error ⇒ “rule does not apply” + - type mismatch or decode error ⇒ “rule does not apply” --- @@ -252,7 +253,7 @@ Commit hash v2 commits to: - `patch_digest` (replayable delta) - `policy_id` -Plan/decision/rewrites digests remain deterministic diagnostics but are *not* committed by v2. +Plan/decision/rewrites digests remain deterministic diagnostics but are _not_ committed by v2. See `docs/spec-merkle-commit.md` for the canonical encoding. ### 8.2 `TickReceipt`: Paper II outcomes @@ -306,7 +307,7 @@ Crucial correctness law: ### 9.1 Worked example: descent-chain reads become `Footprint.a_read` -The engine enforces the law in `Engine::apply_in_warp` by *injecting* the descent +The engine enforces the law in `Engine::apply_in_warp` by _injecting_ the descent chain into the footprint before the candidate is enqueued: ```rust @@ -411,9 +412,9 @@ and `WarpTickPatchV1` alongside the snapshot hash. The minimal “B1-shaped” workflow is: -1) establish a portal (`OpenPortal`) from a node-owned attachment slot (Alpha plane) to a child `WarpId` -2) apply a rewrite inside the child warp using `Engine::apply_in_warp` with a `descent_stack` containing that portal key -3) verify the tick patch `in_slots` includes the portal slot, and slicing pulls in the portal-opening tick +1. establish a portal (`OpenPortal`) from a node-owned attachment slot (Alpha plane) to a child `WarpId` +2. apply a rewrite inside the child warp using `Engine::apply_in_warp` with a `descent_stack` containing that portal key +3. 
verify the tick patch `in_slots` includes the portal slot, and slicing pulls in the portal-opening tick ```rust use warp_core::{ @@ -557,6 +558,6 @@ assert_eq!(ticks, vec![0, 1]); Notes: -- `Engine::apply_in_warp(..., descent_stack)` is the *only* place the engine needs to “know about recursion” +- `Engine::apply_in_warp(..., descent_stack)` is the _only_ place the engine needs to “know about recursion” for correctness: the hot path still matches within an instance skeleton only. - If you don’t record descent-chain reads, you can build a system that “looks right” but produces incorrect slices. diff --git a/docs/spec/SPEC-0005-provenance-payload.md b/docs/spec/SPEC-0005-provenance-payload.md new file mode 100644 index 00000000..86249649 --- /dev/null +++ b/docs/spec/SPEC-0005-provenance-payload.md @@ -0,0 +1,708 @@ + + + +# SPEC-0005: Provenance Payload + +**Status:** Draft +**Authors:** James Ross +**Prerequisite:** SPEC-0004 (Worldlines, Playback, TruthBus) +**Blocks:** PP-2 (Implementation), Time Travel Debugging + +--- + +## 1. Purpose + +This specification translates the provenance formalism from Paper III (AION +Foundations) into concrete Echo types. It defines the data structures needed +to answer "show me why" queries — tracing any observed state back through the +causal chain of tick patches that produced it. + +### Scope + +- **In scope:** Type definitions, wire format, composition rules, bridge to + existing APIs, attestation envelope structure. +- **Out of scope:** Implementation (PP-2+), storage tiers (echo-cas), network + transport, consensus protocols. + +--- + +## 2. 
Glossary Mapping — Paper III → Echo + +| Paper III Symbol | Paper III Name | Echo Type | Location | Status | +| ------------------------------ | ------------------------ | --------------------------------------------------- | ----------------------------------- | ---------------------------- | +| `μ_i` | TickPatch | `WorldlineTickPatchV1` | `warp-core/src/worldline.rs` | **Exists** | +| `P = (μ₀, …, μₙ₋₁)` | ProvenancePayload | `ProvenancePayload` | — | **New** | +| `(U₀, P)` | BoundaryEncoding | `(WarpId, ProvenancePayload)` via `ProvenanceStore` | `warp-core/src/provenance_store.rs` | **Partial** | +| `BTR` | BoundaryTransitionRecord | `BoundaryTransitionRecord` | — | **New** | +| `H(μ)` | TickPatchDigest | `WorldlineTickPatchV1::patch_digest` | `worldline.rs` | **Exists** | +| `(h_state, h_patch, h_commit)` | HashTriplet | `HashTriplet` | `worldline.rs` | **Exists** | +| `ρ` | Trace / Receipt | `TickReceipt` | `warp-core/src/receipt.rs` | **Exists** (needs extension) | +| `In(μ)` | Input slots | `WorldlineTickPatchV1::in_slots: Vec` | `worldline.rs` | **Exists** | +| `Out(μ)` | Output slots | `WorldlineTickPatchV1::out_slots: Vec` | `worldline.rs` | **Exists** | +| `𝕡` | Provenance graph | `ProvenanceGraph` | — | **New** (algorithm) | +| `D(v)` | Derivation graph | `DerivationGraph` | — | **New** (algorithm) | +| `W` | Worldline | `WorldlineId` | `worldline.rs` | **Exists** | +| `U₀` | Initial state ref | `WarpId` (via `ProvenanceStore::u0()`) | `provenance_store.rs` | **Exists** | +| `κ` | Policy ID | `WorldlineTickHeaderV1::policy_id: u32` | `worldline.rs` | **Exists** | +| `t` | Global tick | `WorldlineTickHeaderV1::global_tick: u64` | `worldline.rs` | **Exists** | +| `α(v)` | AtomWrite | `AtomWrite` | `worldline.rs` | **Exists** | +| `checkpoint(t)` | State checkpoint | `CheckpointRef` | `provenance_store.rs` | **Exists** | + +--- + +## 3. Inventory — Existing vs. 
New + +### 3.1 Existing Types (no changes required) + +| Type | Role in PP-1 | +| ------------------------- | -------------------------------------------------------------------------------------------------------- | +| `WorldlineTickPatchV1` | The atomic unit of provenance — one tick's delta for one warp. Contains ops, slot I/O, and patch digest. | +| `WorldlineTickHeaderV1` | Shared tick metadata: global_tick, policy_id, rule_pack_id, plan/decision/rewrites digests. | +| `HashTriplet` | Three-way commitment `(state_root, patch_digest, commit_hash)` for verification. | +| `WorldlineId` | Identifies a worldline (history branch). | +| `AtomWrite` | Causal arrow: records which rule mutated which atom at which tick, with old/new values. | +| `ProvenanceStore` (trait) | History access: retrieve patches, expected hashes, outputs, checkpoints per worldline. | +| `LocalProvenanceStore` | In-memory `BTreeMap`-backed implementation of `ProvenanceStore`. | +| `CheckpointRef` | Fast-seek anchor: `(tick, state_hash)`. | +| `TickReceipt` | Candidate outcomes: applied vs. rejected, with blocking causality via `blocked_by`. | +| `TickReceiptEntry` | Per-candidate record: `(rule_id, scope_hash, scope, disposition)`. | +| `SlotId` | Abstract resource identifier: `Node`, `Edge`, `Attachment`, or `Port`. | +| `WarpOp` | Canonical delta operation (8 variants: upsert/delete node/edge, set attachment, portal, instance). | +| `OutputFrameSet` | Ordered channel outputs: `Vec<(ChannelId, Vec)>`. | +| `CursorReceipt` | Provenance envelope for truth delivery: `(session, cursor, worldline, warp, tick, commit_hash)`. | +| `TruthFrame` | Authoritative value with provenance: `(CursorReceipt, channel, value, value_hash)`. 
| + +### 3.2 New Types (defined in this spec) + +| Type | Role in PP-1 | Section | +| ----------------------------------- | ------------------------------------------------------------------------------------- | ------- | +| `ProvenancePayload` | Ordered sequence of tick patches — the "proof" that transforms U₀ into current state. | §4.1 | +| `BoundaryTransitionRecord` | Tamper-evident envelope binding input hash, output hash, payload, and policy. | §4.2 | +| `ProvenanceNode` / `ProvenanceEdge` | Graph nodes/edges for the provenance graph `𝕡`. | §4.3 | +| `DerivationGraph` | Backward causal cone algorithm specification. | §4.4 | + +### 3.3 Extensions to Existing Types + +| Type | Extension | Rationale | +| ---------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- | +| `TickReceipt` | Add `blocking_poset: Vec>` (already exists as `blocked_by`). Extend `TickReceiptRejection` with richer rejection reasons. | Paper III trace `ρ` requires detailed rejection causality. | +| `TickReceiptRejection` | Add: `GuardFailure`, `PreconditionViolation`, `ResourceContention`. | Current `FootprintConflict` is the only rejection reason; richer reasons enable "show me why this rule didn't fire". | + +--- + +## 4. New Type Definitions + +### 4.1 ProvenancePayload + +The provenance payload is an ordered sequence of tick patches that, applied +sequentially to an initial state `U₀`, deterministically reproduce the current +state. + +```rust +/// Ordered sequence of tick patches forming a provenance proof. +/// +/// Invariant: ticks are strictly contiguous over the payload's range. +/// For all i > 0: patches[i].header.global_tick == +/// patches[i-1].header.global_tick + 1. 
+/// Equivalently: patches[i].header.global_tick == start_tick + i, +/// where start_tick = patches[0].header.global_tick. +/// The start tick need NOT be zero — payloads constructed from +/// `from_store(store, wl, 5..10)` begin at tick 5. +/// +/// Paper III: P = (μ₀, μ₁, …, μₙ₋₁) +pub struct ProvenancePayload { + /// The worldline this payload belongs to. + pub worldline_id: WorldlineId, + /// Initial state reference (MVP: WarpId). + pub u0: WarpId, + /// Ordered tick patches. Must be contiguous (no gaps) but may + /// start at any absolute tick. + pub patches: Vec<WorldlineTickPatchV1>, + /// Corresponding hash triplets for each tick (verification anchors). + pub expected: Vec<HashTriplet>, +} +``` + +**Monoid structure (composition):** + +```text +compose(P₁, P₂) = ProvenancePayload { + worldline_id: P₁.worldline_id, // must match P₂ + u0: P₁.u0, + patches: P₁.patches ++ P₂.patches, + expected: P₁.expected ++ P₂.expected, +} +``` + +- Identity: empty payload `(worldline_id, u0, [], [])`. +- Associativity: concatenation is associative. +- Precondition: `P₁.worldline_id == P₂.worldline_id` and + last tick of `P₁` + 1 == first tick of `P₂` (contiguity). + +**Construction from `LocalProvenanceStore`:** + +```rust +impl ProvenancePayload { + pub fn from_store( + store: &impl ProvenanceStore, + worldline_id: WorldlineId, + tick_range: Range<u64>, + ) -> Result { + let u0 = store.u0(worldline_id)?; + let mut patches = Vec::new(); + let mut expected = Vec::new(); + for tick in tick_range { + patches.push(store.patch(worldline_id, tick)?); + expected.push(store.expected(worldline_id, tick)?); + } + Ok(Self { worldline_id, u0, patches, expected }) + } +} +``` + +### 4.2 BoundaryTransitionRecord (BTR) + +A tamper-evident envelope that commits to: + +- The state before (`h_in` — state root at tick start) +- The state after (`h_out` — state root at tick end) +- The full provenance payload +- The policy under which the transition was evaluated + +```rust +/// Tamper-evident record of a state transition boundary.
+/// +/// Paper III: BTR = (h_in, h_out, U₀, P, t, κ) +/// +/// The BTR is the unit of trust for replay verification: given h_in, +/// a verifier can replay P and confirm h_out matches. +pub struct BoundaryTransitionRecord { + /// State root hash before the transition. + pub h_in: Hash, + /// State root hash after the transition. + pub h_out: Hash, + /// Initial state reference. + pub u0: WarpId, + /// The provenance payload (ordered patches). + pub payload: ProvenancePayload, + /// Global tick at transition boundary. + pub tick: u64, + /// Policy ID governing the transition. + pub policy_id: u32, + /// Commit hash binding all fields. + pub commit_hash: Hash, +} +``` + +**Verification algorithm:** + +```text +verify_btr(btr, initial_store): + 1. store ← clone(initial_store) + 2. assert canonical_state_hash(store) == btr.h_in + 3. for each (i, patch) in enumerate(btr.payload.patches): + a. patch.apply_to_store(&mut store) + b. assert canonical_state_hash(store) == btr.payload.expected[i].state_root + 4. assert canonical_state_hash(store) == btr.h_out + 5. recompute commit_hash per §5.4: BLAKE3("echo:btr:v1\0" || h_in || h_out || u0 || payload_digest || tick || policy_id) + 6. assert recomputed == btr.commit_hash +``` + +### 4.3 Provenance Graph Nodes and Edges + +The provenance graph `𝕡` connects tick patches through their slot I/O: +if `Out(μ_i)` ∩ `In(μ_j)` ≠ ∅, there is a causal edge from `μ_i` to `μ_j`. + +```rust +/// A node in the provenance graph. +/// +/// Each node represents one tick patch in one worldline. +pub struct ProvenanceNode { + pub worldline_id: WorldlineId, + pub tick: u64, + pub patch_digest: Hash, + pub in_slots: Vec<SlotId>, + pub out_slots: Vec<SlotId>, +} + +/// A directed edge in the provenance graph. +/// +/// Represents a causal dependency: the source tick produced slots +/// that the target tick consumed. +pub struct ProvenanceEdge { + /// Source tick (producer). + pub from: (WorldlineId, u64), + /// Target tick (consumer).
+    pub to: (WorldlineId, u64),
+    /// The slots that connect them (Out(from) ∩ In(to)).
+    pub shared_slots: Vec<SlotId>,
+}
+```
+
+**Construction algorithm:**
+
+```text
+build_provenance_graph(store, worldline_id, tick_range):
+  nodes ← []
+  edges ← []
+  for tick in tick_range:
+    patch ← store.patch(worldline_id, tick)
+    node ← ProvenanceNode {
+      worldline_id, tick,
+      patch_digest: patch.patch_digest,
+      in_slots: patch.in_slots,
+      out_slots: patch.out_slots,
+    }
+    nodes.push(node)
+
+    // Find causal predecessors.
+    for prev_tick in (0..tick).rev():
+      prev_patch ← store.patch(worldline_id, prev_tick)
+      shared ← intersect(prev_patch.out_slots, patch.in_slots)
+      if !shared.is_empty():
+        edges.push(ProvenanceEdge {
+          from: (worldline_id, prev_tick),
+          to: (worldline_id, tick),
+          shared_slots: shared,
+        })
+
+  return (nodes, edges)
+```
+
+**Optimization note:** In practice, maintain a slot→tick index to avoid the
+O(n²) backward scan. The naive algorithm is shown for specification clarity.
+
+### 4.4 Derivation Graph — Backward Causal Cone
+
+The derivation graph `D(v)` for a slot `v` at tick `t` is the backward
+transitive closure of the provenance graph, restricted to patches that
+contributed (directly or transitively) to the value of `v`.
+
+```rust
+/// Backward causal cone for a specific slot at a specific tick.
+///
+/// Paper III: D(v) = transitive closure of 𝕡 backward from v.
+pub struct DerivationGraph {
+    /// The query: which slot's provenance are we tracing?
+    pub query_slot: SlotId,
+    /// The tick at which the query is evaluated.
+    pub query_tick: u64,
+    /// Provenance nodes in the backward cone (topologically sorted).
+    pub nodes: Vec<ProvenanceNode>,
+    /// Causal edges within the cone.
+    pub edges: Vec<ProvenanceEdge>,
+}
+```
+
+**Algorithm:**
+
+```text
+derive(store, worldline_id, slot, tick):
+  // Seed: find the query tick only if it actually produces the queried slot. 
+ seed_patch ← store.patch(worldline_id, tick) + if slot not in seed_patch.out_slots: + return DerivationGraph { query_slot: slot, query_tick: tick, nodes: [], edges: [] } + + frontier ← { (worldline_id, tick) } + visited ← {} + result_nodes ← [] + result_edges ← [] + + while frontier is not empty: + (wl, t) ← frontier.pop() + if (wl, t) in visited: continue + visited.insert((wl, t)) + + // Every node pulled from the frontier is already known to be + // in the backward cone (it was added because a downstream node + // consumed one of its out_slots). Accept it unconditionally. + patch ← store.patch(wl, t) + node ← ProvenanceNode from patch + result_nodes.push(node) + + // Trace backward through ALL in_slots of this patch. + for in_slot in patch.in_slots: + for prev_tick in (0..t).rev(): + prev_patch ← store.patch(wl, prev_tick) + if in_slot in prev_patch.out_slots: + result_edges.push(ProvenanceEdge { + from: (wl, prev_tick), + to: (wl, t), + shared_slots: [in_slot], + }) + frontier.insert((wl, prev_tick)) + break // Found the most recent producer. + + return DerivationGraph { + query_slot: slot, + query_tick: tick, + nodes: topological_sort(result_nodes), + edges: result_edges, + } +``` + +**Correctness note:** The frontier is seeded only with the query tick (after +verifying it produces `slot`). Each subsequent node is added to the frontier +because a node already in the cone consumed one of its `out_slots`. Therefore, +every node in the frontier is transitively causal — no per-node slot filter +is needed after the seed check. + +--- + +## 5. Wire Format + +### 5.1 Encoding Rules + +All provenance types use canonical CBOR encoding, consistent with warp-core's +`ciborium` conventions: + +- **Integer encoding:** Minimal-length CBOR integers. +- **Map keys:** Sorted lexicographically (canonical CBOR). +- **Byte strings:** Raw `[u8; 32]` for hashes (no hex encoding on wire). +- **Arrays:** CBOR definite-length arrays. 
+ +### 5.2 Domain Separation Tags + +Each type gets a unique domain separator for hash computation, consistent +with `warp_core::domain`: + +| Type | Domain Tag | Bytes | +| ------------------------------- | ------------------------------ | ----- | +| `ProvenancePayload` digest | `echo:provenance_payload:v1\0` | 27 | +| `BoundaryTransitionRecord` hash | `echo:btr:v1\0` | 12 | +| `ProvenanceEdge` identifier | `echo:provenance_edge:v1\0` | 24 | + +These tags MUST be added to `crates/warp-core/src/domain.rs` during +implementation (PP-2). + +### 5.3 ProvenancePayload Digest + +```text +provenance_payload_digest = BLAKE3( + "echo:provenance_payload:v1\0" + worldline_id: [u8; 32] + u0: [u8; 32] + num_patches: u64 (LE) + for each patch: + patch_digest: [u8; 32] +) +``` + +### 5.4 BTR Commit Hash + +```text +btr_hash = BLAKE3( + "echo:btr:v1\0" + h_in: [u8; 32] + h_out: [u8; 32] + u0: [u8; 32] + payload_digest: [u8; 32] + tick: u64 (LE) + policy_id: u32 (LE) +) +``` + +--- + +## 6. Worked Examples + +### 6.1 Three-Tick Accumulator (Paper III Appendix A) + +**Setup:** A single worldline with an accumulator node. Each tick increments +the accumulator by 1. 
+
+```text
+Worldline W, U₀ = warp_id("acc")
+  Tick 0: acc = 0 → acc = 1 (μ₀)
+  Tick 1: acc = 1 → acc = 2 (μ₁)
+  Tick 2: acc = 2 → acc = 3 (μ₂)
+```
+
+**ProvenancePayload:**
+
+```text
+P = {
+  worldline_id: W,
+  u0: warp_id("acc"),
+  patches: [μ₀, μ₁, μ₂],
+  expected: [
+    HashTriplet { state_root: H(acc=1), patch_digest: H(μ₀), commit_hash: C₀ },
+    HashTriplet { state_root: H(acc=2), patch_digest: H(μ₁), commit_hash: C₁ },
+    HashTriplet { state_root: H(acc=3), patch_digest: H(μ₂), commit_hash: C₂ },
+  ],
+}
+```
+
+**BTR for tick 0→2:**
+
+```text
+BTR = {
+  h_in: H(acc=0), // state root at tick 0 start
+  h_out: H(acc=3), // state root at tick 2 end
+  u0: warp_id("acc"),
+  payload: P,
+  tick: 2,
+  policy_id: 0,
+  commit_hash: BLAKE3("echo:btr:v1\0" || h_in || h_out || u0 || H(P) || 2u64 || 0u32),
+}
+```
+
+**Provenance graph:**
+
+```text
+μ₀ → μ₁ → μ₂
+(each tick's out_slots contain the accumulator node; each subsequent
+ tick's in_slots consume it)
+```
+
+**Derivation of acc at tick 2:**
+
+```text
+D(acc) = { μ₀, μ₁, μ₂ } // Full causal cone — every tick contributed.
+```
+
+### 6.2 Branching Fork with Shared Prefix
+
+**Setup:** Two worldlines diverge at tick 3 from a common prefix.
+
+```text
+Worldline W₁:
+  Tick 0-2: shared prefix (μ₀, μ₁, μ₂)
+  Tick 3: branch A operation (μ₃ₐ)
+
+Worldline W₂ (forked from W₁ at tick 2):
+  Tick 0-2: inherited from W₁
+  Tick 3: branch B operation (μ₃ᵦ)
+```
+
+**ProvenancePayloads:**
+
+```text
+P₁ = { worldline_id: W₁, u0, patches: [μ₀, μ₁, μ₂, μ₃ₐ], ... }
+P₂ = { worldline_id: W₂, u0, patches: [μ₀, μ₁, μ₂, μ₃ᵦ], ... }
+```
+
+**Key property:** `P₁.patches[0..3] == P₂.patches[0..3]` (shared prefix).
+The provenance graphs diverge at tick 3.
+
+**Fork creation via `LocalProvenanceStore::fork()`:**
+
+```rust
+store.fork(
+    source: W₁,
+    fork_tick: 2, // Fork after tick 2
+    new_id: W₂,
+)
+```
+
+This copies patches 0..3 (ticks 0 through 2) from W₁ to W₂, then W₂
+independently appends μ₃ᵦ.
+
+---
+
+## 7. 
Bridge to Existing APIs
+
+### 7.1 LocalProvenanceStore::append() → ProvenancePayload
+
+`append()` already stores per-tick patches, expected hash triplets, and
+outputs. A `ProvenancePayload` is constructed by reading back a contiguous
+range of ticks:
+
+```rust
+let payload = ProvenancePayload::from_store(
+    &store,
+    worldline_id,
+    0..store.len(worldline_id)?,
+)?;
+```
+
+No changes to `LocalProvenanceStore` are required for basic payload
+construction.
+
+### 7.2 ProvenancePayload → PlaybackCursor
+
+The `PlaybackCursor` already supports seeking via `seek_to()`, which
+internally replays patches from `ProvenanceStore`. A `ProvenancePayload` can
+feed a cursor by wrapping it in a `ProvenanceStore` adapter:
+
+```rust
+impl ProvenanceStore for ProvenancePayload {
+    fn u0(&self, w: WorldlineId) -> Result<WarpId> { ... }
+    fn len(&self, w: WorldlineId) -> Result<u64> { ... }
+    fn patch(&self, w: WorldlineId, tick: u64) -> Result<WorldlineTickPatchV1> { ... }
+    fn expected(&self, w: WorldlineId, tick: u64) -> Result<HashTriplet> { ... }
+    // outputs, checkpoint_before: delegate or return unavailable
+}
+```
+
+This allows a `PlaybackCursor` to replay directly from a portable provenance
+payload without a full `LocalProvenanceStore`.
+
+### 7.3 TickReceipt Extensions
+
+Current `TickReceiptRejection` has a single variant: `FootprintConflict`.
+For "show me why" queries, richer rejection reasons are needed:
+
+```rust
+pub enum TickReceiptRejection {
+    FootprintConflict,     // Existing
+    GuardFailure,          // New: rule's guard predicate returned false
+    PreconditionViolation, // New: required state missing
+    ResourceContention,    // New: write-write conflict on shared resource
+}
+```
+
+**Migration path:** These are additive enum variants. Existing code matching
+on `FootprintConflict` is unaffected. Wire format uses CBOR enum tags;
+new variants get new tags (old decoders that ignore unknown tags tolerate
+new data; existing tags are unchanged, so old data remains decodable). 
+
+### 7.4 Hash Commitment Compatibility
+
+All new hash computations use BLAKE3 with domain separation, consistent with:
+
+- `compute_state_root_for_warp_store()` — domain `echo:state_root:v1\0`
+- `compute_commit_hash_v2()` — domain `echo:commit_id:v2\0`
+- `compute_tick_commit_hash_v2()` — domain `tick_commit:v2`
+
+New domain tags (§5.2) follow the same `echo:<name>:v1\0` convention.
+
+**No existing hash commitments are changed.** All new types layer on top of
+existing hashes without modifying them.
+
+---
+
+## 8. Attestation Envelope (PP Envelope)
+
+The attestation envelope wraps a `BoundaryTransitionRecord` with
+external claims and signatures. This is the publishable unit of provenance.
+
+### 8.1 Structure
+
+```rust
+/// Provenance attestation envelope.
+///
+/// Wraps a BTR with external claims and cryptographic signatures.
+/// This is the publishable, transferable unit of provenance.
+pub struct ProvenanceEnvelope {
+    /// Header: version, timestamp, envelope ID.
+    pub header: EnvelopeHeader,
+    /// The runtime provenance (BTR).
+    pub btr: BoundaryTransitionRecord,
+    /// External claims about the provenance.
+    pub claims: Vec<ProvenanceClaim>,
+    /// Cryptographic signatures over (header || btr_hash || claims_digest).
+    pub signatures: Vec<EnvelopeSignature>,
+}
+
+pub struct EnvelopeHeader {
+    pub version: u16,
+    pub envelope_id: Hash,
+    pub created_at: u64, // Unix timestamp (seconds)
+}
+```
+
+### 8.2 Claim Types
+
+```rust
+pub enum ProvenanceClaim {
+    /// Identifies the build system that produced the simulation binary.
+    BuiltBy {
+        builder_id: String,
+        build_hash: Hash,
+    },
+    /// References a parent BTR that this one was derived from.
+    DerivedFrom {
+        parent_btr_hash: Hash,
+        relationship: DerivationRelationship,
+    },
+    /// Cryptographic identity of the signer.
+    SignedBy {
+        signer_id: String,
+        public_key: Vec<u8>,
+    },
+    /// Human review attestation. 
+ ReviewedBy { + reviewer_id: String, + review_hash: Hash, + }, +} + +pub enum DerivationRelationship { + Fork, // Branched from parent worldline + Merge, // Merged multiple worldlines + Extension, // Appended ticks to parent +} +``` + +### 8.3 SLSA Alignment + +The `ProvenanceEnvelope` maps to SLSA v1.0 concepts: + +| SLSA Concept | Echo Mapping | +| ------------------ | ------------------------------ | +| Build provenance | `BuiltBy` claim | +| Source provenance | `DerivedFrom` claim chain | +| Verification | BTR replay verification (§4.2) | +| Attestation bundle | `ProvenanceEnvelope` | + +Full SLSA compliance requires additional fields (builder identity URI, +build configuration digest) that are deferred to implementation. + +### 8.4 BTR vs. Envelope + +- **BTR** is _runtime provenance_: it records what happened during simulation + execution. It is produced automatically by the engine. +- **Envelope** is _attestation provenance_: it wraps a BTR with external + claims about who built it, who reviewed it, and what it was derived from. + It is produced by tooling and humans. + +--- + +## 9. Deviation Notes — Echo vs. Paper III + +| Area | Paper III | Echo | Rationale | +| --------------------- | ------------------ | --------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Hash function | Unspecified | BLAKE3 | Performance; keyed mode for future MAC support. | +| Patch encoding | Abstract `μ` | `WorldlineTickPatchV1` with concrete `Vec` | Echo's typed graph ops are the canonical encoding. | +| Initial state | Abstract `U₀` | `WarpId` (MVP) | Sufficient for single-warp worldlines. Multi-warp U₀ requires `WarpState` snapshot (future). | +| Slot model | Abstract resources | `SlotId` enum: `Node`, `Edge`, `Attachment`, `Port` | Four concrete slot types cover Echo's graph model. 
| +| Checkpoint | Not in Paper III | `CheckpointRef { tick, state_hash }` | Pragmatic optimization for fast seeking in long worldlines. | +| Receipt | Abstract trace `ρ` | `TickReceipt` with `TickReceiptEntry` entries | Concrete candidate outcomes with blocking causality. | +| Attestation | Not in Paper III | `ProvenanceEnvelope` with SLSA alignment | Extension for real-world trust chains. | +| Cross-worldline edges | Implicit | Not yet implemented | Provenance graph currently operates within a single worldline. Cross-worldline provenance edges require multi-worldline `ProvenanceStore` queries (future). | + +--- + +## 10. Open Questions + +1. **Multi-warp U₀:** When a worldline spans multiple warp instances, `WarpId` + is insufficient as the initial state reference. Should `U₀` become a + `Vec<(WarpId, Hash)>` (one state root per warp)? + +2. **Provenance graph persistence:** Should the provenance graph be computed + on-demand from `ProvenanceStore`, or materialized and stored? On-demand is + simpler but O(n²) worst case; materialized requires storage management. + +3. **Cross-worldline provenance:** When a fork creates a new worldline, the + provenance graph should have edges from the source worldline to the fork. + The current `ProvenanceEdge` type supports this via + `(WorldlineId, tick)` tuples, but the construction algorithm (§4.3) only + considers a single worldline. Multi-worldline traversal is deferred. + +4. **Envelope signature scheme:** Which signature algorithm? Ed25519 is the + pragmatic default, but the envelope should be algorithm-agnostic (include + an algorithm identifier field). + +--- + +## 11. 
Implementation Roadmap + +| Phase | Deliverable | Depends On | +| ----- | --------------------------------------------------------------------- | ---------- | +| PP-2 | `ProvenancePayload` type + `from_store()` constructor + unit tests | This spec | +| PP-3 | `BoundaryTransitionRecord` type + verification algorithm | PP-2 | +| PP-4 | `ProvenanceGraph` construction + `DerivationGraph` backward cone | PP-3 | +| PP-5 | `TickReceiptRejection` extensions (additive) | PP-2 | +| PP-6 | `ProvenanceEnvelope` + claim types + signature verification | PP-3 | +| PP-7 | Wire format (CBOR) + golden vector tests | PP-2, PP-3 | +| PP-8 | `ProvenancePayload` as `ProvenanceStore` adapter for `PlaybackCursor` | PP-2 | diff --git a/docs/warp-demo-roadmap.md b/docs/warp-demo-roadmap.md index 803aa11e..1ba4d8c6 100644 --- a/docs/warp-demo-roadmap.md +++ b/docs/warp-demo-roadmap.md @@ -1,5 +1,6 @@ + # WARP Demo Roadmap (Phase 1 Targets) This document captures the interactive demos and performance milestones we want to hit as we implement the Rust-based WARP runtime. Each demo proves a key property of Echo’s deterministic multiverse architecture. @@ -58,26 +59,25 @@ This document captures the interactive demos and performance milestones we want - **Input Stream Discipline:** Inputs recorded as timestamped events with deterministic seeds. Replay harness reuses the same log to verify determinism. - **Floating-Point Policy:** All demos rely on fixed-point math or deterministic float wrappers; document configuration in README. - **Performance Targets:** - - Demo 1: tick time ≤ 2 ms on reference hardware (M2 Pro / 32 GB). - - Demo 2: criterion bench median ≤ 0.5 ms; 99th percentile ≤ 1.0 ms. - - Demo 5: sync 10 000 transactions in under 2 s with zero conflicts. + - Demo 1: tick time ≤ 2 ms on reference hardware (M2 Pro / 32 GB). + - Demo 2: criterion bench median ≤ 0.5 ms; 99th percentile ≤ 1.0 ms. + - Demo 5: sync 10 000 transactions in under 2 s with zero conflicts. 
## Roadmap / Dependencies -| Phase | Demo Coverage | Dependencies | -| ----- | ------------- | ------------- | -| 1A | Demo 2 harness scaffolding | Criterion setup, synthetic rewrite fixtures | -| 1B | Demo 1 prototype (local hash) | Motion rewrite spike, snapshot hashing | -| 1C | Demo 4 Rhai API | `warp-ffi` bindings, hot-reload CLI | -| 1D | Demo 3 timeline tooling | Branch tree diff viewer, entropy metrics | -| 1E | Demo 5 networking | Confluence transaction protocol, replay verification | -| 1F | Demo dashboards | Inspector frame overlays, JSON ingestion | - +| Phase | Demo Coverage | Dependencies | +| ----- | ----------------------------- | ---------------------------------------------------- | +| 1A | Demo 2 harness scaffolding | Criterion setup, synthetic rewrite fixtures | +| 1B | Demo 1 prototype (local hash) | Motion rewrite spike, snapshot hashing | +| 1C | Demo 4 Rhai API | Rhai in-process bindings, hot-reload CLI | +| 1D | Demo 3 timeline tooling | Branch tree diff viewer, entropy metrics | +| 1E | Demo 5 networking | Confluence transaction protocol, replay verification | +| 1F | Demo dashboards | Inspector frame overlays, JSON ingestion | **Prerequisites:** BLAKE3 hashing utilities, deterministic PRNG module, snapshot serialiser, inspector graph viewer, CI runners with wasm/criterion toolchains. - **Timeline:** + - Milestone Alpha (end 1B): Demo 1 frame-hash prototype + Demo 2 toy bench executed manually. - Milestone Beta (end 1D): Demos 1–3 automated in CI with golden outputs. - Milestone GA (end 1F): Full demo suite (all five) runnable via `cargo xtask demo` and published as part of release notes. 
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 71fdbf72..dd40bde1 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -12,5 +12,7 @@ publish = false [dependencies] anyhow = "1" clap = { version = "4", features = ["derive"] } +clap_mangen = "0.2" serde = { version = "1", features = ["derive"] } serde_json = "1" +warp-cli = { path = "../crates/warp-cli", version = "0.1.0" } diff --git a/xtask/src/main.rs b/xtask/src/main.rs index f83993f8..54da875e 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -31,6 +31,8 @@ enum Commands { Dags(DagsArgs), /// Run DIND (Deterministic Ironclad Nightmare Drills) harness. Dind(DindArgs), + /// Generate man pages for echo-cli. + ManPages(ManPagesArgs), } #[derive(Args)] @@ -114,12 +116,20 @@ struct DagsArgs { snapshot: Option, } +#[derive(Args)] +struct ManPagesArgs { + /// Output directory for generated man pages. + #[arg(long, default_value = "docs/man")] + out: std::path::PathBuf, +} + fn main() -> Result<()> { let cli = Cli::parse(); match cli.command { Commands::Dags(args) => run_dags(args), Commands::Dind(args) => run_dind(args), + Commands::ManPages(args) => run_man_pages(args), } } @@ -473,3 +483,56 @@ fn load_matching_scenarios( Ok(filtered) } + +fn run_man_pages(args: ManPagesArgs) -> Result<()> { + use clap::CommandFactory; + + let out_dir = &args.out; + std::fs::create_dir_all(out_dir) + .with_context(|| format!("failed to create output directory: {}", out_dir.display()))?; + + // Remove stale man pages so the output is an exact snapshot. 
+ if let Ok(entries) = std::fs::read_dir(out_dir) { + for entry in entries.flatten() { + let name = entry.file_name(); + let name = name.to_string_lossy(); + if name.starts_with("echo-cli") && name.ends_with(".1") { + std::fs::remove_file(entry.path()).with_context(|| { + format!( + "failed to remove stale man page: {}", + entry.path().display() + ) + })?; + } + } + } + + let cmd = warp_cli::cli::Cli::command(); + let man = clap_mangen::Man::new(cmd.clone()); + let mut buf: Vec = Vec::new(); + man.render(&mut buf) + .context("failed to render echo-cli.1")?; + let path = out_dir.join("echo-cli.1"); + std::fs::write(&path, &buf).with_context(|| format!("failed to write {}", path.display()))?; + println!(" wrote {}", path.display()); + + for sub in cmd.get_subcommands() { + let sub_name = sub.get_name().to_string(); + // Leak is fine: xtask is short-lived and we need 'static for clap::Str. + let prefixed_name: &'static str = + Box::leak(format!("echo-cli-{sub_name}").into_boxed_str()); + let prefixed = sub.clone().name(prefixed_name); + let man = clap_mangen::Man::new(prefixed); + let mut buf: Vec = Vec::new(); + man.render(&mut buf) + .with_context(|| format!("failed to render echo-cli-{sub_name}.1"))?; + let filename = format!("echo-cli-{sub_name}.1"); + let path = out_dir.join(&filename); + std::fs::write(&path, &buf) + .with_context(|| format!("failed to write {}", path.display()))?; + println!(" wrote {}", path.display()); + } + + println!("Man pages generated in {}", out_dir.display()); + Ok(()) +}