diff --git a/.ban-unordered-abi-allowlist b/.ban-unordered-abi-allowlist index c7b608b8..eccc9089 100644 --- a/.ban-unordered-abi-allowlist +++ b/.ban-unordered-abi-allowlist @@ -1,7 +1,6 @@ # Non-ABI crates and internal state where HashMap/HashSet are acceptable. **/warp-viewer/** **/echo-session-service/** -**/echo-session-ws-gateway/** **/warp-core/src/engine_impl.rs **/warp-core/src/attachment.rs **/echo-wasm-abi/src/lib.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d4d8eeeb..fd08519c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,408 +3,374 @@ name: CI on: - push: - branches: - - main - pull_request: + push: + branches: + - main + pull_request: jobs: - fmt: - name: Format - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - - name: cargo fmt - run: cargo fmt --all -- --check + fmt: + name: Format + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: cargo fmt + run: cargo fmt --all -- --check - clippy: - name: Clippy - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - with: - components: clippy - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - - name: cargo clippy - run: cargo clippy --all-targets -- -D warnings -D missing_docs + clippy: + name: Clippy + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + with: + components: clippy + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . 
+ - name: cargo clippy + run: cargo clippy --all-targets -- -D warnings -D missing_docs - clippy-det-fixed: - name: Clippy (det_fixed) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - with: - components: clippy - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - - name: cargo clippy (warp-core, det_fixed) - run: cargo clippy -p warp-core --all-targets --features det_fixed -- -D warnings -D missing_docs + clippy-det-fixed: + name: Clippy (det_fixed) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + with: + components: clippy + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: cargo clippy (warp-core, det_fixed) + run: cargo clippy -p warp-core --all-targets --features det_fixed -- -D warnings -D missing_docs - test-workspace: - name: Tests (workspace sans warp-core) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - - name: cargo test (workspace sans warp-core) - run: cargo test --workspace --exclude warp-core + test-workspace: + name: Tests (workspace sans warp-core) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: cargo test (workspace sans warp-core) + run: cargo test --workspace --exclude warp-core - test-warp-core: - name: Tests (warp-core) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . 
- - name: Install cargo-nextest - uses: taiki-e/install-action@5ab5d1729c22acd8f798b267eadcfe5e5be6f5c2 # v2.68.27 - with: - tool: nextest - - name: cargo nextest run (warp-core) - run: cargo nextest run -p warp-core - - name: cargo test --doc (warp-core) - run: cargo test -p warp-core --doc - - name: PRNG golden regression (warp-core) - run: cargo test -p warp-core --features golden_prng --test prng_golden_regression + build-echo-cas-wasm: + name: Build echo-cas (wasm32) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + with: + targets: wasm32-unknown-unknown + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: cargo build (echo-cas, wasm32) + run: cargo build --target wasm32-unknown-unknown -p echo-cas - test: - name: Tests - runs-on: ubuntu-latest - needs: - - test-workspace - - test-warp-core - if: always() - steps: - - name: Require test shard success - shell: bash - run: | - set -euo pipefail - workspace_result="${{ needs.test-workspace.result }}" - warp_core_result="${{ needs.test-warp-core.result }}" - if [[ "$workspace_result" != "success" ]]; then - echo "workspace shard result: $workspace_result" >&2 - exit 1 - fi - if [[ "$warp_core_result" != "success" ]]; then - echo "warp-core shard result: $warp_core_result" >&2 - exit 1 - fi - echo "All test shards passed." + test-warp-core: + name: Tests (warp-core) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . 
+ - name: Install cargo-nextest + uses: taiki-e/install-action@5ab5d1729c22acd8f798b267eadcfe5e5be6f5c2 # v2.68.27 + with: + tool: nextest + - name: cargo nextest run (warp-core) + run: cargo nextest run -p warp-core + - name: cargo test --doc (warp-core) + run: cargo test -p warp-core --doc + - name: PRNG golden regression (warp-core) + run: cargo test -p warp-core --features golden_prng --test prng_golden_regression - test-musl: - name: Tests (musl) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - with: - targets: x86_64-unknown-linux-musl - - name: Install musl tools - run: sudo apt-get update && sudo apt-get install -y musl-tools - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - # Intentionally test only warp-core under MUSL; warp-wasm targets wasm32 - # (wasm-bindgen/js-sys) and has separate cross-compilation concerns. - - name: cargo test (warp-core, musl) - run: cargo test -p warp-core --target x86_64-unknown-linux-musl - - test-det-fixed: - name: Tests (det_fixed) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . - - name: cargo test (warp-core, det_fixed) - run: cargo test -p warp-core --features det_fixed - - test-musl-det-fixed: - name: Tests (musl, det_fixed) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - with: - targets: x86_64-unknown-linux-musl - - name: Install musl tools - run: sudo apt-get update && sudo apt-get install -y musl-tools - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . 
- - name: cargo test (warp-core, musl, det_fixed) - run: cargo test -p warp-core --features det_fixed --target x86_64-unknown-linux-musl - - rust-version: - name: Rust Version Guard - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - name: Check workspace rust-version declarations - shell: bash - run: | - set -euo pipefail - script="scripts/check_rust_versions.sh" - if [[ ! -x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - name: Rust version checker self-tests - shell: bash - run: | - set -euo pipefail - bash scripts/tests/check_rust_versions_test.sh - - task-lists: - name: Task Lists Guard - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Check task lists for contradictions - shell: bash - run: | - set -euo pipefail - script="scripts/check_task_lists.sh" - if [[ ! -x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - name: Task list checker self-tests - shell: bash - run: | - set -euo pipefail - bash scripts/tests/check_task_lists_test.sh - - deterministic-math-guard: - name: Deterministic Math Guard - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Forbid raw trig in warp-core math - shell: bash - run: | - set -euo pipefail - script="scripts/check_no_raw_trig.sh" - if [[ ! -x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - determinism-guards: - name: Determinism Guards - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Install ripgrep - run: sudo apt-get update && sudo apt-get install -y ripgrep - - name: Ban globals - shell: bash - run: | - set -euo pipefail - script="scripts/ban-globals.sh" - if [[ ! 
-x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - name: Ban nondeterminism - shell: bash - run: | - set -euo pipefail - script="scripts/ban-nondeterminism.sh" - if [[ ! -x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - name: Ban unordered ABI containers - shell: bash - run: | - set -euo pipefail - script="scripts/ban-unordered-abi.sh" - if [[ ! -x "$script" ]]; then - echo "Error: $script is missing or not executable" >&2 - ls -la scripts || true - exit 1 - fi - "$script" - - dind-pr: - name: DIND (PR set) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: actions/setup-node@v4 - with: - node-version: "20" - - name: Run DIND PR suite (full hash) - run: node scripts/dind-run-suite.mjs --tags pr - - benches: - name: Benchmarks (compilation gate) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . 
- - name: cargo check (benches) - run: cargo check --benches -p warp-benches - - # MSRV job removed per policy: use @stable everywhere - - playwright: - name: E2E (Playwright) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false + test: + name: Tests + runs-on: ubuntu-latest + needs: + - test-workspace + - test-warp-core + if: always() + steps: + - name: Require test shard success + shell: bash + run: | + set -euo pipefail + workspace_result="${{ needs.test-workspace.result }}" + warp_core_result="${{ needs.test-warp-core.result }}" + if [[ "$workspace_result" != "success" ]]; then + echo "workspace shard result: $workspace_result" >&2 + exit 1 + fi + if [[ "$warp_core_result" != "success" ]]; then + echo "warp-core shard result: $warp_core_result" >&2 + exit 1 + fi + echo "All test shards passed." - - uses: dtolnay/rust-toolchain@1.90.0 + test-musl: + name: Tests (musl) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + with: + targets: x86_64-unknown-linux-musl + - name: Install musl tools + run: sudo apt-get update && sudo apt-get install -y musl-tools + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + # Intentionally test only warp-core under MUSL; warp-wasm targets wasm32 + # (wasm-bindgen/js-sys) and has separate cross-compilation concerns. + - name: cargo test (warp-core, musl) + run: cargo test -p warp-core --target x86_64-unknown-linux-musl - - uses: Swatinem/rust-cache@v2 - with: - workspaces: | - . + test-det-fixed: + name: Tests (det_fixed) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . 
+ - name: cargo test (warp-core, det_fixed) + run: cargo test -p warp-core --features det_fixed - - uses: actions/setup-node@v4 - with: - node-version: "20" + test-musl-det-fixed: + name: Tests (musl, det_fixed) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + with: + targets: x86_64-unknown-linux-musl + - name: Install musl tools + run: sudo apt-get update && sudo apt-get install -y musl-tools + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: cargo test (warp-core, musl, det_fixed) + run: cargo test -p warp-core --features det_fixed --target x86_64-unknown-linux-musl - - uses: pnpm/action-setup@v4 - with: - version: 10.23.0 - run_install: false + rust-version: + name: Rust Version Guard + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - name: Check workspace rust-version declarations + shell: bash + run: | + set -euo pipefail + script="scripts/check_rust_versions.sh" + if [[ ! -x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" + - name: Rust version checker self-tests + shell: bash + run: | + set -euo pipefail + bash scripts/tests/check_rust_versions_test.sh - - name: Resolve pnpm store path - shell: bash - run: | - set -euo pipefail - echo "PNPM_STORE_PATH=$(pnpm store path --silent)" >> "$GITHUB_ENV" + task-lists: + name: Task Lists Guard + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Check task lists for contradictions + shell: bash + run: | + set -euo pipefail + script="scripts/check_task_lists.sh" + if [[ ! 
-x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" + - name: Task list checker self-tests + shell: bash + run: | + set -euo pipefail + bash scripts/tests/check_task_lists_test.sh - - name: Cache pnpm store - uses: actions/cache@v4 - with: - path: ${{ env.PNPM_STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- + man-pages: + name: Man Pages Freshness + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . + - name: Check generated echo-cli man pages + run: cargo xtask man-pages --check - - name: Cache Playwright browsers - uses: actions/cache@v4 - with: - path: ~/.cache/ms-playwright - key: ${{ runner.os }}-playwright-${{ hashFiles('pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-playwright- + deterministic-math-guard: + name: Deterministic Math Guard + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Forbid raw trig in warp-core math + shell: bash + run: | + set -euo pipefail + script="scripts/check_no_raw_trig.sh" + if [[ ! -x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" - - name: Install JS dependencies - run: pnpm install --frozen-lockfile + determinism-guards: + name: Determinism Guards + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Install ripgrep + run: sudo apt-get update && sudo apt-get install -y ripgrep + - name: Ban globals + shell: bash + run: | + set -euo pipefail + script="scripts/ban-globals.sh" + if [[ ! 
-x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" + - name: Ban nondeterminism + shell: bash + run: | + set -euo pipefail + script="scripts/ban-nondeterminism.sh" + if [[ ! -x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" + - name: Ban unordered ABI containers + shell: bash + run: | + set -euo pipefail + script="scripts/ban-unordered-abi.sh" + if [[ ! -x "$script" ]]; then + echo "Error: $script is missing or not executable" >&2 + ls -la scripts || true + exit 1 + fi + "$script" - - name: Install Playwright Chromium - run: pnpm exec playwright install --with-deps chromium + dind-pr: + name: DIND (PR set) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: actions/setup-node@v4 + with: + node-version: "20" + - name: Run DIND PR suite (full hash) + run: node scripts/dind-run-suite.mjs --tags pr - - name: Playwright e2e - run: pnpm exec playwright test + benches: + name: Benchmarks (compilation gate) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + . 
+ - name: cargo check (benches) + run: cargo check --benches -p warp-benches - - name: Upload Playwright report - if: always() - uses: actions/upload-artifact@v4 - with: - name: playwright-report - path: | - playwright-report - test-results - if-no-files-found: ignore - retention-days: 7 + # MSRV job removed per policy: use @stable everywhere - rustdoc: - name: Rustdoc (warnings gate) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: false - - uses: dtolnay/rust-toolchain@1.90.0 - - uses: Swatinem/rust-cache@v2 - - name: rustdoc warnings gate (warp-core) - run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-core --no-deps - - name: rustdoc warnings gate (warp-geom) - run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-geom --no-deps - - name: rustdoc warnings gate (warp-wasm) - run: | - if [ -f crates/warp-wasm/Cargo.toml ]; then RUSTDOCFLAGS="-D warnings" cargo doc -p warp-wasm --no-deps; fi + rustdoc: + name: Rustdoc (warnings gate) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + submodules: false + - uses: dtolnay/rust-toolchain@1.90.0 + - uses: Swatinem/rust-cache@v2 + - name: rustdoc warnings gate (warp-core) + run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-core --no-deps + - name: rustdoc warnings gate (warp-geom) + run: RUSTDOCFLAGS="-D warnings" cargo doc -p warp-geom --no-deps + - name: rustdoc warnings gate (warp-wasm) + run: | + if [ -f crates/warp-wasm/Cargo.toml ]; then RUSTDOCFLAGS="-D warnings" cargo doc -p warp-wasm --no-deps; fi - deny: - name: Dependency Policy (cargo-deny) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - - name: Pre-fetch crates (locked) - run: cargo fetch --locked - - name: cargo-audit runner self-tests - shell: bash - run: | - set -euo pipefail - bash scripts/tests/run_cargo_audit_test.sh - - name: Run cargo-deny - uses: 
EmbarkStudios/cargo-deny-action@76cd80eb775d7bbbd2d80292136d74d39e1b4918 # v2.0.14 + deny: + name: Dependency Policy (cargo-deny) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - name: Pre-fetch crates (locked) + run: cargo fetch --locked + - name: cargo-audit runner self-tests + shell: bash + run: | + set -euo pipefail + bash scripts/tests/run_cargo_audit_test.sh + - name: Run cargo-deny + uses: EmbarkStudios/cargo-deny-action@76cd80eb775d7bbbd2d80292136d74d39e1b4918 # v2.0.14 diff --git a/.github/workflows/refresh-dependency-dags.yml b/.github/workflows/refresh-dependency-dags.yml index 9a797b88..59e1dc21 100644 --- a/.github/workflows/refresh-dependency-dags.yml +++ b/.github/workflows/refresh-dependency-dags.yml @@ -36,9 +36,6 @@ jobs: - name: Install Graphviz run: sudo apt-get update && sudo apt-get install -y graphviz - - name: Generate Tasks DAG assets - run: node scripts/generate-tasks-dag.js - - name: Generate DAG DOT+SVG (from GitHub) env: GH_TOKEN: ${{ github.token }} diff --git a/.gitignore b/.gitignore index bfccc3bb..73defebd 100644 --- a/.gitignore +++ b/.gitignore @@ -48,10 +48,6 @@ artifacts/pr-review/ __pycache__/ *.pyc -# Playwright artifacts -test-results -playwright-report - # LaTeX build artifacts (keep PDFs tracked) *.aux *.log diff --git a/CHANGELOG.md b/CHANGELOG.md index 3cc77c3d..6232acea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,12 @@ ### Added +- `echo-registry-api::verify_contract_artifact(...)` — generic load-time + verification for Wesley-generated registries, including schema/codec/layout + checks, expected footprint certificate hashes, optional generated artifact + hashes, and a policy switch requiring all mutation operations to be backed by + an expected certificate before the artifact is treated as + compile-time-certified. 
- Cycle 0003 (dt policy) — ratify fixed timestep as default, variable-dt as opt-in admitted stream, braidability constraint for settlement. - `KERNEL_strand-contract` backlog item — strand as a first-class @@ -45,6 +51,29 @@ ### Fixed (PR #326 follow-up) +- Added regression coverage that rejects trailing whitespace in the committed + `echo-cli --help` golden fixture, and cleaned the existing padded blank line. +- Split generated contract artifact verification into `MetadataVerified` and + `CompileTimeCertified` postures so weak or metadata-only host policies cannot + accidentally enable the trusted footprint fast path. +- Strengthened Wesley footprint certificate artifact hashes so they incorporate + a generated Rust artifact manifest hash and operation argument shape instead + of only the declared read/write footprint. +- Changed GraphQL SDL operation id generation to fail closed on derived id + collisions instead of silently incrementing persisted ABI ids. +- Replaced generated query optic variable digests with Echo ABI's + domain-separated BLAKE3 `query_vars_digest_v1(...)` helper. +- Made built-in observation request helpers fail closed on invalid + frame/projection pairs instead of silently falling back to `QueryBytes`. +- Restored the CodeRabbit archive path filter and added a hook regression guard + so frozen `docs/archive/**` files stay out of automated review. +- Split the large `warp-core` optic module test body into `optic/tests.rs` and + added a hook guard so production optic code is no longer buried under the + test suite. +- Verified imported witnessed causal suffix bundle digests before admission and + reject forged retained-shell identities. +- Validated exported witnessed suffix boundary witnesses against the source + worldline and resolved base/target frontier range. 
- Fixed Wesley-generated helper output so helper-only vars and intent error types live in a generated namespace instead of colliding with user contract types, while preserving top-level helper function re-exports, and added diff --git a/CONTINUUM.md b/CONTINUUM.md deleted file mode 100644 index 00de5352..00000000 --- a/CONTINUUM.md +++ /dev/null @@ -1,23 +0,0 @@ - - - -# Continuum - -Status: Archived platform memo - -This file used to describe a broader multi-repo Continuum platform around Echo. That language is no longer the live documentation frame for this repo. - -Echo's current docs describe Echo directly: - -- runtime carrier state in `warp-core` -- deterministic settlement through footprints and tick patches -- retained worldlines and provenance shells -- observer-relative readings through the observation and ABI surfaces - -Start with: - -- [Echo docs map](docs/index.md) -- [Echo runtime model](docs/architecture/outline.md) -- [Echo theory map](docs/theory/THEORY.md) - -Git history is the archive for the older Continuum memo. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a6886150..7a5ac5f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -59,11 +59,9 @@ Echo is a deterministic, renderer-agnostic engine. We prioritize: `warp-core` integration-test binary before applying that runtime filter. - The broader local gate must pass before PR submission. - Add unit/integration coverage for new logic; Rhai/TypeScript tooling will regain coverage when reintroduced. -- For WASM / living specs: - - Install toolchain target: `rustup target add wasm32-unknown-unknown`. - - Install Trunk once: `cargo install --locked trunk`. - - Dev loop for Spec-000: from repo root run `make spec-000-dev` (hot reload at `http://127.0.0.1:8080`). - - Release build: `make spec-000-build` (outputs to `specs/spec-000-rewrite/dist/`). 
+- For WASM work, install the required target with + `rustup target add wasm32-unknown-unknown` and use the crate-specific build + instructions for the surface you are changing. ## Documentation & Telemetry diff --git a/Cargo.lock b/Cargo.lock index 60a862bc..7da1da26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,6 +11,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anes" version = "0.1.6" @@ -67,23 +76,23 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "any_spawner" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1384d3fe1eecb464229fcf6eebb72306591c56bf27b373561489458a7c73027d" -dependencies = [ - "futures", - "thiserror 2.0.17", - "wasm-bindgen-futures", -] - [[package]] name = "anyhow" version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +[[package]] +name = "apollo-parser" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "947e21ff51879f8a40d7519dfe619268de2afba4042a8a43878276de3cb910f0" +dependencies = [ + "memchr", + "rowan", + "thiserror 2.0.17", +] + [[package]] name = "arrayref" version = "0.3.9" @@ -111,23 +120,6 @@ dependencies = [ "wait-timeout", ] -[[package]] -name = "async-lock" -version = "3.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" -dependencies = [ - "event-listener", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-once-cell" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a" - [[package]] name = "async-trait" version = "0.1.89" @@ -139,54 +131,12 @@ dependencies = [ "syn", ] -[[package]] -name = "attribute-derive" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05832cdddc8f2650cc2cc187cc2e952b8c133a48eb055f35211f61ee81502d77" -dependencies = [ - "attribute-derive-macro", - "derive-where", - "manyhow", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "attribute-derive-macro" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a7cdbbd4bd005c5d3e2e9c885e6fa575db4f4a3572335b974d8db853b6beb61" -dependencies = [ - "collection_literals", - "interpolator", - "manyhow", - "proc-macro-utils", - "proc-macro2", - "quote", - "quote-use", - "syn", -] - [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" -[[package]] -name = "base16" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d27c3610c36aee21ce8ac510e6224498de4228ad772a171ed65643a24693a5a8" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - [[package]] name = "bit-set" version = "0.8.0" @@ -267,6 +217,12 @@ dependencies = [ "syn", ] +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.11.1" @@ -276,12 +232,6 @@ dependencies = [ "serde", ] -[[package]] -name = "camino" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" - [[package]] name = "cast" version = "0.3.0" @@ -304,6 +254,20 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + [[package]] name = "ciborium" version = "0.2.2" @@ -381,23 +345,6 @@ dependencies = [ "roff", ] -[[package]] -name = "codee" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9dbbdc4b4d349732bc6690de10a9de952bd39ba6a065c586e26600b6b0b91f5" -dependencies = [ - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "collection_literals" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2550f75b8cfac212855f6b1885455df8eaee8fe8e246b647d69146142e016084" - [[package]] name = "colorchoice" version = "1.0.4" @@ -415,28 +362,6 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "config" -version = "0.15.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30fa8254caad766fc03cb0ccae691e14bf3bd72bfff27f72802ce729551b3d6" -dependencies = [ - "convert_case 0.6.0", - "pathdiff", - "serde_core", - "toml", - "winnow", -] - [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -447,38 +372,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "const-str" -version = 
"0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0664d2867b4a32697dfe655557f5c3b187e9b605b38612a748e5ec99811d160" - -[[package]] -name = "const_format" -version = "0.2.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "const_str_slice_concat" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f67855af358fcb20fac58f9d714c94e2b228fe5694c1c9b4ead4a366343eda1b" - [[package]] name = "constant_time_eq" version = "0.3.1" @@ -486,31 +379,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" [[package]] -name = "convert_case" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "convert_case" -version = "0.8.0" +name = "core-foundation-sys" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" -dependencies = [ - "unicode-segmentation", -] +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] -name = "convert_case" -version = "0.10.0" +name = "countme" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ 
- "unicode-segmentation", -] +checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" @@ -533,7 +411,7 @@ dependencies = [ "clap", "criterion-plot", "is-terminal", - "itertools 0.10.5", + "itertools", "num-traits", "once_cell", "oorandom", @@ -552,7 +430,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools 0.10.5", + "itertools", ] [[package]] @@ -619,20 +497,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - [[package]] name = "deranged" version = "0.5.8" @@ -642,17 +506,6 @@ dependencies = [ "powerfmt", ] -[[package]] -name = "derive-where" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "difflib" version = "0.4.0" @@ -690,17 +543,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "document-features" version = "0.2.12" @@ -710,12 +552,6 @@ dependencies = [ "litrs", ] -[[package]] -name = "drain_filter_polyfill" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408" - [[package]] name = "echo-app-core" version = 
"0.1.0" @@ -848,6 +684,7 @@ dependencies = [ name = "echo-wasm-abi" version = "0.1.0" dependencies = [ + "blake3", "ciborium", "half", "proptest", @@ -873,6 +710,7 @@ name = "echo-wesley-gen" version = "0.1.0" dependencies = [ "anyhow", + "blake3", "clap", "prettyplease", "proc-macro2", @@ -880,6 +718,7 @@ dependencies = [ "serde", "serde_json", "syn", + "wesley-core", ] [[package]] @@ -888,28 +727,12 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] -name = "either_of" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216d23e0ec69759a17f05e1c553f3a6870e5ec73420fbb07807a6f34d5d1d5a4" -dependencies = [ - "paste", - "pin-project-lite", -] - [[package]] name = "equivalent" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" -[[package]] -name = "erased" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1731451909bde27714eacba19c2566362a7f35224f52b153d3f42cf60f72472" - [[package]] name = "errno" version = "0.3.14" @@ -920,27 +743,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "event-listener" -version = "5.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" -dependencies = [ - "event-listener", - "pin-project-lite", -] - [[package]] name = "fastrand" version = "2.3.0" @@ -968,20 +770,11 @@ version = "1.0.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -994,9 +787,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -1004,33 +797,32 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", "futures-util", - "num_cpus", ] [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -1039,21 +831,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -1063,7 +855,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -1095,53 +886,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", - "js-sys", "libc", "r-efi", "wasip2", - "wasm-bindgen", -] - -[[package]] -name = "gloo-net" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" -dependencies = [ - "futures-channel", - "futures-core", - "futures-sink", - "gloo-utils", - "http", - "js-sys", - "pin-project", - "serde", - "serde_json", - "thiserror 1.0.69", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", ] -[[package]] -name = "gloo-utils" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "guardian" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17e2ac29387b1aa07a1e448f7bb4f35b500787971e965b02842b900afa5c8f6f" - [[package]] name = "half" version = "2.7.1" @@ -1161,9 +910,19 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.16.1" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +checksum = "ed5909b6e89a2db4456e54cd5f673791d7eca6732202bbf2a9cc504fe2f9b84a" + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "byteorder", + "num-traits", +] [[package]] name = "heck" @@ -1184,172 +943,57 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] -name = "html-escape" -version = "0.2.13" +name = "iana-time-zone" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476" +checksum = 
"e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ - "utf8-width", + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", ] [[package]] -name = "http" -version = "1.4.0" +name = "iana-time-zone-haiku" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ - "bytes", - "itoa", + "cc", ] [[package]] -name = "hydration_context" -version = "0.3.0" +name = "indexmap" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8714ae4adeaa846d838f380fbd72f049197de629948f91bf045329e0cf0a283" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" dependencies = [ - "futures", - "once_cell", - "or_poisoned", - "pin-project-lite", + "equivalent", + "hashbrown 0.17.1", "serde", - "throw_error", + "serde_core", ] [[package]] -name = "icu_collections" -version = "2.1.1" +name = "is-terminal" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", + "hermit-abi", + "libc", + "windows-sys 0.61.2", ] [[package]] -name = "icu_locale_core" -version = "2.1.1" +name = "is_terminal_polyfill" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "indexmap" -version = "2.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" -dependencies = [ - "equivalent", - "hashbrown 0.16.1", -] - -[[package]] -name = "interpolator" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71dd52191aae121e8611f1e8dc3e324dd0dd1dee1e6dd91d10ee07a3cfb4d9d8" - -[[package]] -name = "is-terminal" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itertools" @@ -1360,15 +1004,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.17" @@ -1385,135 +1020,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "leptos" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb" -dependencies = [ - "any_spawner", - "cfg-if", - "either_of", - "futures", - "getrandom 0.3.4", - "hydration_context", - "leptos_config", - "leptos_dom", - "leptos_hot_reload", - "leptos_macro", - "leptos_server", - "oco_ref", - "or_poisoned", - "paste", - "reactive_graph", - "rustc-hash", - "rustc_version", - "send_wrapper", - "serde", - "serde_json", - "serde_qs", - "server_fn", - "slotmap", - "tachys", - "thiserror 2.0.17", - "throw_error", - "typed-builder 0.23.2", - 
"typed-builder-macro 0.23.2", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm_split_helpers", - "web-sys", -] - -[[package]] -name = "leptos_config" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071fc40aeb9fcab885965bad1887990477253ad51f926cd19068f45a44c59e89" -dependencies = [ - "config", - "regex", - "serde", - "thiserror 2.0.17", - "typed-builder 0.21.2", -] - -[[package]] -name = "leptos_dom" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78f4330c88694c5575e0bfe4eecf81b045d14e76a4f8b00d5fd2a63f8779f895" -dependencies = [ - "js-sys", - "or_poisoned", - "reactive_graph", - "send_wrapper", - "tachys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "leptos_hot_reload" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d61ec3e1ff8aaee8c5151688550c0363f85bc37845450764c31ff7584a33f38" -dependencies = [ - "anyhow", - "camino", - "indexmap", - "parking_lot", - "proc-macro2", - "quote", - "rstml", - "serde", - "syn", - "walkdir", -] - -[[package]] -name = "leptos_macro" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c86ffd2e9cf3e264e9b3e16bdb086cefa26bd0fa7bc6a26b0cc5f6c1fd3178ed" -dependencies = [ - "attribute-derive", - "cfg-if", - "convert_case 0.10.0", - "html-escape", - "itertools 0.14.0", - "leptos_hot_reload", - "prettyplease", - "proc-macro-error2", - "proc-macro2", - "quote", - "rstml", - "rustc_version", - "server_fn_macro", - "syn", - "uuid", -] - -[[package]] -name = "leptos_server" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730" -dependencies = [ - "any_spawner", - "base64", - "codee", - "futures", - "hydration_context", - "or_poisoned", - "reactive_graph", - "send_wrapper", - "serde", - "serde_json", - "server_fn", - "tachys", -] 
- [[package]] name = "libc" version = "0.2.178" @@ -1536,24 +1042,12 @@ dependencies = [ "libc", ] -[[package]] -name = "linear-map" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfae20f6b19ad527b550c223fddc3077a547fc70cda94b9b566575423fd303ee" - [[package]] name = "linux-raw-sys" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - [[package]] name = "litrs" version = "1.0.0" @@ -1570,27 +1064,10 @@ dependencies = [ ] [[package]] -name = "manyhow" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b33efb3ca6d3b07393750d4030418d594ab1139cee518f0dc88db70fec873587" -dependencies = [ - "manyhow-macros", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "manyhow-macros" -version = "0.11.4" +name = "log" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fce34d199b78b6e6073abf984c9cf5fd3e9330145a93ee0738a7443e371495" -dependencies = [ - "proc-macro-utils", - "proc-macro2", - "quote", -] +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "memchr" @@ -1628,10 +1105,31 @@ dependencies = [ ] [[package]] -name = "next_tuple" -version = "0.1.0" +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "ninelives" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"60993920e071b0c9b66f14e2b32740a4e27ffc82854dcd72035887f336a09a28" +checksum = "9eb329d63297228258d6747b37dcaaf6d9daa83ea7473b6b5c248ce87e93219d" +dependencies = [ + "async-trait", + "futures", + "rand 0.9.3", + "tokio", + "tower 0.5.3", + "tower-layer", + "tower-service", + "tracing", +] [[package]] name = "normalize-line-endings" @@ -1654,26 +1152,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "oco_ref" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0423ff9973dea4d6bd075934fdda86ebb8c05bdf9d6b0507067d4a1226371d" -dependencies = [ - "serde", - "thiserror 2.0.17", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -1698,12 +1176,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" -[[package]] -name = "or_poisoned" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c04f5d74368e4d0dfe06c45c8627c81bd7c317d52762d118fb9b3076f6420fd" - [[package]] name = "ordered-float" version = "2.10.1" @@ -1713,12 +1185,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" - [[package]] name = "parking_lot" version = "0.12.5" @@ -1742,38 +1208,20 @@ dependencies = [ "windows-link", ] -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "pathdiff" -version = "0.2.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "cbf0d9e68100b3a7989b4901972f265cd542e560a3a8a724e1e20322f4d06ce9" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "a990e22f43e84855daf260dded30524ef4a9021cc7541c26540500a50b624389" dependencies = [ "proc-macro2", "quote", @@ -1782,24 +1230,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "potential_utf" -version = "0.1.4" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "powerfmt" @@ -1856,39 +1289,6 @@ dependencies = [ "syn", ] -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "proc-macro-utils" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeaf08a13de400bc215877b5bdc088f241b12eb42f0a548d3390dc1c56bb7071" -dependencies = [ - "proc-macro2", - "quote", - "smallvec", -] - [[package]] name = "proc-macro2" version = "1.0.104" @@ -1898,19 +1298,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "proc-macro2-diagnostics" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "version_check", - "yansi", -] - [[package]] name = "proptest" version = "1.9.0" @@ -1956,28 +1343,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "quote-use" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9619db1197b497a36178cfc736dc96b271fe918875fbf1344c436a7e93d0321e" -dependencies = [ - "quote", - "quote-use-macros", -] - -[[package]] -name = "quote-use-macros" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ebfb7faafadc06a7ab141a6f67bcfb24cb8beb158c6fe933f2f035afa99f35" -dependencies = [ - "proc-macro-utils", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "r-efi" version = "5.3.0" @@ -2072,60 +1437,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "reactive_graph" -version = "0.2.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f0df355582937223ea403e52490201d65295bd6981383c69bfae5a1f8730c2" -dependencies = [ - "any_spawner", - "async-lock", - "futures", - "guardian", - "hydration_context", - "indexmap", - "or_poisoned", - "paste", - "pin-project-lite", - "rustc-hash", - "rustc_version", - "send_wrapper", - "serde", - "slotmap", - "thiserror 2.0.17", - "web-sys", -] - -[[package]] -name = "reactive_stores" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35372f05664a62a3dd389503371a15b8feb3396f99f6ec000de651fddb030942" -dependencies = [ - "dashmap", - "guardian", - "itertools 0.14.0", - "or_poisoned", - "paste", - "reactive_graph", - "reactive_stores_macro", - "rustc-hash", - "send_wrapper", -] - -[[package]] -name = "reactive_stores_macro" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa40919eb2975100283b2a70e68eafce1e8bcf81f0622ff168e4c2b3f8d46bb" -dependencies = [ - "convert_case 0.8.0", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "redox_syscall" version = "0.5.18" @@ -2182,34 +1493,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3" [[package]] -name = "rstml" -version = "0.12.1" +name = "rowan" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61cf4616de7499fc5164570d40ca4e1b24d231c6833a88bff0fe00725080fd56" +checksum = "417a3a9f582e349834051b8a10c8d71ca88da4211e4093528e36b9845f6b5f21" dependencies = [ - "derive-where", - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn", - "syn_derive", - "thiserror 2.0.17", + "countme", + "hashbrown 0.14.5", + "rustc-hash 1.1.0", + "text-size", ] [[package]] name = "rustc-hash" -version = "2.1.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] -name = "rustc_version" -version = "0.4.1" +name = "rustc-hash" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustix" @@ -2257,21 +1562,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "send_wrapper" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" -dependencies = [ - "futures-core", -] - [[package]] name = "serde" version = "1.0.228" @@ -2329,6 +1619,7 @@ version = "1.0.148" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da" dependencies = [ + "indexmap", "itoa", "memchr", "serde", @@ -2336,84 +1627,6 @@ dependencies = [ "zmij", ] -[[package]] -name = "serde_qs" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3faaf9e727533a19351a43cc5a8de957372163c7d35cc48c90b75cdda13c352" -dependencies = [ - "percent-encoding", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "serde_spanned" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" -dependencies = [ - 
"serde_core", -] - -[[package]] -name = "server_fn" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29" -dependencies = [ - "base64", - "bytes", - "const-str", - "const_format", - "dashmap", - "futures", - "gloo-net", - "http", - "js-sys", - "pin-project-lite", - "rustc_version", - "rustversion", - "send_wrapper", - "serde", - "serde_json", - "serde_qs", - "server_fn_macro_default", - "thiserror 2.0.17", - "throw_error", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "xxhash-rust", -] - -[[package]] -name = "server_fn_macro" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "950b8cfc9ff5f39ca879c5a7c5e640de2695a199e18e424c3289d0964cabe642" -dependencies = [ - "const_format", - "convert_case 0.8.0", - "proc-macro2", - "quote", - "rustc_version", - "syn", - "xxhash-rust", -] - -[[package]] -name = "server_fn_macro_default" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63eb08f80db903d3c42f64e60ebb3875e0305be502bdc064ec0a0eab42207f00" -dependencies = [ - "server_fn_macro", - "syn", -] - [[package]] name = "sha2" version = "0.10.9" @@ -2432,19 +1645,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] -name = "slab" -version = "0.4.11" +name = "signal-hook-registry" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] [[package]] -name = "slotmap" -version = "1.1.1" +name = "slab" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bdd58c3c93c3d278ca835519292445cb4b0d4dc59ccfdf7ceadaab3f8aeb4038" -dependencies = [ - "version_check", -] +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "smallvec" @@ -2453,20 +1667,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] -name = "spec-000-rewrite" -version = "0.1.0" +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ - "console_error_panic_hook", - "leptos", - "wasm-bindgen", + "libc", + "windows-sys 0.61.2", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - [[package]] name = "strsim" version = "0.11.1" @@ -2484,62 +1693,11 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb066a04799e45f5d582e8fc6ec8e6d6896040d00898eb4e6a835196815b219" -dependencies = [ - "proc-macro-error2", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tachys" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b2db11e455f7e84e2cc3e76f8a3f3843f7956096265d5ecff781eabe235077" -dependencies = [ - "any_spawner", - "async-trait", - "const_str_slice_concat", - "drain_filter_polyfill", - "either_of", - "erased", - "futures", - "html-escape", - "indexmap", - "itertools 0.14.0", - "js-sys", 
- "linear-map", - "next_tuple", - "oco_ref", - "or_poisoned", - "parking_lot", - "paste", - "reactive_graph", - "reactive_stores", - "rustc-hash", - "rustc_version", - "send_wrapper", - "slotmap", - "throw_error", - "wasm-bindgen", - "web-sys", -] +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" [[package]] name = "tempfile" @@ -2560,6 +1718,12 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" +[[package]] +name = "text-size" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" + [[package]] name = "thiserror" version = "1.0.69" @@ -2600,15 +1764,6 @@ dependencies = [ "syn", ] -[[package]] -name = "throw_error" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc0ed6038fcbc0795aca7c92963ddda636573b956679204e044492d2b13c8f64" -dependencies = [ - "pin-project-lite", -] - [[package]] name = "time" version = "0.3.47" @@ -2640,16 +1795,6 @@ dependencies = [ "time-core", ] -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - [[package]] name = "tinytemplate" version = "1.2.1" @@ -2661,82 +1806,110 @@ dependencies = [ ] [[package]] -name = "toml" -version = "0.9.10+spec-1.1.0" +name = "tokio" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0825052159284a1a8b4d6c0c86cbc801f2da5afd2b225fa548c72f2e74002f48" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "serde_core", - 
"serde_spanned", - "toml_datetime", - "toml_parser", - "winnow", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", ] [[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" +name = "tokio-macros" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" dependencies = [ - "serde_core", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "toml_parser" -version = "1.0.6+spec-1.1.0" +name = "tokio-util" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ - "winnow", + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", ] [[package]] -name = "ttd-browser" -version = "0.1.0" +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ - "ciborium", - "console_error_panic_hook", - "echo-session-proto", - "echo-ttd", - "js-sys", - "serde", - "ttd-protocol-rs", - "warp-core", - "wasm-bindgen", - "web-sys", + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", ] [[package]] -name = "ttd-protocol-rs" -version = "0.1.0" +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" dependencies = [ - "serde", + "futures-core", + "futures-util", + "hdrhistogram", + "indexmap", + "pin-project-lite", + "slab", + "sync_wrapper", + 
"tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", ] [[package]] -name = "typed-builder" -version = "0.21.2" +name = "tower-layer" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fef81aec2ca29576f9f6ae8755108640d0a86dd3161b2e8bca6cfa554e98f77d" -dependencies = [ - "typed-builder-macro 0.21.2", -] +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] -name = "typed-builder" -version = "0.23.2" +name = "tracing" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31aa81521b70f94402501d848ccc0ecaa8f93c8eb6999eb9747e72287757ffda" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ - "typed-builder-macro 0.23.2", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", ] [[package]] -name = "typed-builder-macro" -version = "0.21.2" +name = "tracing-attributes" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecb9ecf7799210407c14a8cfdfe0173365780968dc57973ed082211958e0b18" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", @@ -2744,14 +1917,35 @@ dependencies = [ ] [[package]] -name = "typed-builder-macro" -version = "0.23.2" +name = "tracing-core" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "076a02dc54dd46795c2e9c8282ed40bcfb1e22747e955de9389a1de28190fb26" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ - "proc-macro2", - "quote", - "syn", + "once_cell", +] + +[[package]] +name = "ttd-browser" +version = "0.1.0" +dependencies = [ + "ciborium", 
+ "console_error_panic_hook", + "echo-session-proto", + "echo-ttd", + "js-sys", + "serde", + "ttd-protocol-rs", + "warp-core", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "ttd-protocol-rs" +version = "0.1.0" +dependencies = [ + "serde", ] [[package]] @@ -2790,53 +1984,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "url" -version = "2.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf8-width" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1292c0d970b54115d14f2492fe0170adf21d68a1de108eebc51c1df4f346a091" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "uuid" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" -dependencies = [ - "getrandom 0.3.4", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "version_check" version = "0.9.5" @@ -2871,7 +2024,7 @@ dependencies = [ "criterion", "echo-dry-tests", "rayon", - "rustc-hash", + "rustc-hash 2.1.1", "warp-core", ] @@ -2907,7 +2060,7 @@ 
dependencies = [ "hex", "libm", "proptest", - "rustc-hash", + "rustc-hash 2.1.1", "serde", "serde-value", "thiserror 1.0.69", @@ -2963,19 +2116,6 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" -dependencies = [ - "cfg-if", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasm-bindgen-macro" version = "0.2.106" @@ -3009,48 +2149,33 @@ dependencies = [ ] [[package]] -name = "wasm-streams" -version = "0.4.2" +name = "web-sys" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ - "futures-util", "js-sys", "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "wasm_split_helpers" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a114b3073258dd5de3d812cdd048cca6842342755e828a14dbf15f843f2d1b84" -dependencies = [ - "async-once-cell", - "wasm_split_macros", ] [[package]] -name = "wasm_split_macros" -version = "0.2.0" +name = "wesley-core" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56481f8ed1a9f9ae97ea7b08a5e2b12e8adf9a7818a6ba952b918e09c7be8bf0" +checksum = "8365972ab1d1d8ece193aa3de936223e33abe3844f3fba0e7049efc50f64b3ea" dependencies = [ - "base16", - "quote", + "apollo-parser", + "async-trait", + "chrono", + "hex", + "indexmap", + "ninelives", + "serde", + "serde_json", "sha2", - "syn", -] - -[[package]] -name = "web-sys" -version = "0.3.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" -dependencies = [ - 
"js-sys", - "wasm-bindgen", + "thiserror 1.0.69", + "tokio", + "tower 0.4.13", ] [[package]] @@ -3084,12 +2209,65 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -3165,27 +2343,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" -dependencies = [ - "memchr", -] - [[package]] name = "wit-bindgen" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - [[package]] name = "xtask" version = "0.1.0" @@ -3202,41 +2365,6 @@ dependencies = [ "warp-cli", ] -[[package]] -name = "xxhash-rust" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" - -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "zerocopy" version = "0.8.31" @@ -3257,60 +2385,6 @@ dependencies = [ "syn", ] -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "zmij" version = "1.0.2" diff --git a/Cargo.toml b/Cargo.toml index 4036a28e..bafd9f87 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,6 @@ members = [ "crates/echo-dind-tests", "crates/echo-wasm-abi", "crates/echo-registry-api", - "specs/spec-000-rewrite", "crates/echo-wasm-bindings", "crates/echo-wesley-gen", "crates/echo-dry-tests", diff --git a/Makefile b/Makefile index 8bc6781b..bf6a6e75 100644 --- a/Makefile +++ b/Makefile @@ -223,16 +223,3 @@ bench-policy-open-inline: else \ echo "Open URL: $(BENCH_POLICY_REPORT_URI)" ; \ fi - -# Spec-000 (WASM) helpers -.PHONY: spec-000-dev spec-000-build - -spec-000-dev: - @command -v trunk >/dev/null 2>&1 || { echo "Error: trunk not found. 
Install: cargo install trunk" >&2; exit 1; } - @test -d specs/spec-000-rewrite || { echo "Error: specs/spec-000-rewrite not found" >&2; exit 1; } - @cd specs/spec-000-rewrite && trunk serve - -spec-000-build: - @command -v trunk >/dev/null 2>&1 || { echo "Error: trunk not found. Install: cargo install trunk" >&2; exit 1; } - @test -d specs/spec-000-rewrite || { echo "Error: specs/spec-000-rewrite not found" >&2; exit 1; } - @cd specs/spec-000-rewrite && trunk build --release diff --git a/README.md b/README.md index efc00f74..8a91b45a 100644 --- a/README.md +++ b/README.md @@ -1,173 +1,572 @@ +

ECHO

-

-

A deterministic WARP runtime for witnessed causal history, bounded observation, and graph-shaped readings

-

+

+ A deterministic WARP runtime for witnessed causal history, bounded optics, and holographic readings. +

DocsArchitecture • -Continuum • -warp-core • -AIΩN Framework +There Is No Graph • +Continuum • +WSC / Verkle / IPA • +warp-core

-Determinism CI -CI +Determinism CI +CI Platforms

+# Echo + +Echo is the hot runtime optic in the WARP stack. + +It does not treat a graph, database, file tree, editor buffer, or in-memory +object heap as the ultimate truth. Echo's substrate is witnessed causal history: +admitted transitions, frontiers, receipts, witnesses, patches, checkpoints, +retained readings, and boundary artifacts. + +The hard doctrine is: + +```text +There is no privileged graph. +There are causal histories and lawful readings of those histories. +``` + +Echo turns that doctrine into runtime machinery. + +It admits canonical intents, schedules deterministic work, settles speculative +paths, emits evidence-bearing receipts, serves bounded observations, and retains +the artifacts needed to replay or verify what happened. Graph-shaped state is a +reading. Files are readings. Build outputs are readings. Debugger views are +readings. Echo exists to make those readings lawful, witnessed, and +replayable. + +## Thirty Second Version + +Echo is a deterministic runtime for admitting canonical intents and producing +witnessed readings. + +Most applications do not call Echo with application objects directly. They: + +```text +author GraphQL contract + -> compile with Wesley + -> use generated helpers + -> dispatch canonical EINT intents + -> observe ReadingEnvelope-backed results +``` + +Echo handles causal admission, receipts, witnesses, retention, replay, and +bounded observations. The application owns domain semantics. Wesley bridges the +two by turning authored contracts into typed generated surfaces. + +## Reader Paths + +- **Write an app:** start with + [Writing An Echo Application](#writing-an-echo-application), then read + [Application Contract Hosting](docs/architecture/application-contract-hosting.md). +- **Understand the model:** read [WARP And Continuum](#warp-and-continuum), + [Core Ontology](#core-ontology), and + [There Is No Graph](docs/architecture/there-is-no-graph.md). 
+- **Generate contracts:** use + [echo-wesley-gen](crates/echo-wesley-gen/README.md) with a GraphQL SDL + contract. +- **Hack the runtime:** start with [Core Crates](#core-crates), then run the + [Quick Start](#quick-start) checks. +- **Follow retained readings and proofs:** read + [WSC, Verkle, IPA, And Retained Readings](docs/architecture/wsc-verkle-ipa-retained-readings.md). + +## Why It Exists + +Traditional systems pretend there is one mutable global state: + +```text +program + state -> mutated state +``` + +That model leaks. It turns concurrency into locks, collaboration into merge +pain, debugging into archaeology, and generated artifacts into "trust me, this +script probably ran." +Echo follows the WARP model instead: -# What is Echo? +```text +causal basis + optic law + support obligations -> witnessed reading -Echo is a Rust implementation of the [WARP](https://github.com/flyingrobots/aion) (Worldline Algebra for Recursive Provenance) architecture. +reading + intent + admission law -> witnessed suffix -Traditional applications model state as a hierarchy of mutable containers, relying on locks and mutexes to manage concurrency. This approach leads to non-deterministic execution, making bugs difficult to reproduce and debug. +witnessed suffix + optic -> new reading +``` -**Echo fundamentally changes this model:** Instead of mutating a global state, Echo treats **witnessed causal history** as the ultimate source of truth. Graph-shaped structures are treated merely as observer-relative *views* over that history, rather than the core reality. Once kernel history is admitted, it is immutable. Parallel work is handled via private deltas that merge only when mathematically proven to be lawful, eliminating the need for runtime locks. +The result is not "no state." State-like values still exist everywhere. The +difference is authority: materialized state is a chart, cache, viewport, or +hologram. It is not the territory. 
+ +## WARP And Continuum + +WARP is the runtime/optic model used here. A WARP optic is a bounded, +law-governed participant over causal history. It can observe, admit, retain, +reveal, import, materialize, or verify readings, but it does not own a canonical +global graph. + +Continuum is the compatibility layer between WARP participants. It is not Echo, +not "the Echo protocol," and not a second runtime that owns the truth. It is the +shared transport vocabulary for exchanging enough causal evidence for another +optic to produce a compatible local reading: + +- causal suffixes; +- coordinates and frontiers; +- witnesses, receipts, and support obligations; +- hologram and reading boundaries; +- optic, rule, schema, and artifact identifiers. + +Echo is one Continuum-speaking WARP participant. `git-warp`, Wesley, Graft, +WARPDrive, `warp-ttd`, and application tools such as `jedit` can also be WARP +participants when they exchange witnessed causal structure instead of pretending +to pass around a privileged graph object. + +The payload is not "the graph." The payload is the causal suffix, coordinate, +support, and witness material needed for another optic to construct its own +lawful reading. + +## Core Ontology + +| Concept | Meaning in Echo | +| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------- | +| **Causal history** | The witnessed substrate: admitted transitions, frontiers, receipts, witnesses, and retained boundary artifacts. | +| **WARP optic** | A bounded, law-named operation over causal history. It may admit, observe, retain, reveal, import, or materialize. | +| **Reading** | An observer-relative artifact emitted from a coordinate, aperture, and projection law. | +| **Hologram** | A witnessed output carrying enough basis, law, aperture, evidence, identity, and posture to recreate the claim at its declared level. 
| +| **Witness** | Evidence that a transition or reading followed from a named basis under a named law. | +| **Shell** | A retained boundary artifact such as a tick patch, suffix bundle, provenance payload, or checkpoint base. | +| **ReadIdentity** | The semantic question a retained payload answers. It is intentionally separate from the CAS byte hash. | + +The front-door architecture note is +[There Is No Graph](docs/architecture/there-is-no-graph.md). + +## What Echo Owns + +Echo owns the generic hot runtime path: + +- canonical intent ingress; +- deterministic scheduling and footprint checks; +- rewrite settlement; +- worldline and provenance retention; +- replayable tick patches; +- Merkle commitments over state and patch boundaries; +- observation artifacts and `ReadingEnvelope` metadata; +- WASM/session boundaries for browser and host integration; +- `echo-cas` retention for bytes, witnesses, receipts, and cached readings. + +Echo does **not** own application nouns. + +Names like `ReplaceRange`, `JeditBuffer`, `CounterIncrement`, +`RenameSymbol`, or `GraftProjection` belong in authored contracts, +Wesley-generated code, application adapters, or fixtures. They must not become +Echo substrate APIs. + +## WARP Runtime Flow + +Echo's hot path is deliberately boring: + +1. External callers submit canonical intent bytes. +2. Inbox sequencing derives content identity and canonical pending order. +3. Rules propose candidate rewrites with explicit footprints. +4. The scheduler admits a deterministic independent subset. +5. The engine applies admitted rewrites. +6. Echo emits receipts, tick patches, provenance, and hashes. +7. Observation services resolve coordinates and return readings. +8. Retention stores the bytes and witness material needed for replay or + obstruction. + +The point is not to mutate a global graph. The point is to admit and observe +witnessed causal structure through explicit laws. 
+ +## Application Contracts + +Applications talk to Echo through generated contracts, not app-specific runtime +APIs. + +Wesley is the compiler optic for those contracts. Application authors describe +their domain operations and readings in GraphQL SDL; Wesley lowers that +authored contract into generated helpers, registries, codecs, operation ids, +artifact metadata, and footprint certificates. Echo then hosts the generated +contract through generic dispatch and observation boundaries. + +Wesley exists because Echo's runtime boundary is intentionally generic. Echo +should not learn what `increment`, `ReplaceRange`, `CounterValue`, or +`JeditBuffer` mean. Generated Wesley code gives applications a typed surface +while preserving Echo's substrate rule: -# At a Glance +```text +Application nouns live in contracts. +Echo receives canonical intents and returns witnessed readings. +``` -| Feature | How Echo Achieves It | -|---|---| -| **Lock-Free Parallelism** | Immutable bases, private deltas, canonical merging, and strict footprint checks. | -| **0-ULP Determinism** | Platform-invariant math, logical (not system) time, and seeded randomness. | -| **Witnessed Admission** | Every accepted state transition carries reviewable cryptographic evidence. | -| **Bounded Optics** | State modifications (lowering) produce an explicit outcome, a witness, and a retained shell. | -| **Observer-Relative Reads** | Data reads carry coordinates, basis info, witnesses, and context. | -| **Live Settlement** | Speculative paths (strands) are settled against live evidence before merging. | +The current shape is: -# Core Architecture +```text +Application UI / adapter + -> Wesley-generated contract client + -> canonical operation variables + -> EINT intent bytes + -> Echo dispatch_intent(...) + -> Echo causal admission and receipts + -> Echo observe(...) 
+ -> ReadingEnvelope + payload bytes + -> generated/application decoding + -> UI +``` -## The WARP Runtime Model +This is why a serious text editor such as `jedit` can own its rope model, +buffer law, edit-group law, checkpoint policy, and UI behavior while Echo stays +generic. Echo hosts the generated contract, verifies artifact metadata, admits +intents, emits readings, and retains bytes. It does not become a text editor. -Echo relies on a specific set of concepts to manage state and history: +See [Application Contract Hosting](docs/architecture/application-contract-hosting.md). -* **Witnessed Causal History:** The immutable, underlying semantic truth of the system. -* **Graph-Shaped Readings:** Filtered, observer-relative views projected from causal history. -* **Optics:** The rules defining how changes are lowered, admitted, witnessed, and retained. -* **Observers:** The rules defining what a read operation can project, preserve, accumulate, and output. -* **Shells:** Retained data packages that enable deterministic replay, auditing, network transport, and state revelation. +## Writing An Echo Application -Materialized state in Echo is just a cache, checkpoint, or reading surface—never the definitive source of truth. The only thing that truly matters is what was admitted, the laws governing it, the witness that proves it, and what an observer is allowed to read from it. +The normal authoring loop is contract-first: -## How State Evolves (Admission) +1. Author a GraphQL SDL contract in the application repo. +2. Declare operation/read names with `@wes_op`. +3. Declare deterministic access footprints with `@wes_footprint` when the + operation mutates or observes application state. +4. Run `echo-wesley-gen` to generate Rust contract helpers. +5. Have the host verify the generated registry/artifact metadata. +6. Use generated helpers to pack EINT intent bytes and build observation + requests. +7. 
Let Echo admit the intent, emit receipts, retain witnesses, and return a + `ReadingEnvelope`. +8. Decode and present the result in the application. -When Echo steps forward (ticks), it does not mutate a global object. Instead, work is evaluated through an **Optic**: +The end-to-end shape is: ```text -Optic = (ObserverPlan, OpticSlice, LoweringSurface, AdmissionLaw, RetentionContract) +counter.graphql + -> echo-wesley-gen + -> generated.rs + -> verify_contract_artifact(...) + -> pack_increment_intent(...) + -> dispatch_intent(...) + -> counter_value_observation_request(...) + -> observe(...) + -> inspect ReadingEnvelope +``` + +A tiny contract looks like this: + +```graphql +directive @wes_op(name: String!) on FIELD_DEFINITION +directive @wes_footprint( + reads: [String!] + writes: [String!] +) on FIELD_DEFINITION + +type CounterValue { + value: Int! +} + +input IncrementInput { + amount: Int! +} + +type Query { + counterValue: CounterValue! @wes_op(name: "counterValue") +} + +type Mutation { + increment(input: IncrementInput!): CounterValue! + @wes_op(name: "increment") + @wes_footprint(reads: ["CounterValue"], writes: ["CounterValue"]) +} +``` + +Generate the Rust contract surface: + +```bash +cargo run -p echo-wesley-gen -- --schema counter.graphql --out generated.rs +``` + +Application code should use generated helpers rather than hand-rolling Echo +wire bytes. Conceptually: + +```rust +let intent = generated::pack_increment_intent( + &generated::__echo_wesley_generated::IncrementVars { + input: generated::IncrementInput { amount: 1 }, + }, +)?; + +let response = echo_wasm_abi::kernel_port::KernelPort::dispatch_intent( + &mut kernel, + &intent, +)?; +``` + +For reads, generated query helpers build `ObservationRequest` values. Echo +returns an `ObservationArtifact` containing payload bytes plus a +`ReadingEnvelope`; the application should inspect that envelope before treating +the reading as complete. 
-Lower(frontier, weave) = (Outcome, Witness, Shell) +Current checked-in generation is Rust-first. TypeScript/browser generation +should follow the same contract identity, registry, artifact-verification, and +footprint-honesty rules rather than inventing a separate Echo API. -Outcome(X) = Derived(X) | Plural(X) | Conflict | Obstruction +### Boundary Vocabulary +- **GraphQL SDL contract:** the application-owned declaration of types, + operations, reads, and metadata. +- **Wesley:** the compiler optic that lowers the contract into generated Echo + helpers and registry metadata. +- **EINT:** Echo's canonical intent envelope. Generated helpers pack operation + variables into this shape. +- **ObservationRequest:** the generic Echo read request produced by generated + query helpers. +- **ReadingEnvelope:** the evidence wrapper around a returned reading. It names + basis, observer, projection, witness references, and whether the reading is + complete, residual, obstructed, or otherwise limited. +- **Artifact verification:** the host check that a generated contract registry + matches the expected schema, codec, registry version, and certificate + posture. + +## What Not To Put In Echo + +Echo is generic substrate. Keep application semantics above the generated +contract boundary. + +Do not add: + +- app-specific runtime APIs such as `replace_range(...)`, + `increment_counter(...)`, `rename_symbol(...)`, or `save_buffer(...)`; +- application-owned structs as core Echo state; +- GraphQL execution as Echo's runtime language; +- hand-rolled EINT packing in product code when generated helpers exist; +- jedit, Graft, Wesley, Continuum, or `git-warp` ownership inside Echo core. + +The operational anchor is: + +```text +big ontology claim: there is no privileged graph +runtime consequence: Echo stores witnessed causal history and serves readings +through explicit dispatch and observation boundaries ``` -* **Outcome:** Determines whether the change was admitted or why it failed. 
-* **Witness:** Provides the evidence required to audit the decision. -* **Shell:** Packages the data required for future replays or reads. +## jedit Boundary + +`jedit` is expected to be a serious Echo consumer, not an Echo submodule. + +`jedit` owns: + +- rope model and buffer semantics; +- edit group law; +- dirty state and checkpoint policy; +- editor UI and user interaction policy; +- the external text GraphQL contract. + +Wesley owns: + +- compiling that external GraphQL contract into generated helpers; +- carrying contract identity, schema identity, operation ids, registry + metadata, and footprint certificates. -# Observation and Artifacts +Echo owns: -In Echo, observation is an active, structured process, not a passive query. +- generic contract hosting; +- intent admission and scheduling; +- receipts, witnesses, and retained bytes; +- contract-aware readings and `ReadingEnvelope` posture. + +Echo tests may use generated `jedit` Wesley output as a fixture. Echo should not +author the `jedit` contract or grow text-editor APIs. + +## Retained Readings: WSC, Verkle, IPA, CAS + +Echo's retained-reading direction is: ```text -StructuralObserver = (Projection, ObserverBasis, ObserverState, UpdateLaw, EmissionLaw) +WSC = canonical columnar bytes for a reading or checkpoint +Verkle = authenticated commitment/index over those bytes +IPA = compact proof mechanism for opening bounded apertures +echo-cas = content-addressed byte retention +``` + +Short version: +```text +WSC gives us the table. +Verkle gives us the root. +IPA gives us the aperture proof. +echo-cas stores the bytes. ``` -Observations yield a `ReadingArtifact` containing the payload, coordinates, basis, budget, and witness. The WASM ABI (currently v6) wraps these in a `ReadingEnvelope` so host tools understand exactly how a read was resolved. This "holographic" approach allows tools to seamlessly reconstruct prior states or causal slices without bolting on an external debugger. 
+Current reality: + +- `warp-core` has WSC writing, validation, and borrowed view support. +- `echo-cas` stores opaque bytes by `BLAKE3(bytes)`. +- retained reading identity is intentionally separate from CAS byte identity. + +Future direction: + +- WSC-backed retained readings and checkpoints; +- Verkle or equivalent authenticated indexes over WSC coordinates; +- IPA or equivalent compact opening proofs for proof-carrying apertures; +- bounded reads that can verify selected rows, chunks, or ranges without + materializing the full retained reading. + +This is future proof infrastructure, not a new ontology. WSC is not truth. +Verkle is not truth. IPA is not storage. CAS is not semantic identity. + +See [WSC, Verkle, IPA, And Retained Readings](docs/architecture/wsc-verkle-ipa-retained-readings.md). + +## Current Reality -# Determinism by Construction +Works today: -Echo achieves exact, cross-platform reproducibility (0-ULP determinism). The kernel output will be identical whether running on 1 thread or 32, across Linux, macOS, or Windows, today or ten years from now. +- Rust contract generation from GraphQL SDL through `echo-wesley-gen`; +- generated registry metadata and operation descriptors; +- generated footprint certificate constants for `@wes_footprint`; +- host-side contract artifact verification through `echo-registry-api`; +- generic EINT dispatch and observation plumbing; +- WSC writing, validation, inspection, and borrowed views in `warp-core`; +- content-addressed byte retention in `echo-cas`; +- docs and Method backlog tracking for active contract-hosting work. -To enforce this, Echo strictly bans: +Designed or in progress: -* **Bare host floats:** All math uses fixed-point or platform-invariant scalars. -* **System wall-clock time:** Simulation time is an intrinsic property of the worldline. -* **Unseeded randomness:** Any tick utilizing randomness must include the seed as part of the admitted input. 
-* **Footprint enforcement** ensures parallelism remains deterministic. Optics declare bounded regions; the scheduler proves independence. Any proposed delta that violates its contract is structurally rejected—never patched or retried. +- TypeScript/browser generator parity; +- generated `jedit` contract fixtures as Echo integration evidence; +- contract-aware receipts and readings with full application identity; +- WSC-backed retained readings and checkpoints; +- Verkle or equivalent authenticated retained-reading indexes; +- IPA or equivalent proof-carrying aperture openings; +- full Continuum interchange across Echo, `git-warp`, Wesley, Graft, + WARPDrive, and `warp-ttd`. -# Runtime Surfaces & Stack +## Determinism Posture -Echo serves as the engine layer governing the transition from private speculation (strands) to shared causal reality. +Echo is built around exact replay and cross-platform convergence. -# Core Components +The runtime treats nondeterminism as an input discipline problem: -| Component | Role | -|---|---| -| **warp-core** | Hot runtime kernel handling worldlines, strands, observation, and settlement. | -| **echo-wasm-abi** | Current ABI v6 DTOs and canonical CBOR boundary. *Note: v6 is a compatibility epoch, not a promise of support for v1-v5.* | -| **warp-wasm** | wasm-bindgen boundary for browser and JavaScript environments. | -| **warp-cli** | Native CLI for inspection and verification. | -| **ObservationService** | Canonical read path emitting observation artifacts. | -| **SettlementService** | Handles strand comparison, import candidates, and conflicts. | -| **echo-cas** | Content-addressed storage substrate. | -| **echo-ttd & ttd-browser** | Time-travel/debugging protocol surfaces and their browser bridges. | -| **echo-dind-*** | Cross-platform harness for verifying hash convergence. 
| +- no ambient wall-clock time in admitted simulation law; +- no unseeded randomness inside ticks; +- platform-sensitive math is pinned behind deterministic representations; +- canonical CBOR is used at ABI boundaries; +- footprint declarations constrain parallel work; +- receipts and patches carry the evidence needed for replay and audit. -# Quick Start +The slogan is not "parallelism is safe because we hope so." The rule is: -## 1. Repository Setup +```text +parallel work is admitted only when the runtime can prove the admitted subset +is lawful for the current basis. +``` + +## Core Crates + +| Crate | Role | +| -------------------------- | -------------------------------------------------------------------------------------------------------- | +| `warp-core` | Hot runtime kernel: worldlines, scheduling, settlement, observation, WSC, receipts, and core WARP state. | +| `echo-wasm-abi` | Canonical host/runtime DTOs, `KernelPort`, canonical CBOR helpers, observation and dispatch surfaces. | +| `warp-wasm` | Browser/JavaScript boundary around the runtime kernel. | +| `warp-cli` | Native CLI for WSC inspection, validation, and runtime support tooling. | +| `echo-registry-api` | Minimal generic registry boundary for generated application contracts. | +| `echo-wesley-gen` | Wesley-to-Echo Rust generator for generated DTOs, op ids, registry metadata, and contract helpers. | +| `echo-cas` | Content-addressed byte store. It stores bytes; typed identity lives above it. | +| `echo-ttd` / `ttd-browser` | Time-travel/debugging protocol surfaces and browser bridges. | +| `echo-dind-*` | Cross-platform determinism harnesses and evidence tooling. | + +## Quick Start + +### Hacking Echo -Install the necessary guardrails and verify the current operational Method view. +Install hooks and check the current Method view: ```bash make hooks cargo xtask method status --json - ``` -## 2. Run a Fast Runtime Slice - -Execute the narrow test-slice path for rapid local iteration. 
+Run a fast runtime slice: ```bash cargo xtask test-slice warp-core-smoke +``` +Run focused generated-contract checks: + +```bash +cargo test -p echo-wesley-gen +cargo test -p echo-registry-api ``` -## 3. Build Documentation +Build the docs: -The documentation build serves as an active regression gate. ```bash pnpm docs:build +``` +Run the determinism harness: + +```bash +cargo xtask dind run ``` -## 4. Run the Determinism Harness +### Generating A Contract -Use Docker-in-Docker (DIND) to verify cross-platform hash convergence. +Generate a Rust contract surface from GraphQL SDL: ```bash -cargo xtask dind run +cargo run -p echo-wesley-gen -- --schema counter.graphql --out generated.rs +``` + +Generate to stdout while iterating: +```bash +cargo run -p echo-wesley-gen -- --schema counter.graphql ``` -# Documentation Directory +### Inspecting WSC + +Inspect a WSC snapshot: + +```bash +export SNAPSHOT=/path/to/state.wsc + +cargo run -p warp-cli -- inspect "$SNAPSHOT" +cargo run -p warp-cli -- inspect "$SNAPSHOT" --tree +cargo run -p warp-cli -- verify "$SNAPSHOT" +cargo run -p warp-cli -- --format json verify "$SNAPSHOT" +``` -* **Docs**: Main documentation map (runtime, replay, observation). -* **Bearing**: Repository direction and near-term priorities. -* **Architecture**: System architecture and layer model. -* **WARP Drift**: Adjustments regarding strands and suffix admission. -* **Optic & Observer Doctrine**: Core definitions for runtime nouns. -* **WASM ABI (v6)**: The active host/runtime contract. -* **Method**: Operational workflow and backlog automation rules. -* **DIND**: Instructions for the determinism testing harness. -* **Theory**: Theoretical foundations of the WARP model. -* **Continuum**: The multi-repository system model. 
+## Documentation Map + +- [Docs index](docs/index.md) +- [Current bearing](docs/BEARING.md) +- [Runtime model](docs/architecture/outline.md) +- [There Is No Graph](docs/architecture/there-is-no-graph.md) +- [Continuum Transport](docs/architecture/continuum-transport.md) +- [Application Contract Hosting](docs/architecture/application-contract-hosting.md) +- [echo-wesley-gen CLI](crates/echo-wesley-gen/README.md) +- [WSC, Verkle, IPA, And Retained Readings](docs/architecture/wsc-verkle-ipa-retained-readings.md) +- [warp-core spec](docs/spec/warp-core.md) +- [WASM ABI contract](docs/spec/SPEC-0009-wasm-abi.md) +- [Theory map](docs/theory/THEORY.md) +- [Contributor workflow](docs/workflows.md) ---

-Built by FLYING•ROBOTS +Built by FLYING•ROBOTS.

diff --git a/apps/ttd-app/src/components/ProvenanceDrawer.css b/apps/ttd-app/src/components/ProvenanceDrawer.css deleted file mode 100644 index f835c8a9..00000000 --- a/apps/ttd-app/src/components/ProvenanceDrawer.css +++ /dev/null @@ -1,195 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.provenance-drawer { - position: fixed; - right: 0; - top: 0; - bottom: 0; - width: 400px; - background: var(--bg-secondary); - border-left: 1px solid var(--border-color); - box-shadow: -4px 0 20px rgba(0, 0, 0, 0.3); - z-index: 100; - display: flex; - flex-direction: column; - animation: slideIn 0.2s ease-out; -} - -@keyframes slideIn { - from { - transform: translateX(100%); - } - to { - transform: translateX(0); - } -} - -.drawer-header { - display: flex; - justify-content: space-between; - align-items: center; - padding: 12px 16px; - background: var(--bg-tertiary); - border-bottom: 1px solid var(--border-color); -} - -.drawer-header h3 { - font-size: 14px; - font-weight: 600; - margin: 0; -} - -.drawer-actions { - display: flex; - gap: 8px; -} - -.drawer-content { - flex: 1; - overflow-y: auto; - padding: 16px; -} - -.provenance-timeline { - position: relative; - padding-left: 24px; -} - -.provenance-timeline::before { - content: ""; - position: absolute; - left: 7px; - top: 8px; - bottom: 8px; - width: 2px; - background: var(--border-color); -} - -.provenance-entry { - position: relative; - margin-bottom: 24px; -} - -.entry-tick { - display: flex; - align-items: center; - gap: 8px; - margin-bottom: 8px; -} - -.tick-marker { - position: absolute; - left: -20px; - width: 12px; - height: 12px; - background: var(--accent-blue); - border-radius: 50%; - border: 2px solid var(--bg-secondary); -} - -.tick-value { - font-family: "JetBrains Mono", monospace; - font-size: 12px; - font-weight: 600; - color: var(--accent-blue); -} - -.entry-content { - background: var(--bg-tertiary); - border-radius: 6px; - padding: 12px; -} - -.entry-rule { - 
display: flex; - align-items: center; - gap: 8px; - margin-bottom: 8px; - font-size: 12px; -} - -.entry-rule .label { - color: var(--text-secondary); -} - -.entry-rule code { - font-family: "JetBrains Mono", monospace; - font-size: 10px; - background: var(--bg-secondary); - padding: 2px 6px; - border-radius: 3px; -} - -.entry-diff { - font-family: "JetBrains Mono", monospace; - font-size: 11px; -} - -.diff-old, -.diff-new { - display: flex; - gap: 8px; - padding: 4px 8px; - border-radius: 3px; - margin-bottom: 4px; -} - -.diff-old { - background: rgba(255, 74, 74, 0.1); - color: var(--accent-red); -} - -.diff-new { - background: rgba(74, 224, 74, 0.1); - color: var(--accent-green); -} - -.diff-marker { - font-weight: bold; - width: 12px; -} - -.diff-old pre, -.diff-new pre { - margin: 0; - white-space: pre-wrap; - word-break: break-word; -} - -.provenance-summary { - margin-top: 24px; - padding-top: 16px; - border-top: 1px solid var(--border-color); -} - -.provenance-summary h4 { - font-size: 12px; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 0.5px; - color: var(--text-secondary); - margin-bottom: 12px; -} - -.summary-stats { - display: flex; - gap: 24px; -} - -.stat { - display: flex; - flex-direction: column; - gap: 2px; -} - -.stat-value { - font-size: 18px; - font-weight: 600; - color: var(--accent-blue); -} - -.stat-label { - font-size: 11px; - color: var(--text-secondary); -} diff --git a/apps/ttd-app/src/components/ProvenanceDrawer.tsx b/apps/ttd-app/src/components/ProvenanceDrawer.tsx deleted file mode 100644 index 6f108a8c..00000000 --- a/apps/ttd-app/src/components/ProvenanceDrawer.tsx +++ /dev/null @@ -1,125 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { useTtdStore } from "../store/ttdStore"; -import type { AtomWrite } from "../types/ttd"; -import { truncateHash } from "../types/ttd"; -import "./ProvenanceDrawer.css"; - -interface 
ProvenanceDrawerProps { - engine: TtdEngine; -} - -// Mock provenance data -const mockWrites: AtomWrite[] = [ - { - atomId: new Uint8Array(32).fill(10), - ruleId: new Uint8Array(32).fill(200), - tick: 45n, - oldValue: new TextEncoder().encode(JSON.stringify({ count: 46 })), - newValue: new TextEncoder().encode(JSON.stringify({ count: 47 })), - }, - { - atomId: new Uint8Array(32).fill(10), - ruleId: new Uint8Array(32).fill(200), - tick: 40n, - oldValue: new TextEncoder().encode(JSON.stringify({ count: 45 })), - newValue: new TextEncoder().encode(JSON.stringify({ count: 46 })), - }, - { - atomId: new Uint8Array(32).fill(10), - ruleId: new Uint8Array(32).fill(200), - tick: 35n, - oldValue: new TextEncoder().encode(JSON.stringify({ count: 44 })), - newValue: new TextEncoder().encode(JSON.stringify({ count: 45 })), - }, - { - atomId: new Uint8Array(32).fill(10), - ruleId: new Uint8Array(32).fill(201), - tick: 0n, - oldValue: undefined, - newValue: new TextEncoder().encode(JSON.stringify({ count: 0 })), - }, -]; - -export function ProvenanceDrawer({ engine: _engine }: ProvenanceDrawerProps) { - const selectedAtomId = useTtdStore((s) => s.selectedAtomId); - const { toggleProvenanceDrawer, toggle4DView } = useTtdStore(); - - const decodeValue = (bytes?: Uint8Array): string => { - if (!bytes) return "(created)"; - try { - return new TextDecoder().decode(bytes); - } catch { - return truncateHash(bytes); - } - }; - - return ( -
-
-

Provenance: {selectedAtomId ? truncateHash(selectedAtomId) : "—"}

-
- - -
-
- -
-
- {mockWrites.map((write, i) => ( -
-
- - T{write.tick.toString()} -
- -
-
- Rule - {truncateHash(write.ruleId)} -
- -
- {write.oldValue && ( -
- -
{decodeValue(write.oldValue)}
-
- )} -
- + -
{decodeValue(write.newValue)}
-
-
-
-
- ))} -
- -
-

Summary

-
-
- {mockWrites.length} - Total Writes -
-
- 1 - Rules Involved -
-
- T0 - Created At -
-
-
-
-
- ); -} diff --git a/apps/ttd-app/src/components/StateInspector.css b/apps/ttd-app/src/components/StateInspector.css deleted file mode 100644 index 7d1f9c19..00000000 --- a/apps/ttd-app/src/components/StateInspector.css +++ /dev/null @@ -1,99 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.state-inspector { - height: 100%; - display: flex; - flex-direction: column; -} - -.state-inspector .panel-header { - display: flex; - justify-content: space-between; - align-items: center; -} - -.state-inspector .panel-content { - flex: 1; - overflow-y: auto; - padding: 0; -} - -.btn-sm { - padding: 2px 8px; - font-size: 11px; -} - -.atom-list { - display: flex; - flex-direction: column; -} - -.atom-row { - padding: 12px; - border-bottom: 1px solid var(--border-color); - cursor: pointer; - transition: background 0.1s ease; -} - -.atom-row:hover { - background: var(--bg-tertiary); -} - -.atom-row.selected { - background: rgba(74, 158, 255, 0.1); - border-left: 3px solid var(--accent-blue); -} - -.atom-header { - display: flex; - justify-content: space-between; - align-items: center; - margin-bottom: 8px; -} - -.atom-type { - font-weight: 600; - font-size: 13px; - color: var(--accent-purple); -} - -.atom-id { - font-family: "JetBrains Mono", monospace; - font-size: 10px; - color: var(--text-secondary); - background: var(--bg-tertiary); - padding: 2px 6px; - border-radius: 3px; -} - -.atom-value { - background: var(--bg-primary); - border-radius: 4px; - padding: 8px; - margin-bottom: 8px; -} - -.atom-value pre { - font-family: "JetBrains Mono", monospace; - font-size: 11px; - color: var(--accent-green); - margin: 0; - white-space: pre-wrap; - word-break: break-word; -} - -.atom-meta { - display: flex; - justify-content: space-between; - font-size: 11px; - color: var(--text-secondary); -} - -.atom-tick { - font-family: "JetBrains Mono", monospace; -} - -.atom-rule { - font-style: italic; -} diff --git 
a/apps/ttd-app/src/components/StateInspector.tsx b/apps/ttd-app/src/components/StateInspector.tsx deleted file mode 100644 index f4362405..00000000 --- a/apps/ttd-app/src/components/StateInspector.tsx +++ /dev/null @@ -1,108 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { useTtdStore } from "../store/ttdStore"; -import type { AtomEntry } from "../types/ttd"; -import { truncateHash } from "../types/ttd"; -import "./StateInspector.css"; - -interface StateInspectorProps { - engine: TtdEngine; -} - -// Mock atoms for visualization -const mockAtoms: AtomEntry[] = [ - { - id: new Uint8Array(32).fill(10), - typeId: new Uint8Array(32).fill(100), - typeName: "Counter", - value: { count: 47 }, - lastWriteTick: 45n, - lastWriteRule: "update_counter", - }, - { - id: new Uint8Array(32).fill(11), - typeId: new Uint8Array(32).fill(101), - typeName: "Position", - value: { x: 120.5, y: 340.2 }, - lastWriteTick: 44n, - lastWriteRule: "physics_step", - }, - { - id: new Uint8Array(32).fill(12), - typeId: new Uint8Array(32).fill(102), - typeName: "Velocity", - value: { dx: 2.1, dy: -5.3 }, - lastWriteTick: 44n, - lastWriteRule: "physics_step", - }, - { - id: new Uint8Array(32).fill(13), - typeId: new Uint8Array(32).fill(103), - typeName: "Input", - value: { key: "ArrowRight", pressed: true }, - lastWriteTick: 40n, - lastWriteRule: "handle_input", - }, -]; - -export function StateInspector({ engine: _engine }: StateInspectorProps) { - const selectedAtomId = useTtdStore((s) => s.selectedAtomId); - const { selectAtom, toggleProvenanceDrawer } = useTtdStore(); - - const handleAtomClick = (atom: AtomEntry) => { - selectAtom(atom.id); - }; - - const handleViewProvenance = () => { - toggleProvenanceDrawer(); - }; - - return ( -
-
- State Inspector - {selectedAtomId && ( - - )} -
-
-
- {mockAtoms.map((atom) => { - const isSelected = selectedAtomId && arraysEqual(atom.id, selectedAtomId); - return ( -
handleAtomClick(atom)} - > -
- {atom.typeName} - {truncateHash(atom.id, 4)} -
-
-
{JSON.stringify(atom.value, null, 2)}
-
-
- T{atom.lastWriteTick.toString()} - {atom.lastWriteRule} -
-
- ); - })} -
-
-
- ); -} - -function arraysEqual(a: Uint8Array, b: Uint8Array): boolean { - if (a.length !== b.length) return false; - for (let i = 0; i < a.length; i++) { - if (a[i] !== b[i]) return false; - } - return true; -} diff --git a/apps/ttd-app/src/components/TimeControls.css b/apps/ttd-app/src/components/TimeControls.css deleted file mode 100644 index 322e13e2..00000000 --- a/apps/ttd-app/src/components/TimeControls.css +++ /dev/null @@ -1,97 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.time-controls { - display: flex; - align-items: center; - gap: 24px; - width: 100%; -} - -.time-controls-brand { - display: flex; - align-items: center; - gap: 8px; - font-weight: 600; -} - -.brand-icon { - color: var(--accent-blue); - font-size: 18px; -} - -.brand-name { - font-size: 14px; - letter-spacing: 0.5px; -} - -.time-controls-playback { - display: flex; - align-items: center; - gap: 4px; -} - -.btn-icon { - width: 36px; - height: 36px; - padding: 0; - display: flex; - align-items: center; - justify-content: center; - font-size: 14px; -} - -.time-controls-tick { - display: flex; - align-items: baseline; - gap: 4px; - font-family: "JetBrains Mono", monospace; -} - -.tick-label { - color: var(--text-secondary); - font-size: 11px; - text-transform: uppercase; - margin-right: 4px; -} - -.tick-value { - font-size: 18px; - font-weight: 600; - color: var(--accent-blue); - min-width: 60px; -} - -.tick-separator { - color: var(--text-secondary); -} - -.tick-max { - color: var(--text-secondary); - font-size: 14px; -} - -.time-controls-speed { - display: flex; - align-items: center; - gap: 8px; -} - -.time-controls-speed label { - color: var(--text-secondary); - font-size: 12px; -} - -.time-controls-speed select { - background: var(--bg-tertiary); - border: 1px solid var(--border-color); - border-radius: 4px; - color: var(--text-primary); - padding: 4px 8px; - font-size: 12px; -} - -/* Push compliance badge to the right */ -.time-controls > 
.badge { - margin-left: auto; -} diff --git a/apps/ttd-app/src/components/TimeControls.tsx b/apps/ttd-app/src/components/TimeControls.tsx deleted file mode 100644 index 97805659..00000000 --- a/apps/ttd-app/src/components/TimeControls.tsx +++ /dev/null @@ -1,100 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { useTtdStore, selectCurrentTick, selectMaxTick, selectIsCompliant } from "../store/ttdStore"; -import "./TimeControls.css"; - -interface TimeControlsProps { - engine: TtdEngine; -} - -export function TimeControls({ engine: _engine }: TimeControlsProps) { - const currentTick = useTtdStore(selectCurrentTick); - const maxTick = useTtdStore(selectMaxTick); - const isPlaying = useTtdStore((s) => s.isPlaying); - const playbackSpeed = useTtdStore((s) => s.playbackSpeed); - const isCompliant = useTtdStore(selectIsCompliant); - - const { play, pause, stepForward, stepBack, setPlaybackSpeed, fork } = useTtdStore(); - - return ( -
- {/* Logo / Title */} -
- - Echo TTD -
- - {/* Playback buttons */} -
- - - {isPlaying ? ( - - ) : ( - - )} - - -
- - {/* Tick display */} -
- Tick - {currentTick.toString()} - / - {maxTick.toString()} -
- - {/* Speed control */} -
- - -
- - {/* Fork button */} - - - {/* Compliance badge */} -
- {isCompliant ? "✓ Compliant" : "✗ Violations"} -
-
- ); -} diff --git a/apps/ttd-app/src/components/Timeline.css b/apps/ttd-app/src/components/Timeline.css deleted file mode 100644 index cf2a8e58..00000000 --- a/apps/ttd-app/src/components/Timeline.css +++ /dev/null @@ -1,103 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.timeline { - display: flex; - flex-direction: column; - gap: 8px; -} - -.timeline-track { - position: relative; - height: 24px; - background: var(--bg-tertiary); - border-radius: 4px; - cursor: pointer; - overflow: hidden; -} - -.timeline-progress { - position: absolute; - left: 0; - top: 0; - bottom: 0; - background: linear-gradient(90deg, var(--accent-blue), rgba(74, 158, 255, 0.5)); - border-radius: 4px 0 0 4px; - pointer-events: none; -} - -.timeline-playhead { - position: absolute; - top: -2px; - bottom: -2px; - width: 3px; - background: var(--accent-blue); - border-radius: 2px; - transform: translateX(-50%); - box-shadow: 0 0 8px var(--accent-blue); - pointer-events: none; -} - -.timeline-marker { - position: absolute; - top: 50%; - width: 8px; - height: 8px; - transform: translate(-50%, -50%) rotate(45deg); - border-radius: 2px; - pointer-events: none; -} - -.timeline-marker-intent { - background: var(--accent-purple); -} - -.timeline-marker-fork { - background: var(--accent-green); -} - -.timeline-marker-rule { - background: var(--accent-blue); -} - -.timeline-marker-violation { - background: var(--accent-red); -} - -.timeline-labels { - display: flex; - justify-content: space-between; - font-size: 10px; - color: var(--text-secondary); - font-family: "JetBrains Mono", monospace; - padding: 0 4px; -} - -.timeline-legend { - display: flex; - gap: 16px; - font-size: 11px; - color: var(--text-secondary); -} - -.legend-item { - display: flex; - align-items: center; - gap: 4px; -} - -.legend-intent { - color: var(--accent-purple); -} - -.legend-fork { - color: var(--accent-green); -} - -.legend-rule { - color: var(--accent-blue); -} - 
-.legend-violation { - color: var(--accent-red); -} diff --git a/apps/ttd-app/src/components/Timeline.tsx b/apps/ttd-app/src/components/Timeline.tsx deleted file mode 100644 index 2de3c4ab..00000000 --- a/apps/ttd-app/src/components/Timeline.tsx +++ /dev/null @@ -1,87 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import { useCallback } from "react"; -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { useTtdStore, selectCurrentTick, selectMaxTick } from "../store/ttdStore"; -import "./Timeline.css"; - -interface TimelineProps { - engine: TtdEngine; -} - -export function Timeline({ engine: _engine }: TimelineProps) { - const currentTick = useTtdStore(selectCurrentTick); - const maxTick = useTtdStore(selectMaxTick); - const { seekTo } = useTtdStore(); - - // Mock markers for visualization - const markers = [ - { tick: 10n, type: "intent", label: "ClickIntent" }, - { tick: 25n, type: "fork", label: "Fork A" }, - { tick: 45n, type: "rule", label: "update_counter" }, - { tick: 72n, type: "violation", label: "Deadline missed" }, - ]; - - const handleScrub = useCallback( - (e: React.MouseEvent) => { - const rect = e.currentTarget.getBoundingClientRect(); - const x = e.clientX - rect.left; - const ratio = x / rect.width; - const tick = BigInt(Math.round(Number(maxTick) * ratio)); - seekTo(tick); - }, - [maxTick, seekTo] - ); - - const progress = - maxTick > 0n ? (Number(currentTick) / Number(maxTick)) * 100 : 0; - - return ( -
-
- {/* Progress bar */} -
- - {/* Playhead */} -
- - {/* Markers */} - {markers.map((marker, i) => { - const pos = - maxTick > 0n - ? (Number(marker.tick) / Number(maxTick)) * 100 - : 0; - return ( -
- ); - })} -
- - {/* Tick labels */} -
- 0 - {(Number(maxTick) / 4).toFixed(0)} - {(Number(maxTick) / 2).toFixed(0)} - {((Number(maxTick) * 3) / 4).toFixed(0)} - {maxTick.toString()} -
- - {/* Legend */} -
- ◆ Intent - ◆ Fork - ◆ Rule Fire - ◆ Violation -
-
- ); -} diff --git a/apps/ttd-app/src/components/WorldlineTree.css b/apps/ttd-app/src/components/WorldlineTree.css deleted file mode 100644 index 1634c045..00000000 --- a/apps/ttd-app/src/components/WorldlineTree.css +++ /dev/null @@ -1,126 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.worldline-tree { - height: 100%; - display: flex; - flex-direction: column; -} - -.worldline-tree .panel-content { - flex: 1; - overflow-y: auto; - padding: 8px 0; -} - -.tree-node-row { - display: flex; - align-items: center; - gap: 6px; - padding: 6px 8px; - cursor: pointer; - border-radius: 4px; - margin: 1px 4px; - transition: background 0.1s ease; -} - -.tree-node-row:hover { - background: var(--bg-tertiary); -} - -.tree-node-row.selected { - background: rgba(74, 158, 255, 0.15); -} - -.tree-expand { - font-size: 10px; - color: var(--text-secondary); - width: 12px; -} - -.tree-icon { - color: var(--accent-blue); - font-size: 12px; -} - -.tree-label { - flex: 1; - font-size: 13px; - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; -} - -.tree-fork-tick { - font-size: 11px; - color: var(--text-secondary); - font-family: "JetBrains Mono", monospace; -} - -.tree-badge { - width: 18px; - height: 18px; - border-radius: 50%; - display: flex; - align-items: center; - justify-content: center; - font-size: 10px; - font-weight: bold; -} - -.tree-children { - border-left: 1px solid var(--border-color); - margin-left: 20px; -} - -.tree-details { - padding: 8px; - margin: 4px; - background: var(--bg-tertiary); - border-radius: 4px; - font-size: 12px; -} - -.detail-row { - display: flex; - gap: 8px; - margin-bottom: 4px; -} - -.detail-label { - color: var(--text-secondary); - min-width: 40px; -} - -.detail-value { - font-family: "JetBrains Mono", monospace; - font-size: 11px; - background: var(--bg-secondary); - padding: 2px 6px; - border-radius: 3px; -} - -.detail-violations { - margin-top: 8px; - padding-top: 8px; - border-top: 
1px solid var(--border-color); -} - -.violation-item { - display: flex; - gap: 8px; - color: var(--accent-red); - margin-bottom: 4px; -} - -.violation-code { - font-family: "JetBrains Mono", monospace; - font-size: 10px; - background: rgba(255, 74, 74, 0.15); - padding: 1px 4px; - border-radius: 2px; -} - -.violation-msg { - font-size: 11px; -} diff --git a/apps/ttd-app/src/components/WorldlineTree.tsx b/apps/ttd-app/src/components/WorldlineTree.tsx deleted file mode 100644 index 0b992c7d..00000000 --- a/apps/ttd-app/src/components/WorldlineTree.tsx +++ /dev/null @@ -1,147 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { useTtdStore } from "../store/ttdStore"; -import type { WorldlineNode } from "../types/ttd"; -import { truncateHash } from "../types/ttd"; -import { ViolationSeverity } from "@echo/ttd-protocol-ts"; -import "./WorldlineTree.css"; - -interface WorldlineTreeProps { - engine: TtdEngine; -} - -// Mock data for visualization -const mockTree: WorldlineNode = { - id: new Uint8Array(32).fill(1), - label: "Main Timeline", - compliance: { isGreen: true, violations: [] }, - children: [ - { - id: new Uint8Array(32).fill(2), - parentId: new Uint8Array(32).fill(1), - forkTick: 25n, - label: "Fork A (1 worker)", - compliance: { isGreen: true, violations: [] }, - children: [], - }, - { - id: new Uint8Array(32).fill(3), - parentId: new Uint8Array(32).fill(1), - forkTick: 25n, - label: "Fork B (16 workers)", - compliance: { isGreen: true, violations: [] }, - children: [ - { - id: new Uint8Array(32).fill(4), - parentId: new Uint8Array(32).fill(3), - forkTick: 50n, - label: "Fork B.1 (nudged)", - compliance: { - isGreen: false, - violations: [{ code: "V001", message: "Deadline missed", severity: ViolationSeverity.ERROR, channelId: undefined, tick: undefined, emissionCount: undefined }], - }, - children: [], - }, - ], - }, - ], -}; - -export function WorldlineTree({ 
engine: _engine }: WorldlineTreeProps) { - const selectedWorldlineId = useTtdStore((s) => s.selectedWorldlineId); - const { selectWorldline } = useTtdStore(); - - return ( -
-
Worldlines
-
- -
-
- ); -} - -interface TreeNodeProps { - node: WorldlineNode; - depth: number; - selectedId: Uint8Array | null; - onSelect: (id: Uint8Array) => void; -} - -function TreeNode({ node, depth, selectedId, onSelect }: TreeNodeProps) { - const isSelected = - selectedId && arraysEqual(node.id, selectedId); - const hasViolations = !node.compliance.isGreen; - - return ( -
-
onSelect(node.id)} - > - {node.children.length > 0 && ( - - )} - - {node.label} - {node.forkTick !== undefined && ( - @{node.forkTick.toString()} - )} - - {hasViolations ? "!" : "✓"} - -
- - {node.children.length > 0 && ( -
- {node.children.map((child, i) => ( - - ))} -
- )} - - {isSelected && ( -
-
- ID - {truncateHash(node.id)} -
- {node.compliance.violations.length > 0 && ( -
- {node.compliance.violations.map((v, i) => ( -
- {v.code} - {v.message} -
- ))} -
- )} -
- )} -
- ); -} - -function arraysEqual(a: Uint8Array, b: Uint8Array): boolean { - if (a.length !== b.length) return false; - for (let i = 0; i < a.length; i++) { - if (a[i] !== b[i]) return false; - } - return true; -} diff --git a/apps/ttd-app/src/hooks/useTtdEngine.ts b/apps/ttd-app/src/hooks/useTtdEngine.ts deleted file mode 100644 index a0aef158..00000000 --- a/apps/ttd-app/src/hooks/useTtdEngine.ts +++ /dev/null @@ -1,251 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import { useEffect, useState } from "react"; - -/** - * TtdEngine wrapper interface. - * - * This will eventually import from ttd-browser WASM package. - * For now, it's a placeholder that simulates the API. - */ -export interface TtdEngine { - // Worldline management - register_empty_worldline(worldline_id: Uint8Array, warp_id: Uint8Array): void; - - // Cursor management - create_cursor(worldline_id: Uint8Array): number; - seek_to(cursor_id: number, tick: bigint): boolean; - step(cursor_id: number): Uint8Array; // CBOR-encoded StepResult - get_tick(cursor_id: number): bigint; - set_mode(cursor_id: number, mode: string): void; - set_seek(cursor_id: number, target: bigint, then_play: boolean): void; - update_frontier(cursor_id: number, max_tick: bigint): void; - drop_cursor(cursor_id: number): void; - - // Provenance queries - get_state_root(cursor_id: number): Uint8Array; - get_commit_hash(cursor_id: number): Uint8Array; - get_emissions_digest(cursor_id: number): Uint8Array; - get_history_length(worldline_id: Uint8Array): bigint; - - // Session management - create_session(): number; - set_session_cursor(session_id: number, cursor_id: number): void; - subscribe(session_id: number, channel: Uint8Array): void; - unsubscribe(session_id: number, channel: Uint8Array): void; - publish_truth(session_id: number, cursor_id: number): void; - drain_frames(session_id: number): Uint8Array; // CBOR-encoded TruthFrame[] - drop_session(session_id: number): void; - - // Transactions - 
begin(cursor_id: number): bigint; - commit(tx_id: bigint): Uint8Array; // TTDR receipt - - // Fork - snapshot(cursor_id: number): Uint8Array; - fork_from_snapshot(snapshot: Uint8Array, new_worldline_id: Uint8Array): number; - - // Compliance (stubs) - get_compliance(): Uint8Array; - get_obligations(): Uint8Array; -} - -export type EngineState = "loading" | "ready" | "error"; - -/** - * Hook to initialize and manage the TTD WASM engine. - * - * Usage: - * ```tsx - * const { engine, state, error } = useTtdEngine(); - * - * if (state === 'loading') return ; - * if (state === 'error') return ; - * - * // Use engine... - * ``` - */ -export function useTtdEngine(): { - engine: TtdEngine | null; - state: EngineState; - error: string | null; -} { - const [engine, setEngine] = useState(null); - const [state, setState] = useState("loading"); - const [error, setError] = useState(null); - - useEffect(() => { - let cancelled = false; - - async function initEngine() { - try { - let engineInstance: TtdEngine; - - try { - // Attempt actual WASM import - // @ts-ignore - const wasm = await import("ttd-browser"); - // wasm-bindgen init might be different depending on build - if (wasm.default && typeof wasm.default === "function") { - await wasm.default(); - } - engineInstance = new wasm.TtdEngine() as TtdEngine; - console.log("[ttd] WASM engine initialized"); - } catch (wasmErr) { - console.warn("[ttd] Failed to load WASM engine, falling back to mock:", wasmErr); - engineInstance = createMockEngine(); - } - - if (!cancelled) { - setEngine(engineInstance); - setState("ready"); - } - } catch (err) { - if (!cancelled) { - setError(err instanceof Error ? err.message : "Failed to load engine"); - setState("error"); - } - } - } - - initEngine(); - return () => { - cancelled = true; - }; - }, []); - - return { engine, state, error }; -} - -/** - * Create a mock engine for UI development. - * This simulates the ttd-browser API without actual WASM. 
- */ -function createMockEngine(): TtdEngine { - let nextCursorId = 1; - let nextSessionId = 1; - let nextTxId = 1n; - const cursors = new Map(); - - return { - register_empty_worldline(_worldlineId: Uint8Array, _warpId: Uint8Array) { - console.log("[mock] register_empty_worldline"); - }, - - create_cursor(worldlineId: Uint8Array): number { - const id = nextCursorId++; - cursors.set(id, { tick: 0n, worldlineId }); - console.log("[mock] createCursor:", id); - return id; - }, - - seek_to(cursorId: number, tick: bigint): boolean { - const cursor = cursors.get(cursorId); - if (cursor) cursor.tick = tick; - console.log("[mock] seekTo:", cursorId, tick); - return true; - }, - - step(cursorId: number): Uint8Array { - const cursor = cursors.get(cursorId); - if (cursor) cursor.tick++; - // Return mock CBOR-encoded StepResult - return new Uint8Array([0xa2, 0x66, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74]); - }, - - get_tick(cursorId: number): bigint { - return cursors.get(cursorId)?.tick ?? 0n; - }, - - set_mode(cursorId: number, mode: string) { - console.log("[mock] setMode:", cursorId, mode); - }, - - set_seek(cursorId: number, target: bigint, thenPlay: boolean) { - console.log("[mock] setSeek:", cursorId, target, thenPlay); - }, - - update_frontier(cursor_id: number, maxTick: bigint) { - console.log("[mock] updateFrontier:", cursor_id, maxTick); - }, - - drop_cursor(cursorId: number) { - cursors.delete(cursorId); - console.log("[mock] dropCursor:", cursorId); - }, - - get_state_root(_cursorId: number): Uint8Array { - return new Uint8Array(32); - }, - - get_commit_hash(_cursorId: number): Uint8Array { - return new Uint8Array(32); - }, - - get_emissions_digest(_cursorId: number): Uint8Array { - return new Uint8Array(32); - }, - - get_history_length(_worldlineId: Uint8Array): bigint { - return 100n; // Mock 100 ticks of history - }, - - create_session(): number { - return nextSessionId++; - }, - - set_session_cursor(sessionId: number, cursorId: number) { - console.log("[mock] 
setSessionCursor:", sessionId, cursorId); - }, - - subscribe(sessionId: number, _channel: Uint8Array) { - console.log("[mock] subscribe:", sessionId); - }, - - unsubscribe(sessionId: number, _channel: Uint8Array) { - console.log("[mock] unsubscribe:", sessionId); - }, - - publish_truth(sessionId: number, cursorId: number) { - console.log("[mock] publishTruth:", sessionId, cursorId); - }, - - drain_frames(_sessionId: number): Uint8Array { - return new Uint8Array([0x80]); // Empty CBOR array - }, - - drop_session(sessionId: number) { - console.log("[mock] dropSession:", sessionId); - }, - - begin(cursorId: number): bigint { - console.log("[mock] begin:", cursorId); - return nextTxId++; - }, - - commit(txId: bigint): Uint8Array { - console.log("[mock] commit:", txId); - return new Uint8Array(256); // Mock TTDR receipt - }, - - snapshot(cursorId: number): Uint8Array { - console.log("[mock] snapshot:", cursorId); - return new Uint8Array(64); - }, - - fork_from_snapshot(_snapshot: Uint8Array, _newWorldlineId: Uint8Array): number { - return nextCursorId++; - }, - - get_compliance(): Uint8Array { - // Mock CBOR: { isGreen: true, violations: [] } - return new Uint8Array([0xa2, 0x67, 0x69, 0x73, 0x47, 0x72, 0x65, 0x65, 0x6e, 0xf5]); - }, - - get_obligations(): Uint8Array { - // Mock CBOR: { pending: [], satisfied: [], violated: [] } - return new Uint8Array([0xa3]); - }, - }; -} diff --git a/apps/ttd-app/src/store/ttdStore.ts b/apps/ttd-app/src/store/ttdStore.ts deleted file mode 100644 index 01f34160..00000000 --- a/apps/ttd-app/src/store/ttdStore.ts +++ /dev/null @@ -1,174 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import { create } from "zustand"; -import type { - CursorState, - PlaybackMode, - WorldlineId, - WorldlineNode, - ComplianceModel, - ObligationStateApp, - AtomEntry, -} from "../types/ttd"; - -// ─── Store State ───────────────────────────────────────────────────────────── - -interface TtdState { - // Active cursor - 
activeCursor: CursorState | null; - - // Playback state - isPlaying: boolean; - playbackSpeed: number; // 1 = normal, 0.5 = half, 2 = double - - // Worldline tree - worldlineTree: WorldlineNode | null; - selectedWorldlineId: WorldlineId | null; - - // Compliance & Obligations - compliance: ComplianceModel; - obligations: ObligationStateApp; - - // State inspector - atoms: AtomEntry[]; - selectedAtomId: Uint8Array | null; - - // UI state - showProvenanceDrawer: boolean; - show4DView: boolean; - activePanelId: string; -} - -interface TtdActions { - // Cursor actions - setActiveCursor: (cursor: CursorState | null) => void; - updateCursorTick: (tick: bigint) => void; - setPlaybackMode: (mode: PlaybackMode) => void; - - // Playback controls - play: () => void; - pause: () => void; - stepForward: () => void; - stepBack: () => void; - seekTo: (tick: bigint) => void; - setPlaybackSpeed: (speed: number) => void; - - // Worldline actions - setWorldlineTree: (tree: WorldlineNode) => void; - selectWorldline: (id: WorldlineId | null) => void; - fork: () => void; - - // Compliance - setCompliance: (compliance: ComplianceModel) => void; - setObligations: (obligations: ObligationStateApp) => void; - - // State inspector - setAtoms: (atoms: AtomEntry[]) => void; - selectAtom: (id: Uint8Array | null) => void; - - // UI actions - toggleProvenanceDrawer: () => void; - toggle4DView: () => void; - setActivePanel: (panelId: string) => void; -} - -// ─── Store Implementation ──────────────────────────────────────────────────── - -export const useTtdStore = create((set, get) => ({ - // Initial state - activeCursor: null, - isPlaying: false, - playbackSpeed: 1, - worldlineTree: null, - selectedWorldlineId: null, - compliance: { isGreen: true, violations: [] }, - obligations: { pending: [], satisfied: [], violated: [] }, - atoms: [], - selectedAtomId: null, - showProvenanceDrawer: false, - show4DView: false, - activePanelId: "state-inspector", - - // Cursor actions - setActiveCursor: 
(cursor) => set({ activeCursor: cursor }), - - updateCursorTick: (tick) => - set((state) => - state.activeCursor ? { activeCursor: { ...state.activeCursor, tick } } : {} - ), - - setPlaybackMode: (mode) => - set((state) => - state.activeCursor - ? { activeCursor: { ...state.activeCursor, mode } } - : {} - ), - - // Playback controls - play: () => { - set({ isPlaying: true }); - get().setPlaybackMode("PLAY"); - }, - - pause: () => { - set({ isPlaying: false }); - get().setPlaybackMode("PAUSED"); - }, - - stepForward: () => { - get().setPlaybackMode("STEP_FORWARD"); - }, - - stepBack: () => { - get().setPlaybackMode("STEP_BACK"); - }, - - seekTo: (tick) => - set((state) => - state.activeCursor - ? { activeCursor: { ...state.activeCursor, tick } } - : {} - ), - - setPlaybackSpeed: (speed) => set({ playbackSpeed: speed }), - - // Worldline actions - setWorldlineTree: (tree) => set({ worldlineTree: tree }), - - selectWorldline: (id) => set({ selectedWorldlineId: id }), - - fork: () => { - // Will be implemented with engine integration - console.log("Fork requested"); - }, - - // Compliance - setCompliance: (compliance) => set({ compliance }), - setObligations: (obligations) => set({ obligations }), - - // State inspector - setAtoms: (atoms) => set({ atoms }), - selectAtom: (id) => set({ selectedAtomId: id }), - - // UI actions - toggleProvenanceDrawer: () => - set((state) => ({ showProvenanceDrawer: !state.showProvenanceDrawer })), - - toggle4DView: () => set((state) => ({ show4DView: !state.show4DView })), - - setActivePanel: (panelId) => set({ activePanelId: panelId }), -})); - -// ─── Selectors ─────────────────────────────────────────────────────────────── - -export const selectCurrentTick = (state: TtdState) => - state.activeCursor?.tick ?? 0n; - -export const selectMaxTick = (state: TtdState) => - state.activeCursor?.maxTick ?? 
0n; - -export const selectIsCompliant = (state: TtdState) => state.compliance.isGreen; - -export const selectPendingObligations = (state: TtdState) => - state.obligations.pending.length; diff --git a/apps/ttd-app/src/types/ttd.ts b/apps/ttd-app/src/types/ttd.ts deleted file mode 100644 index 849611fc..00000000 --- a/apps/ttd-app/src/types/ttd.ts +++ /dev/null @@ -1,195 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -/** - * TTD Protocol Types & App-Specific Utilities - * - * Re-exports types from @echo/ttd-protocol-ts (Wesley-generated). - * Keeps utility functions and app-specific types that are not in the protocol. - */ - -// ─── Import Protocol Types ─────────────────────────────────────────────────── - -import type { - Hash, - CursorRole, - SeekResult, - ComplianceStatus, - ViolationSeverity, - StepResultKind, - CursorMoved, - SeekCompleted, - SeekFailed, - ViolationDetected, - ComplianceUpdate, - SessionStarted, - SessionEnded, - CursorCreated, - CursorDestroyed, - Violation, - TruthFrame, - ObligationState, - StepResult, - Snapshot, - ComplianceModel, - Obligation, - ObligationReport, - TtdSystem, -} from "@echo/ttd-protocol-ts"; - -// ─── Re-export Protocol Types ──────────────────────────────────────────────── - -export type { - Hash, - CursorRole, - SeekResult, - ComplianceStatus, - ViolationSeverity, - StepResultKind, - CursorMoved, - SeekCompleted, - SeekFailed, - ViolationDetected, - ComplianceUpdate, - SessionStarted, - SessionEnded, - CursorCreated, - CursorDestroyed, - Violation, - TruthFrame, - ObligationState, - StepResult, - Snapshot, - ComplianceModel, - Obligation, - ObligationReport, - TtdSystem, -}; - -// ─── App-Specific Type Aliases & Helpers ───────────────────────────────────── - -/** Worldline identifier (32-byte hash) */ -export type WorldlineId = Hash; - -/** Channel identifier (32-byte hash) */ -export type ChannelId = Hash; - -/** - * App-specific PlaybackMode type - * Maps to the protocol's 
PlaybackMode enum with string literal values - */ -export type PlaybackMode = "PAUSED" | "PLAY" | "STEP_FORWARD" | "STEP_BACK" | "SEEK"; - -// ─── TTD FFI & Privacy (from echo-wasm-abi) ───────────────────────────────── - -/** Privacy mask for field-level redaction */ -export enum PrivacyMask { - Public = 0, - Pseudonymized = 1, - Private = 2, -} - -/** Opaque session token for FFI */ -export type SessionToken = bigint; - -/** TTD related errors */ -export type TtdError = - | { kind: "InvalidToken" } - | { kind: "BufferOverflow" } - | { kind: "PermissionDenied" } - | { kind: "Internal", message: string }; - -// ─── App-Specific Cursor State (extends protocol) ───────────────────────────── - -/** Extended cursor state with app-specific fields */ -export interface CursorState { - id: number; - worldlineId: WorldlineId; - tick: bigint; - mode: PlaybackMode; - maxTick: bigint; -} - -// ─── App-Specific Obligation State (extends protocol) ────────────────────────── - -/** Extended obligation state with typed status */ -export type ObligationStatus = "Pending" | "Satisfied" | "Violated"; - -export interface ObligationStateApp { - pending: Array<{ id: string; description: string; deadlineTick: bigint }>; - satisfied: Array<{ id: string; description: string; deadlineTick: bigint }>; - violated: Array<{ id: string; description: string; deadlineTick: bigint }>; -} - -// ─── Worldlines (app-specific) ──────────────────────────────────────────────── - -export interface WorldlineNode { - id: WorldlineId; - parentId?: WorldlineId; - forkTick?: bigint; - label: string; - compliance: ComplianceModel; - children: WorldlineNode[] ; -} - -// ─── Provenance (app-specific) ──────────────────────────────────────────────── - -export interface AtomWrite { - atomId: Hash; - ruleId: Hash; - tick: bigint; - oldValue?: Uint8Array; - newValue: Uint8Array; -} - -export interface ProvenanceChain { - atomId: Hash; - writes: AtomWrite[]; -} - -// ─── State Inspector (app-specific) 
─────────────────────────────────────────── - -export interface AtomEntry { - id: Hash; - typeId: Hash; - typeName: string; - value: unknown; - lastWriteTick: bigint; - lastWriteRule: string; -} - -// ─── Receipts (app-specific) ───────────────────────────────────────────────── - -export interface TtdrReceipt { - version: number; - worldlineId: WorldlineId; - tick: bigint; - commitHash: Hash; - stateRoot: Hash; - patchDigest: Hash; - emissionsDigest: Hash; -} - -// ─── Utility Functions ──────────────────────────────────────────────────────── - -/** Convert a hex string to Uint8Array */ -export function hexToBytes(hex: string): Uint8Array { - const bytes = new Uint8Array(hex.length / 2); - for (let i = 0; i < bytes.length; i++) { - bytes[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16); - } - return bytes; -} - -/** Convert Uint8Array to hex string */ -export function bytesToHex(bytes: Uint8Array): string { - return Array.from(bytes) - .map((b) => b.toString(16).padStart(2, "0")) - .join(""); -} - -/** Truncate a hash for display */ -export function truncateHash(hash: Hash, chars = 8): string { - const hex = bytesToHex(hash); - return `${hex.slice(0, chars)}…${hex.slice(-chars)}`; -} diff --git a/apps/ttd-app/src/views/Layout.css b/apps/ttd-app/src/views/Layout.css deleted file mode 100644 index 142792cc..00000000 --- a/apps/ttd-app/src/views/Layout.css +++ /dev/null @@ -1,121 +0,0 @@ -/* SPDX-License-Identifier: Apache-2.0 */ -/* © James Ross Ω FLYING•ROBOTS */ - -.ttd-layout { - display: grid; - grid-template-rows: auto 1fr auto; - grid-template-columns: 260px 1fr 320px; - grid-template-areas: - "header header header" - "left center right" - "footer footer footer"; - width: 100vw; - height: 100vh; - gap: 1px; - background: var(--border-color); -} - -.ttd-header { - grid-area: header; - background: var(--bg-secondary); - padding: 8px 16px; - display: flex; - align-items: center; - gap: 16px; -} - -.ttd-main { - display: contents; -} - -.ttd-sidebar-left { - 
grid-area: left; - background: var(--bg-secondary); - overflow: hidden; - display: flex; - flex-direction: column; -} - -.ttd-center { - grid-area: center; - background: var(--bg-primary); - overflow: hidden; - padding: 8px; -} - -.ttd-sidebar-right { - grid-area: right; - background: var(--bg-secondary); - overflow: hidden; - display: flex; - flex-direction: column; -} - -.ttd-footer { - grid-area: footer; - background: var(--bg-secondary); - padding: 8px 16px; - border-top: 1px solid var(--border-color); -} - -/* 3D View */ -.ttd-3d-view { - height: 100%; - display: flex; - flex-direction: column; -} - -.ttd-3d-canvas { - flex: 1; - display: flex; - align-items: center; - justify-content: center; - background: var(--bg-primary); - border-radius: 4px; -} - -.ttd-3d-placeholder { - text-align: center; - color: var(--text-secondary); -} - -.ttd-3d-placeholder p { - margin-bottom: 8px; -} - -.ttd-3d-placeholder .hint { - font-size: 12px; - opacity: 0.7; -} - -.ttd-3d-placeholder code { - background: var(--bg-tertiary); - padding: 2px 6px; - border-radius: 3px; - font-family: "JetBrains Mono", monospace; - font-size: 11px; -} - -/* Responsive: collapse sidebars on small screens */ -@media (max-width: 1200px) { - .ttd-layout { - grid-template-columns: 200px 1fr 260px; - } -} - -@media (max-width: 900px) { - .ttd-layout { - grid-template-columns: 1fr; - grid-template-rows: auto 1fr auto auto; - grid-template-areas: - "header" - "center" - "footer" - "footer"; - } - - .ttd-sidebar-left, - .ttd-sidebar-right { - display: none; - } -} diff --git a/apps/ttd-app/src/views/Layout.tsx b/apps/ttd-app/src/views/Layout.tsx deleted file mode 100644 index 667970e0..00000000 --- a/apps/ttd-app/src/views/Layout.tsx +++ /dev/null @@ -1,64 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import type { TtdEngine } from "../hooks/useTtdEngine"; -import { TimeControls } from "../components/TimeControls"; -import { Timeline } from 
"../components/Timeline"; -import { WorldlineTree } from "../components/WorldlineTree"; -import { StateInspector } from "../components/StateInspector"; -import { ProvenanceDrawer } from "../components/ProvenanceDrawer"; -import { useTtdStore } from "../store/ttdStore"; -import "./Layout.css"; - -interface LayoutProps { - engine: TtdEngine; -} - -export function Layout({ engine }: LayoutProps) { - const showProvenanceDrawer = useTtdStore((s) => s.showProvenanceDrawer); - - return ( -
- {/* Top: Time Controls */} -
- -
- - {/* Main content area */} -
- {/* Left sidebar: Worldline Tree */} - - - {/* Center: 3D View placeholder */} -
-
-
4D Provenance View
-
-
-

Three.js visualization will render here

-

- Connect @echo/renderer-three to enable -

-
-
-
-
- - {/* Right sidebar: State Inspector */} - -
- - {/* Bottom: Timeline */} -
- -
- - {/* Provenance Drawer (slide-out) */} - {showProvenanceDrawer && } -
- ); -} diff --git a/crates/echo-registry-api/README.md b/crates/echo-registry-api/README.md index 10d24d2e..4aca8ada 100644 --- a/crates/echo-registry-api/README.md +++ b/crates/echo-registry-api/README.md @@ -7,3 +7,30 @@ Generic registry interface for Echo WASM helpers. Provides the trait and data types (`RegistryProvider`, `RegistryInfo`, `OpDef`) that an application-specific registry crate implements. `warp-wasm` links only to this interface so Echo stays generic; apps supply their own registry at build time. + +`OpDef` preserves authored operation directive metadata as JSON. Echo admission +tooling can interpret entries such as `wes_footprint`, but this crate only +carries the data so the generic runtime boundary stays application-neutral. + +## Contract artifact verification + +Hosts can call `verify_contract_artifact(...)` against a generated +`RegistryProvider` before deciding how much trust to assign to a +Wesley-generated artifact. The verification policy compares: + +- codec id; +- registry layout version; +- schema hash; +- expected per-operation footprint certificate hashes; +- optional per-operation generated artifact hashes; +- whether every mutation must carry a footprint certificate named by policy. + +A policy that checks only schema, codec, and layout returns +`MetadataVerified`. The stronger `CompileTimeCertified` posture is reserved for +policies that also require mutation footprint certificates and successfully +verify the expected certificate set. Release fast paths must key off the +posture, not merely on successful metadata verification. + +The verifier returns a typed `ContractArtifactRejection` on mismatch. It does +not validate application payload semantics or execute an operation; generated +application adapters still own domain validation before packing EINT bytes. 
diff --git a/crates/echo-registry-api/src/lib.rs b/crates/echo-registry-api/src/lib.rs index 2cc6af22..5cec9b81 100644 --- a/crates/echo-registry-api/src/lib.rs +++ b/crates/echo-registry-api/src/lib.rs @@ -19,6 +19,147 @@ pub struct RegistryInfo { pub schema_sha256_hex: &'static str, } +/// Trust posture assigned after a generated contract artifact has been verified. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ContractArtifactTrustPosture { + /// The registry artifact matched expected schema, codec, and registry + /// version, but the host policy did not request or prove the stronger + /// compile-time footprint certificate posture. + MetadataVerified, + /// The registry artifact matched the expected schema, codec, registry + /// version, and footprint certificate set supplied by the host policy. + CompileTimeCertified, +} + +/// One footprint certificate expected by the host for a generated operation. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct ExpectedFootprintCertificate<'a> { + /// Operation identifier that must exist in the generated registry. + pub op_id: u32, + /// Expected footprint certificate hash for the operation. + pub certificate_hash_hex: &'a str, + /// Optional expected generated artifact hash for the operation. + pub artifact_hash_hex: Option<&'a str>, +} + +/// Host policy for verifying a generated contract artifact before admission. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct ContractArtifactVerificationPolicy<'a> { + /// Expected codec identifier for the generated artifact. + pub codec_id: &'a str, + /// Expected registry layout version for the generated artifact. + pub registry_version: u32, + /// Expected schema hash for the generated artifact. + pub schema_sha256_hex: &'a str, + /// Expected footprint certificates keyed by operation id. + pub footprint_certificates: &'a [ExpectedFootprintCertificate<'a>], + /// Require every mutation op to carry a footprint certificate named by this policy. 
+ pub require_mutation_footprint_certificates: bool, +} + +/// Successful generated contract artifact verification result. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct VerifiedContractArtifact { + /// Registry metadata that was verified against host policy. + pub info: RegistryInfo, + /// Trust posture assigned to the generated artifact. + pub posture: ContractArtifactTrustPosture, +} + +/// Rejection returned when a generated contract artifact fails verification. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ContractArtifactRejection<'a> { + /// The registry codec id did not match host policy. + CodecIdMismatch { + /// Expected codec id. + expected: &'a str, + /// Actual codec id from the registry. + actual: &'static str, + }, + /// The registry layout version did not match host policy. + RegistryVersionMismatch { + /// Expected registry layout version. + expected: u32, + /// Actual registry layout version. + actual: u32, + }, + /// The registry schema hash did not match host policy. + SchemaHashMismatch { + /// Expected schema hash. + expected: &'a str, + /// Actual schema hash from the registry. + actual: &'static str, + }, + /// Host policy named an operation id that the registry does not contain. + MissingOperation { + /// Missing operation identifier. + op_id: u32, + }, + /// A required footprint certificate was missing from the operation. + MissingFootprintCertificate { + /// Operation identifier missing the certificate. + op_id: u32, + }, + /// A footprint certificate names a different operation id than its registry entry. + FootprintCertificateOpMismatch { + /// Operation identifier from the registry entry. + op_id: u32, + /// Operation identifier from the certificate. + certificate_op_id: u32, + }, + /// A footprint certificate names a different operation name than its registry entry. + FootprintCertificateNameMismatch { + /// Operation identifier whose certificate mismatched. 
+ op_id: u32, + /// Operation name from the registry entry. + expected: &'static str, + /// Operation name from the certificate. + actual: &'static str, + }, + /// A footprint certificate was created for a different schema hash. + FootprintCertificateSchemaMismatch { + /// Operation identifier whose certificate mismatched. + op_id: u32, + /// Expected schema hash. + expected: &'static str, + /// Schema hash from the certificate. + actual: &'static str, + }, + /// A footprint certificate hash did not match host policy. + FootprintCertificateHashMismatch { + /// Operation identifier whose certificate mismatched. + op_id: u32, + /// Expected certificate hash. + expected: &'a str, + /// Certificate hash from the registry artifact. + actual: &'static str, + }, + /// A generated artifact hash did not match host policy. + FootprintArtifactHashMismatch { + /// Operation identifier whose artifact hash mismatched. + op_id: u32, + /// Expected generated artifact hash. + expected: &'a str, + /// Generated artifact hash from the registry artifact. + actual: &'static str, + }, + /// Host policy requires certified mutations and this mutation was uncertified. + UncertifiedMutation { + /// Operation identifier missing the certificate. + op_id: u32, + /// Operation name missing the certificate. + op_name: &'static str, + }, + /// Host policy requires certified mutations and this mutation certificate + /// was not named in the expected certificate set. + UnverifiedMutationFootprintCertificate { + /// Operation identifier missing from the expected certificate set. + op_id: u32, + /// Operation name missing from the expected certificate set. + op_name: &'static str, + }, +} + /// Error codes for wasm helpers. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum HelperError { @@ -54,6 +195,73 @@ pub struct OpDef { pub args: &'static [ArgDef], /// Result type name (GraphQL return type). pub result_ty: &'static str, + /// Preserved operation directive metadata as JSON. 
+ /// + /// Echo-specific admission tooling can interpret entries such as + /// `wes_footprint`; the generic registry API only carries the authored + /// directive data. + pub directives_json: &'static str, + /// Optional compile-time footprint certificate emitted by Wesley tooling. + /// + /// Hosts can compare the certificate hash during registry load and treat a + /// match as the proof that this generated artifact is carrying the declared + /// footprint it was compiled with. Echo core still treats the footprint as + /// data; domain-specific meaning belongs to the generated application/module. + pub footprint_certificate: Option<&'static FootprintCertificate>, +} + +/// Compile-time footprint certificate for one generated operation. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct FootprintCertificate { + /// Operation identifier covered by this certificate. + pub op_id: u32, + /// Operation name covered by this certificate. + pub op_name: &'static str, + /// Hex-encoded schema hash used as the certificate basis. + pub schema_sha256_hex: &'static str, + /// Lowercase hex BLAKE3 hash of the generated operation artifact manifest. + /// + /// This is intentionally stronger than the declared read/write footprint: it + /// is expected to include the generated artifact family identity that + /// produced the operation helper, plus the operation shape covered by this + /// certificate. + pub artifact_hash_hex: &'static str, + /// Lowercase hex BLAKE3 hash of the full footprint certificate preimage. + pub certificate_hash_hex: &'static str, + /// Declared read resources, sorted and deduplicated by the generator. + pub reads: &'static [&'static str], + /// Declared write resources, sorted and deduplicated by the generator. + pub writes: &'static [&'static str], +} + +impl OpDef { + /// Return true when this operation carries a footprint certificate matching + /// the expected certificate hash and the registry schema hash. 
+ /// + /// Hosts call this once while loading a generated registry artifact. A + /// successful match means the operation's declared footprint was certified + /// against the same schema hash the registry reports. + pub fn footprint_certificate_matches( + &self, + schema_sha256_hex: &str, + expected_certificate_hash_hex: &str, + ) -> bool { + let Some(certificate) = self.footprint_certificate else { + return false; + }; + + if certificate.op_id != self.op_id { + return false; + } + if certificate.op_name != self.name { + return false; + } + if certificate.schema_sha256_hex != schema_sha256_hex { + return false; + } + + certificate.certificate_hash_hex == expected_certificate_hash_hex + } } /// Argument descriptor (flat; sufficient for strict object validation). @@ -108,3 +316,388 @@ pub trait RegistryProvider: Sync { /// Return all objects (for result validation). fn all_objects(&self) -> &'static [ObjectDef]; } + +/// Verify a generated contract registry against host artifact policy. +/// +/// This check is intentionally application-neutral. It proves only that the +/// loaded generated registry matches the host's expected schema, codec, registry +/// layout, and footprint certificate identities. Domain payload validation still +/// belongs to the generated application adapter for this slice. 
+pub fn verify_contract_artifact<'a>( + registry: &dyn RegistryProvider, + policy: &ContractArtifactVerificationPolicy<'a>, +) -> Result> { + let info = registry.info(); + if info.codec_id != policy.codec_id { + return Err(ContractArtifactRejection::CodecIdMismatch { + expected: policy.codec_id, + actual: info.codec_id, + }); + } + if info.registry_version != policy.registry_version { + return Err(ContractArtifactRejection::RegistryVersionMismatch { + expected: policy.registry_version, + actual: info.registry_version, + }); + } + if info.schema_sha256_hex != policy.schema_sha256_hex { + return Err(ContractArtifactRejection::SchemaHashMismatch { + expected: policy.schema_sha256_hex, + actual: info.schema_sha256_hex, + }); + } + + for expected in policy.footprint_certificates { + let op = registry.op_by_id(expected.op_id).ok_or( + ContractArtifactRejection::MissingOperation { + op_id: expected.op_id, + }, + )?; + verify_expected_footprint_certificate(op, info.schema_sha256_hex, expected)?; + } + + if policy.require_mutation_footprint_certificates { + for op in registry.all_ops() { + if op.kind == OpKind::Mutation { + if op.footprint_certificate.is_none() { + return Err(ContractArtifactRejection::UncertifiedMutation { + op_id: op.op_id, + op_name: op.name, + }); + } + if !policy + .footprint_certificates + .iter() + .any(|expected| expected.op_id == op.op_id) + { + return Err( + ContractArtifactRejection::UnverifiedMutationFootprintCertificate { + op_id: op.op_id, + op_name: op.name, + }, + ); + } + } + } + } + + let posture = if policy.require_mutation_footprint_certificates { + ContractArtifactTrustPosture::CompileTimeCertified + } else { + ContractArtifactTrustPosture::MetadataVerified + }; + + Ok(VerifiedContractArtifact { info, posture }) +} + +fn verify_expected_footprint_certificate<'a>( + op: &OpDef, + schema_sha256_hex: &'static str, + expected: &ExpectedFootprintCertificate<'a>, +) -> Result<(), ContractArtifactRejection<'a>> { + let certificate = op + 
.footprint_certificate + .ok_or(ContractArtifactRejection::MissingFootprintCertificate { op_id: op.op_id })?; + + if certificate.op_id != op.op_id { + return Err(ContractArtifactRejection::FootprintCertificateOpMismatch { + op_id: op.op_id, + certificate_op_id: certificate.op_id, + }); + } + if certificate.op_name != op.name { + return Err( + ContractArtifactRejection::FootprintCertificateNameMismatch { + op_id: op.op_id, + expected: op.name, + actual: certificate.op_name, + }, + ); + } + if certificate.schema_sha256_hex != schema_sha256_hex { + return Err( + ContractArtifactRejection::FootprintCertificateSchemaMismatch { + op_id: op.op_id, + expected: schema_sha256_hex, + actual: certificate.schema_sha256_hex, + }, + ); + } + if certificate.certificate_hash_hex != expected.certificate_hash_hex { + return Err( + ContractArtifactRejection::FootprintCertificateHashMismatch { + op_id: op.op_id, + expected: expected.certificate_hash_hex, + actual: certificate.certificate_hash_hex, + }, + ); + } + if let Some(expected_artifact_hash_hex) = expected.artifact_hash_hex { + if certificate.artifact_hash_hex != expected_artifact_hash_hex { + return Err(ContractArtifactRejection::FootprintArtifactHashMismatch { + op_id: op.op_id, + expected: expected_artifact_hash_hex, + actual: certificate.artifact_hash_hex, + }); + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::{ + verify_contract_artifact, ArgDef, ContractArtifactRejection, ContractArtifactTrustPosture, + ContractArtifactVerificationPolicy, ExpectedFootprintCertificate, FootprintCertificate, + ObjectDef, OpDef, OpKind, RegistryInfo, RegistryProvider, VerifiedContractArtifact, + }; + + const SCHEMA_SHA256_HEX: &str = + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; + const CERTIFICATE_HASH_HEX: &str = + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"; + const ARTIFACT_HASH_HEX: &str = + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"; + + static READS: 
&[&str] = &["CounterValue"]; + static WRITES: &[&str] = &["CounterValue"]; + static FOOTPRINT_CERTIFICATE: FootprintCertificate = FootprintCertificate { + op_id: 1001, + op_name: "increment", + schema_sha256_hex: SCHEMA_SHA256_HEX, + artifact_hash_hex: ARTIFACT_HASH_HEX, + certificate_hash_hex: CERTIFICATE_HASH_HEX, + reads: READS, + writes: WRITES, + }; + static INCREMENT_ARGS: &[ArgDef] = &[ArgDef { + name: "input", + ty: "IncrementInput", + required: true, + list: false, + }]; + static OPS_WITH_CERTIFICATE: &[OpDef] = &[ + OpDef { + kind: OpKind::Mutation, + name: "increment", + op_id: 1001, + args: INCREMENT_ARGS, + result_ty: "CounterValue", + directives_json: "{}", + footprint_certificate: Some(&FOOTPRINT_CERTIFICATE), + }, + OpDef { + kind: OpKind::Query, + name: "counterValue", + op_id: 1002, + args: &[], + result_ty: "CounterValue", + directives_json: "{}", + footprint_certificate: None, + }, + ]; + static OPS_WITHOUT_CERTIFICATE: &[OpDef] = &[OpDef { + kind: OpKind::Mutation, + name: "increment", + op_id: 1001, + args: INCREMENT_ARGS, + result_ty: "CounterValue", + directives_json: "{}", + footprint_certificate: None, + }]; + static OPS_QUERY_ONLY: &[OpDef] = &[OpDef { + kind: OpKind::Query, + name: "counterValue", + op_id: 1002, + args: &[], + result_ty: "CounterValue", + directives_json: "{}", + footprint_certificate: None, + }]; + + struct StaticRegistry { + ops: &'static [OpDef], + } + + impl RegistryProvider for StaticRegistry { + fn info(&self) -> RegistryInfo { + RegistryInfo { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + } + } + + fn op_by_id(&self, op_id: u32) -> Option<&'static OpDef> { + self.ops.iter().find(|op| op.op_id == op_id) + } + + fn all_ops(&self) -> &'static [OpDef] { + self.ops + } + + fn all_enums(&self) -> &'static [super::EnumDef] { + &[] + } + + fn all_objects(&self) -> &'static [ObjectDef] { + &[] + } + } + + #[test] + fn 
verifier_accepts_matching_registry_and_expected_certificate() { + let registry = StaticRegistry { + ops: OPS_WITH_CERTIFICATE, + }; + let expected_certificates = [ExpectedFootprintCertificate { + op_id: 1001, + certificate_hash_hex: CERTIFICATE_HASH_HEX, + artifact_hash_hex: Some(ARTIFACT_HASH_HEX), + }]; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &expected_certificates, + require_mutation_footprint_certificates: true, + }; + + let result = verify_contract_artifact(®istry, &policy); + + assert_eq!( + result, + Ok(VerifiedContractArtifact { + info: registry.info(), + posture: ContractArtifactTrustPosture::CompileTimeCertified, + }) + ); + } + + #[test] + fn verifier_rejects_certificate_hash_mismatch() { + let registry = StaticRegistry { + ops: OPS_WITH_CERTIFICATE, + }; + let expected_certificates = [ExpectedFootprintCertificate { + op_id: 1001, + certificate_hash_hex: "wrong", + artifact_hash_hex: Some(ARTIFACT_HASH_HEX), + }]; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &expected_certificates, + require_mutation_footprint_certificates: true, + }; + + let result = verify_contract_artifact(®istry, &policy); + + assert_eq!( + result, + Err( + ContractArtifactRejection::FootprintCertificateHashMismatch { + op_id: 1001, + expected: "wrong", + actual: CERTIFICATE_HASH_HEX, + } + ) + ); + } + + #[test] + fn verifier_does_not_compile_time_certify_empty_policy() { + let registry = StaticRegistry { + ops: OPS_QUERY_ONLY, + }; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &[], + require_mutation_footprint_certificates: false, + }; + + assert_eq!( + verify_contract_artifact(®istry, &policy), + 
Ok(VerifiedContractArtifact { + info: registry.info(), + posture: ContractArtifactTrustPosture::MetadataVerified, + }) + ); + } + + #[test] + fn verifier_does_not_compile_time_certify_weak_mutation_policy() { + let registry = StaticRegistry { + ops: OPS_WITH_CERTIFICATE, + }; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &[], + require_mutation_footprint_certificates: false, + }; + + assert_eq!( + verify_contract_artifact(®istry, &policy), + Ok(VerifiedContractArtifact { + info: registry.info(), + posture: ContractArtifactTrustPosture::MetadataVerified, + }) + ); + } + + #[test] + fn verifier_rejects_uncertified_mutation_when_policy_requires_it() { + let registry = StaticRegistry { + ops: OPS_WITHOUT_CERTIFICATE, + }; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &[], + require_mutation_footprint_certificates: true, + }; + + let result = verify_contract_artifact(®istry, &policy); + + assert_eq!( + result, + Err(ContractArtifactRejection::UncertifiedMutation { + op_id: 1001, + op_name: "increment", + }) + ); + } + + #[test] + fn verifier_rejects_mutation_certificate_not_named_by_policy() { + let registry = StaticRegistry { + ops: OPS_WITH_CERTIFICATE, + }; + let policy = ContractArtifactVerificationPolicy { + codec_id: "cbor-canon-v1", + registry_version: 1, + schema_sha256_hex: SCHEMA_SHA256_HEX, + footprint_certificates: &[], + require_mutation_footprint_certificates: true, + }; + + let result = verify_contract_artifact(®istry, &policy); + + assert_eq!( + result, + Err( + ContractArtifactRejection::UnverifiedMutationFootprintCertificate { + op_id: 1001, + op_name: "increment", + } + ) + ); + } +} diff --git a/crates/echo-session-ws-gateway/assets/vendor/buttons.min.css 
b/crates/echo-session-ws-gateway/assets/vendor/buttons.min.css deleted file mode 100644 index 165a06d6..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/buttons.min.css +++ /dev/null @@ -1 +0,0 @@ -:where(html){--gray-0-hsl:210 17% 98%;--gray-1-hsl:210 17% 95%;--gray-2-hsl:210 16% 93%;--gray-3-hsl:210 14% 89%;--gray-4-hsl:210 14% 83%;--gray-5-hsl:210 11% 71%;--gray-6-hsl:210 7% 56%;--gray-7-hsl:210 9% 31%;--gray-8-hsl:210 10% 23%;--gray-9-hsl:210 11% 15%;--gray-10-hsl:214 14% 10%;--gray-11-hsl:216 16% 6%;--gray-12-hsl:210 40% 2%}:where(button,input[type=button],.btn){--_accent:initial;--_text:initial;--_size:initial;--_bg-light:#fff;--_bg-dark:var(--surface-3);--_bg:var(--_bg-light);--_border:var(--surface-3);--_highlight-size:0;--_highlight-light:hsl(var(--gray-5-hsl)/25%);--_highlight-dark:hsl(var(--gray-12-hsl)/25%);--_highlight:var(--_highlight-light);--_ink-shadow-light:0 1px 0 var(--gray-3);--_ink-shadow-dark:0 1px 0 var(--surface-1);--_ink-shadow:var(--_ink-shadow-light);--_icon-size:var(--size-relative-7);--_icon-color:var(--_accent,var(--link));-webkit-tap-highlight-color:transparent;-webkit-touch-callout:none;align-items:center;background:var(--_bg);border:var(--border-size-2) solid var(--_border);border-radius:var(--radius-2);box-shadow:var(--shadow-2),0 1px var(--surface-3),0 0 0 var(--_highlight-size) var(--_highlight);color:var(--_text);display:inline-flex;font-size:var(--_size);font-weight:var(--font-weight-7);gap:var(--size-2);justify-content:center;padding-block:.75ch;padding-inline:var(--size-relative-6);text-align:center;text-shadow:var(--_ink-shadow);transition:border-color .5s 
var(--ease-3);user-select:none}:where(button,input[type=submit],.btn){--_accent:initial;--_text:initial;--_size:initial;--_bg-light:#fff;--_bg-dark:var(--surface-3);--_bg:var(--_bg-light);--_border:var(--surface-3);--_highlight-size:0;--_highlight-light:hsl(var(--gray-5-hsl)/25%);--_highlight-dark:hsl(var(--gray-12-hsl)/25%);--_highlight:var(--_highlight-light);--_ink-shadow-light:0 1px 0 var(--gray-3);--_ink-shadow-dark:0 1px 0 var(--surface-1);--_ink-shadow:var(--_ink-shadow-light);--_icon-size:var(--size-relative-7);--_icon-color:var(--_accent,var(--link));-webkit-tap-highlight-color:transparent;-webkit-touch-callout:none;align-items:center;background:var(--_bg);border:var(--border-size-2) solid var(--_border);border-radius:var(--radius-2);box-shadow:var(--shadow-2),0 1px var(--surface-3),0 0 0 var(--_highlight-size) var(--_highlight);color:var(--_text);display:inline-flex;font-size:var(--_size);font-weight:var(--font-weight-7);gap:var(--size-2);justify-content:center;padding-block:.75ch;padding-inline:var(--size-relative-6);text-align:center;text-shadow:var(--_ink-shadow);transition:border-color .5s var(--ease-3);user-select:none}:where(button,input[type=reset],.btn){--_accent:initial;--_text:initial;--_size:initial;--_bg-light:#fff;--_bg-dark:var(--surface-3);--_bg:var(--_bg-light);--_border:var(--surface-3);--_highlight-size:0;--_highlight-light:hsl(var(--gray-5-hsl)/25%);--_highlight-dark:hsl(var(--gray-12-hsl)/25%);--_highlight:var(--_highlight-light);--_ink-shadow-light:0 1px 0 var(--gray-3);--_ink-shadow-dark:0 1px 0 var(--surface-1);--_ink-shadow:var(--_ink-shadow-light);--_icon-size:var(--size-relative-7);--_icon-color:var(--_accent,var(--link));-webkit-tap-highlight-color:transparent;-webkit-touch-callout:none;align-items:center;background:var(--_bg);border:var(--border-size-2) solid var(--_border);border-radius:var(--radius-2);box-shadow:var(--shadow-2),0 1px var(--surface-3),0 0 0 var(--_highlight-size) 
var(--_highlight);color:var(--_text);display:inline-flex;font-size:var(--_size);font-weight:var(--font-weight-7);gap:var(--size-2);justify-content:center;padding-block:.75ch;padding-inline:var(--size-relative-6);text-align:center;text-shadow:var(--_ink-shadow);transition:border-color .5s var(--ease-3);user-select:none}:where(input[type=file])::-webkit-file-upload-button,:where(input[type=file])::file-selector-button{--_accent:initial;--_text:initial;--_size:initial;--_bg-light:#fff;--_bg-dark:var(--surface-3);--_bg:var(--_bg-light);--_border:var(--surface-3);--_highlight-size:0;--_highlight-light:hsl(var(--gray-5-hsl)/25%);--_highlight-dark:hsl(var(--gray-12-hsl)/25%);--_highlight:var(--_highlight-light);--_ink-shadow-light:0 1px 0 var(--gray-3);--_ink-shadow-dark:0 1px 0 var(--surface-1);--_ink-shadow:var(--_ink-shadow-light);--_icon-size:var(--size-relative-7);--_icon-color:var(--_accent,var(--link));-webkit-tap-highlight-color:transparent;-webkit-touch-callout:none;align-items:center;background:var(--_bg);border:var(--border-size-2) solid var(--_border);border-radius:var(--radius-2);box-shadow:var(--shadow-2),0 1px var(--surface-3),0 0 0 var(--_highlight-size) var(--_highlight);color:var(--_text);display:inline-flex;font-size:var(--_size);font-weight:var(--font-weight-7);gap:var(--size-2);justify-content:center;padding-block:.75ch;padding-inline:var(--size-relative-6);text-align:center;text-shadow:var(--_ink-shadow);transition:border-color .5s var(--ease-3);user-select:none}@media 
(prefers-color-scheme:dark){:where(button,input[type=button],.btn){--_highlight:var(--_highlight-dark);--_bg:var(--_bg-dark);--_ink-shadow:var(--_ink-shadow-dark)}:where(button,input[type=submit],.btn){--_highlight:var(--_highlight-dark);--_bg:var(--_bg-dark);--_ink-shadow:var(--_ink-shadow-dark)}:where(button,input[type=reset],.btn){--_highlight:var(--_highlight-dark);--_bg:var(--_bg-dark);--_ink-shadow:var(--_ink-shadow-dark)}:where(input[type=file])::-webkit-file-upload-button,:where(input[type=file])::file-selector-button{--_highlight:var(--_highlight-dark);--_bg:var(--_bg-dark);--_ink-shadow:var(--_ink-shadow-dark)}}@media (prefers-reduced-motion:no-preference){:where(button,input[type=button],.btn){transition:border-color .5s var(--ease-3),box-shadow 145ms var(--ease-4),outline-offset 145ms var(--ease-4)}:where(button,input[type=submit],.btn){transition:border-color .5s var(--ease-3),box-shadow 145ms var(--ease-4),outline-offset 145ms var(--ease-4)}:where(button,input[type=reset],.btn){transition:border-color .5s var(--ease-3),box-shadow 145ms var(--ease-4),outline-offset 145ms var(--ease-4)}:where(input[type=file])::-webkit-file-upload-button,:where(input[type=file])::file-selector-button{transition:border-color .5s var(--ease-3),box-shadow 145ms var(--ease-4),outline-offset 145ms var(--ease-4)}}[disabled]:where(button,input[type=button],.btn){--_bg:none;--_text:var(--gray-6);box-shadow:var(--shadow-1);cursor:not-allowed}[disabled]:where(button,input[type=submit],.btn){--_bg:none;--_text:var(--gray-6);box-shadow:var(--shadow-1);cursor:not-allowed}[disabled]:where(button,input[type=reset],.btn){--_bg:none;--_text:var(--gray-6);box-shadow:var(--shadow-1);cursor:not-allowed}@media 
(prefers-color-scheme:dark){[disabled]:where(button,input[type=button],.btn){--_text:var(--gray-5)}[disabled]:where(button,input[type=submit],.btn){--_text:var(--gray-5)}[disabled]:where(button,input[type=reset],.btn){--_text:var(--gray-5)}}:where(button,input[type=button],.btn):where(:not(:active):hover){--_highlight-size:var(--size-2);transition-duration:.25s}:where(button,input[type=submit],.btn):where(:not(:active):hover){--_highlight-size:var(--size-2);transition-duration:.25s}:where(button,input[type=reset],.btn):where(:not(:active):hover){--_highlight-size:var(--size-2);transition-duration:.25s}:where(button,input[type=button],.btn)>:where(svg){block-size:var(--_icon-size);filter:drop-shadow(var(--_ink-shadow));flex-shrink:0;inline-size:var(--_icon-size)}:where(button,input[type=submit],.btn)>:where(svg){block-size:var(--_icon-size);filter:drop-shadow(var(--_ink-shadow));flex-shrink:0;inline-size:var(--_icon-size)}:where(button,input[type=reset],.btn)>:where(svg){block-size:var(--_icon-size);filter:drop-shadow(var(--_ink-shadow));flex-shrink:0;inline-size:var(--_icon-size)}:where(button,input[type=button],.btn)>:where(svg>*){stroke:var(--_icon-color);stroke-width:var(--border-size-2)}:where(button,input[type=submit],.btn)>:where(svg>*){stroke:var(--_icon-color);stroke-width:var(--border-size-2)}:where(button,input[type=reset],.btn)>:where(svg>*){stroke:var(--_icon-color);stroke-width:var(--border-size-2)}:where(a.btn){-webkit-text-decoration:none;text-decoration:none}:where([type=submit],form button:not([type],[disabled])){--_text:var(--_accent,var(--link))}:where([type=reset]){--_text:var(--red-6);--_border:var(--red-3)}:where([type=reset]):focus-visible{outline-color:var(--red-6)}@media (prefers-color-scheme:dark){:where([type=reset]){--_text:var(--red-2);--_border:var(--surface-3)}}:where(form button:not([type]),[type=submit],[type=reset]):hover:not([disabled]){--_border:currentColor}:where(form 
button:not([type]),[type=submit],[type=reset]):focus-visible:not([disabled]){--_border:currentColor}:where(input[type=file]){align-self:flex-start;border:var(--border-size-1) solid var(--surface-2);border-radius:var(--radius-2);box-shadow:var(--inner-shadow-4);color:var(--text-2);cursor:auto;max-inline-size:100%;padding:0}:where(input[type=file])::-webkit-file-upload-button,:where(input[type=file])::file-selector-button{cursor:pointer;margin-inline-end:var(--size-relative-6)}@media (prefers-color-scheme:dark){:where([disabled]),:where([type=reset]),:where([type=submit]),:where(form button:not([type=button])){--_bg:var(--surface-1)}} \ No newline at end of file diff --git a/crates/echo-session-ws-gateway/assets/vendor/d3.v7.min.js b/crates/echo-session-ws-gateway/assets/vendor/d3.v7.min.js deleted file mode 100644 index 33bb8802..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/d3.v7.min.js +++ /dev/null @@ -1,2 +0,0 @@ -// https://d3js.org v7.9.0 Copyright 2010-2023 Mike Bostock -!function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((t="undefined"!=typeof globalThis?globalThis:t||self).d3=t.d3||{})}(this,(function(t){"use strict";function n(t,n){return null==t||null==n?NaN:tn?1:t>=n?0:NaN}function e(t,n){return null==t||null==n?NaN:nt?1:n>=t?0:NaN}function r(t){let r,o,a;function u(t,n,e=0,i=t.length){if(e>>1;o(t[r],n)<0?e=r+1:i=r}while(en(t(e),r),a=(n,e)=>t(n)-e):(r=t===n||t===e?t:i,o=t,a=t),{left:u,center:function(t,n,e=0,r=t.length){const i=u(t,n,e,r-1);return i>e&&a(t[i-1],n)>-a(t[i],n)?i-1:i},right:function(t,n,e=0,i=t.length){if(e>>1;o(t[r],n)<=0?e=r+1:i=r}while(e{n(t,e,(r<<=2)+0,(i<<=2)+0,o<<=2),n(t,e,r+1,i+1,o),n(t,e,r+2,i+2,o),n(t,e,r+3,i+3,o)}}));function d(t){return function(n,e,r=e){if(!((e=+e)>=0))throw new RangeError("invalid rx");if(!((r=+r)>=0))throw new RangeError("invalid ry");let{data:i,width:o,height:a}=n;if(!((o=Math.floor(o))>=0))throw new 
RangeError("invalid width");if(!((a=Math.floor(void 0!==a?a:i.length/o))>=0))throw new RangeError("invalid height");if(!o||!a||!e&&!r)return n;const u=e&&t(e),c=r&&t(r),f=i.slice();return u&&c?(p(u,f,i,o,a),p(u,i,f,o,a),p(u,f,i,o,a),g(c,i,f,o,a),g(c,f,i,o,a),g(c,i,f,o,a)):u?(p(u,i,f,o,a),p(u,f,i,o,a),p(u,i,f,o,a)):c&&(g(c,i,f,o,a),g(c,f,i,o,a),g(c,i,f,o,a)),n}}function p(t,n,e,r,i){for(let o=0,a=r*i;o{if(!((o-=a)>=i))return;let u=t*r[i];const c=a*t;for(let t=i,n=i+c;t{if(!((a-=u)>=o))return;let c=n*i[o];const f=u*n,s=f+u;for(let t=o,n=o+f;t=n&&++e;else{let r=-1;for(let i of t)null!=(i=n(i,++r,t))&&(i=+i)>=i&&++e}return e}function _(t){return 0|t.length}function b(t){return!(t>0)}function m(t){return"object"!=typeof t||"length"in t?t:Array.from(t)}function x(t,n){let e,r=0,i=0,o=0;if(void 0===n)for(let n of t)null!=n&&(n=+n)>=n&&(e=n-i,i+=e/++r,o+=e*(n-i));else{let a=-1;for(let u of t)null!=(u=n(u,++a,t))&&(u=+u)>=u&&(e=u-i,i+=e/++r,o+=e*(u-i))}if(r>1)return o/(r-1)}function w(t,n){const e=x(t,n);return e?Math.sqrt(e):e}function M(t,n){let e,r;if(void 0===n)for(const n of t)null!=n&&(void 0===e?n>=n&&(e=r=n):(e>n&&(e=n),r=o&&(e=r=o):(e>o&&(e=o),r0){for(o=t[--i];i>0&&(n=o,e=t[--i],o=n+e,r=e-(o-n),!r););i>0&&(r<0&&t[i-1]<0||r>0&&t[i-1]>0)&&(e=2*r,n=o+e,e==n-o&&(o=n))}return o}}class InternMap extends Map{constructor(t,n=N){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:n}}),null!=t)for(const[n,e]of t)this.set(n,e)}get(t){return super.get(A(this,t))}has(t){return super.has(A(this,t))}set(t,n){return super.set(S(this,t),n)}delete(t){return super.delete(E(this,t))}}class InternSet extends Set{constructor(t,n=N){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:n}}),null!=t)for(const n of t)this.add(n)}has(t){return super.has(A(this,t))}add(t){return super.add(S(this,t))}delete(t){return super.delete(E(this,t))}}function A({_intern:t,_key:n},e){const r=n(e);return t.has(r)?t.get(r):e}function 
S({_intern:t,_key:n},e){const r=n(e);return t.has(r)?t.get(r):(t.set(r,e),e)}function E({_intern:t,_key:n},e){const r=n(e);return t.has(r)&&(e=t.get(r),t.delete(r)),e}function N(t){return null!==t&&"object"==typeof t?t.valueOf():t}function k(t){return t}function C(t,...n){return F(t,k,k,n)}function P(t,...n){return F(t,Array.from,k,n)}function z(t,n){for(let e=1,r=n.length;et.pop().map((([n,e])=>[...t,n,e]))));return t}function $(t,n,...e){return F(t,k,n,e)}function D(t,n,...e){return F(t,Array.from,n,e)}function R(t){if(1!==t.length)throw new Error("duplicate key");return t[0]}function F(t,n,e,r){return function t(i,o){if(o>=r.length)return e(i);const a=new InternMap,u=r[o++];let c=-1;for(const t of i){const n=u(t,++c,i),e=a.get(n);e?e.push(t):a.set(n,[t])}for(const[n,e]of a)a.set(n,t(e,o));return n(a)}(t,0)}function q(t,n){return Array.from(n,(n=>t[n]))}function U(t,...n){if("function"!=typeof t[Symbol.iterator])throw new TypeError("values is not iterable");t=Array.from(t);let[e]=n;if(e&&2!==e.length||n.length>1){const r=Uint32Array.from(t,((t,n)=>n));return n.length>1?(n=n.map((n=>t.map(n))),r.sort(((t,e)=>{for(const r of n){const n=O(r[t],r[e]);if(n)return n}}))):(e=t.map(e),r.sort(((t,n)=>O(e[t],e[n])))),q(t,r)}return t.sort(I(e))}function I(t=n){if(t===n)return O;if("function"!=typeof t)throw new TypeError("compare is not a function");return(n,e)=>{const r=t(n,e);return r||0===r?r:(0===t(e,e))-(0===t(n,n))}}function O(t,n){return(null==t||!(t>=t))-(null==n||!(n>=n))||(tn?1:0)}var B=Array.prototype.slice;function Y(t){return()=>t}const L=Math.sqrt(50),j=Math.sqrt(10),H=Math.sqrt(2);function X(t,n,e){const r=(n-t)/Math.max(0,e),i=Math.floor(Math.log10(r)),o=r/Math.pow(10,i),a=o>=L?10:o>=j?5:o>=H?2:1;let u,c,f;return i<0?(f=Math.pow(10,-i)/a,u=Math.round(t*f),c=Math.round(n*f),u/fn&&--c,f=-f):(f=Math.pow(10,i)*a,u=Math.round(t/f),c=Math.round(n/f),u*fn&&--c),c0))return[];if((t=+t)===(n=+n))return[t];const r=n=i))return[];const u=o-i+1,c=new 
Array(u);if(r)if(a<0)for(let t=0;t0?(t=Math.floor(t/i)*i,n=Math.ceil(n/i)*i):i<0&&(t=Math.ceil(t*i)/i,n=Math.floor(n*i)/i),r=i}}function K(t){return Math.max(1,Math.ceil(Math.log(v(t))/Math.LN2)+1)}function Q(){var t=k,n=M,e=K;function r(r){Array.isArray(r)||(r=Array.from(r));var i,o,a,u=r.length,c=new Array(u);for(i=0;i=h)if(t>=h&&n===M){const t=V(l,h,e);isFinite(t)&&(t>0?h=(Math.floor(h/t)+1)*t:t<0&&(h=(Math.ceil(h*-t)+1)/-t))}else d.pop()}for(var p=d.length,g=0,y=p;d[g]<=l;)++g;for(;d[y-1]>h;)--y;(g||y0?d[i-1]:l,v.x1=i0)for(i=0;i=n)&&(e=n);else{let r=-1;for(let i of t)null!=(i=n(i,++r,t))&&(e=i)&&(e=i)}return e}function tt(t,n){let e,r=-1,i=-1;if(void 0===n)for(const n of t)++i,null!=n&&(e=n)&&(e=n,r=i);else for(let o of t)null!=(o=n(o,++i,t))&&(e=o)&&(e=o,r=i);return r}function nt(t,n){let e;if(void 0===n)for(const n of t)null!=n&&(e>n||void 0===e&&n>=n)&&(e=n);else{let r=-1;for(let i of t)null!=(i=n(i,++r,t))&&(e>i||void 0===e&&i>=i)&&(e=i)}return e}function et(t,n){let e,r=-1,i=-1;if(void 0===n)for(const n of t)++i,null!=n&&(e>n||void 0===e&&n>=n)&&(e=n,r=i);else for(let o of t)null!=(o=n(o,++i,t))&&(e>o||void 0===e&&o>=o)&&(e=o,r=i);return r}function rt(t,n,e=0,r=1/0,i){if(n=Math.floor(n),e=Math.floor(Math.max(0,e)),r=Math.floor(Math.min(t.length-1,r)),!(e<=n&&n<=r))return t;for(i=void 0===i?O:I(i);r>e;){if(r-e>600){const o=r-e+1,a=n-e+1,u=Math.log(o),c=.5*Math.exp(2*u/3),f=.5*Math.sqrt(u*c*(o-c)/o)*(a-o/2<0?-1:1);rt(t,n,Math.max(e,Math.floor(n-a*c/o+f)),Math.min(r,Math.floor(n+(o-a)*c/o+f)),i)}const o=t[n];let a=e,u=r;for(it(t,e,n),i(t[r],o)>0&&it(t,e,r);a0;)--u}0===i(t[e],o)?it(t,e,u):(++u,it(t,u,r)),u<=n&&(e=u+1),n<=u&&(r=u-1)}return t}function it(t,n,e){const r=t[n];t[n]=t[e],t[e]=r}function ot(t,e=n){let r,i=!1;if(1===e.length){let o;for(const a of t){const t=e(a);(i?n(t,o)>0:0===n(t,t))&&(r=a,o=t,i=!0)}}else for(const n of t)(i?e(n,r)>0:0===e(n,n))&&(r=n,i=!0);return r}function at(t,n,e){if(t=Float64Array.from(function*(t,n){if(void 0===n)for(let n of 
t)null!=n&&(n=+n)>=n&&(yield n);else{let e=-1;for(let r of t)null!=(r=n(r,++e,t))&&(r=+r)>=r&&(yield r)}}(t,e)),(r=t.length)&&!isNaN(n=+n)){if(n<=0||r<2)return nt(t);if(n>=1)return J(t);var r,i=(r-1)*n,o=Math.floor(i),a=J(rt(t,o).subarray(0,o+1));return a+(nt(t.subarray(o+1))-a)*(i-o)}}function ut(t,n,e=o){if((r=t.length)&&!isNaN(n=+n)){if(n<=0||r<2)return+e(t[0],0,t);if(n>=1)return+e(t[r-1],r-1,t);var r,i=(r-1)*n,a=Math.floor(i),u=+e(t[a],a,t);return u+(+e(t[a+1],a+1,t)-u)*(i-a)}}function ct(t,n,e=o){if(!isNaN(n=+n)){if(r=Float64Array.from(t,((n,r)=>o(e(t[r],r,t)))),n<=0)return et(r);if(n>=1)return tt(r);var r,i=Uint32Array.from(t,((t,n)=>n)),a=r.length-1,u=Math.floor(a*n);return rt(i,u,0,a,((t,n)=>O(r[t],r[n]))),(u=ot(i.subarray(0,u+1),(t=>r[t])))>=0?u:-1}}function ft(t){return Array.from(function*(t){for(const n of t)yield*n}(t))}function st(t,n){return[t,n]}function lt(t,n,e){t=+t,n=+n,e=(i=arguments.length)<2?(n=t,t=0,1):i<3?1:+e;for(var r=-1,i=0|Math.max(0,Math.ceil((n-t)/e)),o=new Array(i);++r+t(n)}function kt(t,n){return n=Math.max(0,t.bandwidth()-2*n)/2,t.round()&&(n=Math.round(n)),e=>+t(e)+n}function Ct(){return!this.__axis}function Pt(t,n){var e=[],r=null,i=null,o=6,a=6,u=3,c="undefined"!=typeof window&&window.devicePixelRatio>1?0:.5,f=t===xt||t===Tt?-1:1,s=t===Tt||t===wt?"x":"y",l=t===xt||t===Mt?St:Et;function h(h){var 
d=null==r?n.ticks?n.ticks.apply(n,e):n.domain():r,p=null==i?n.tickFormat?n.tickFormat.apply(n,e):mt:i,g=Math.max(o,0)+u,y=n.range(),v=+y[0]+c,_=+y[y.length-1]+c,b=(n.bandwidth?kt:Nt)(n.copy(),c),m=h.selection?h.selection():h,x=m.selectAll(".domain").data([null]),w=m.selectAll(".tick").data(d,n).order(),M=w.exit(),T=w.enter().append("g").attr("class","tick"),A=w.select("line"),S=w.select("text");x=x.merge(x.enter().insert("path",".tick").attr("class","domain").attr("stroke","currentColor")),w=w.merge(T),A=A.merge(T.append("line").attr("stroke","currentColor").attr(s+"2",f*o)),S=S.merge(T.append("text").attr("fill","currentColor").attr(s,f*g).attr("dy",t===xt?"0em":t===Mt?"0.71em":"0.32em")),h!==m&&(x=x.transition(h),w=w.transition(h),A=A.transition(h),S=S.transition(h),M=M.transition(h).attr("opacity",At).attr("transform",(function(t){return isFinite(t=b(t))?l(t+c):this.getAttribute("transform")})),T.attr("opacity",At).attr("transform",(function(t){var n=this.parentNode.__axis;return l((n&&isFinite(n=n(t))?n:b(t))+c)}))),M.remove(),x.attr("d",t===Tt||t===wt?a?"M"+f*a+","+v+"H"+c+"V"+_+"H"+f*a:"M"+c+","+v+"V"+_:a?"M"+v+","+f*a+"V"+c+"H"+_+"V"+f*a:"M"+v+","+c+"H"+_),w.attr("opacity",1).attr("transform",(function(t){return l(b(t)+c)})),A.attr(s+"2",f*o),S.attr(s,f*g).text(p),m.filter(Ct).attr("fill","none").attr("font-size",10).attr("font-family","sans-serif").attr("text-anchor",t===wt?"start":t===Tt?"end":"middle"),m.each((function(){this.__axis=b}))}return h.scale=function(t){return arguments.length?(n=t,h):n},h.ticks=function(){return e=Array.from(arguments),h},h.tickArguments=function(t){return arguments.length?(e=null==t?[]:Array.from(t),h):e.slice()},h.tickValues=function(t){return arguments.length?(r=null==t?null:Array.from(t),h):r&&r.slice()},h.tickFormat=function(t){return arguments.length?(i=t,h):i},h.tickSize=function(t){return arguments.length?(o=a=+t,h):o},h.tickSizeInner=function(t){return arguments.length?(o=+t,h):o},h.tickSizeOuter=function(t){return 
arguments.length?(a=+t,h):a},h.tickPadding=function(t){return arguments.length?(u=+t,h):u},h.offset=function(t){return arguments.length?(c=+t,h):c},h}var zt={value:()=>{}};function $t(){for(var t,n=0,e=arguments.length,r={};n=0&&(n=t.slice(e+1),t=t.slice(0,e)),t&&!r.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:n}}))),a=-1,u=o.length;if(!(arguments.length<2)){if(null!=n&&"function"!=typeof n)throw new Error("invalid callback: "+n);for(;++a0)for(var e,r,i=new Array(e),o=0;o=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Ut.hasOwnProperty(n)?{space:Ut[n],local:t}:t}function Ot(t){return function(){var n=this.ownerDocument,e=this.namespaceURI;return e===qt&&n.documentElement.namespaceURI===qt?n.createElement(t):n.createElementNS(e,t)}}function Bt(t){return function(){return this.ownerDocument.createElementNS(t.space,t.local)}}function Yt(t){var n=It(t);return(n.local?Bt:Ot)(n)}function Lt(){}function jt(t){return null==t?Lt:function(){return this.querySelector(t)}}function Ht(t){return null==t?[]:Array.isArray(t)?t:Array.from(t)}function Xt(){return[]}function Gt(t){return null==t?Xt:function(){return this.querySelectorAll(t)}}function Vt(t){return function(){return this.matches(t)}}function Wt(t){return function(n){return n.matches(t)}}var Zt=Array.prototype.find;function Kt(){return this.firstElementChild}var Qt=Array.prototype.filter;function Jt(){return Array.from(this.children)}function tn(t){return new Array(t.length)}function nn(t,n){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=n}function en(t,n,e,r,i,o){for(var a,u=0,c=n.length,f=o.length;un?1:t>=n?0:NaN}function cn(t){return function(){this.removeAttribute(t)}}function fn(t){return function(){this.removeAttributeNS(t.space,t.local)}}function sn(t,n){return function(){this.setAttribute(t,n)}}function ln(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function hn(t,n){return function(){var 
e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function dn(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function pn(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function gn(t){return function(){this.style.removeProperty(t)}}function yn(t,n,e){return function(){this.style.setProperty(t,n,e)}}function vn(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function _n(t,n){return t.style.getPropertyValue(n)||pn(t).getComputedStyle(t,null).getPropertyValue(n)}function bn(t){return function(){delete this[t]}}function mn(t,n){return function(){this[t]=n}}function xn(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function wn(t){return t.trim().split(/^|\s+/)}function Mn(t){return t.classList||new Tn(t)}function Tn(t){this._node=t,this._names=wn(t.getAttribute("class")||"")}function An(t,n){for(var e=Mn(t),r=-1,i=n.length;++r=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var Gn=[null];function Vn(t,n){this._groups=t,this._parents=n}function Wn(){return new Vn([[document.documentElement]],Gn)}function Zn(t){return"string"==typeof t?new Vn([[document.querySelector(t)]],[document.documentElement]):new Vn([[t]],Gn)}Vn.prototype=Wn.prototype={constructor:Vn,select:function(t){"function"!=typeof t&&(t=jt(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i=m&&(m=b+1);!(_=y[m])&&++m=0;)(r=i[o])&&(a&&4^r.compareDocumentPosition(a)&&a.parentNode.insertBefore(r,a),a=r);return this},sort:function(t){function n(n,e){return n&&e?t(n.__data__,e.__data__):!n-!e}t||(t=un);for(var e=this._groups,r=e.length,i=new Array(r),o=0;o1?this.each((null==n?gn:"function"==typeof 
n?vn:yn)(t,n,null==e?"":e)):_n(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?bn:"function"==typeof n?xn:mn)(t,n)):this.node()[t]},classed:function(t,n){var e=wn(t+"");if(arguments.length<2){for(var r=Mn(this.node()),i=-1,o=e.length;++i=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}}))}(t+""),a=o.length;if(!(arguments.length<2)){for(u=n?Ln:Yn,r=0;r()=>t;function fe(t,{sourceEvent:n,subject:e,target:r,identifier:i,active:o,x:a,y:u,dx:c,dy:f,dispatch:s}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:n,enumerable:!0,configurable:!0},subject:{value:e,enumerable:!0,configurable:!0},target:{value:r,enumerable:!0,configurable:!0},identifier:{value:i,enumerable:!0,configurable:!0},active:{value:o,enumerable:!0,configurable:!0},x:{value:a,enumerable:!0,configurable:!0},y:{value:u,enumerable:!0,configurable:!0},dx:{value:c,enumerable:!0,configurable:!0},dy:{value:f,enumerable:!0,configurable:!0},_:{value:s}})}function se(t){return!t.ctrlKey&&!t.button}function le(){return this.parentNode}function he(t,n){return null==n?{x:t.x,y:t.y}:n}function de(){return navigator.maxTouchPoints||"ontouchstart"in this}function pe(t,n,e){t.prototype=n.prototype=e,e.constructor=t}function ge(t,n){var e=Object.create(t.prototype);for(var r in n)e[r]=n[r];return e}function ye(){}fe.prototype.on=function(){var t=this._.on.apply(this._,arguments);return t===this._?this:t};var ve=.7,_e=1/ve,be="\\s*([+-]?\\d+)\\s*",me="\\s*([+-]?(?:\\d*\\.)?\\d+(?:[eE][+-]?\\d+)?)\\s*",xe="\\s*([+-]?(?:\\d*\\.)?\\d+(?:[eE][+-]?\\d+)?)%\\s*",we=/^#([0-9a-f]{3,8})$/,Me=new RegExp(`^rgb\\(${be},${be},${be}\\)$`),Te=new RegExp(`^rgb\\(${xe},${xe},${xe}\\)$`),Ae=new RegExp(`^rgba\\(${be},${be},${be},${me}\\)$`),Se=new RegExp(`^rgba\\(${xe},${xe},${xe},${me}\\)$`),Ee=new RegExp(`^hsl\\(${me},${xe},${xe}\\)$`),Ne=new 
RegExp(`^hsla\\(${me},${xe},${xe},${me}\\)$`),ke={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,ora
nge:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074};function Ce(){return this.rgb().formatHex()}function Pe(){return this.rgb().formatRgb()}function ze(t){var n,e;return t=(t+"").trim().toLowerCase(),(n=we.exec(t))?(e=n[1].length,n=parseInt(n[1],16),6===e?$e(n):3===e?new qe(n>>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1):8===e?De(n>>24&255,n>>16&255,n>>8&255,(255&n)/255):4===e?De(n>>12&15|n>>8&240,n>>8&15|n>>4&240,n>>4&15|240&n,((15&n)<<4|15&n)/255):null):(n=Me.exec(t))?new qe(n[1],n[2],n[3],1):(n=Te.exec(t))?new qe(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=Ae.exec(t))?De(n[1],n[2],n[3],n[4]):(n=Se.exec(t))?De(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=Ee.exec(t))?Le(n[1],n[2]/100,n[3]/100,1):(n=Ne.exec(t))?Le(n[1],n[2]/100,n[3]/100,n[4]):ke.hasOwnProperty(t)?$e(ke[t]):"transparent"===t?new qe(NaN,NaN,NaN,0):null}function $e(t){return new qe(t>>16&255,t>>8&255,255&t,1)}function De(t,n,e,r){return r<=0&&(t=n=e=NaN),new qe(t,n,e,r)}function Re(t){return t instanceof ye||(t=ze(t)),t?new qe((t=t.rgb()).r,t.g,t.b,t.opacity):new qe}function Fe(t,n,e,r){return 1===arguments.length?Re(t):new qe(t,n,e,null==r?1:r)}function qe(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function Ue(){return`#${Ye(this.r)}${Ye(this.g)}${Ye(this.b)}`}function Ie(){const 
t=Oe(this.opacity);return`${1===t?"rgb(":"rgba("}${Be(this.r)}, ${Be(this.g)}, ${Be(this.b)}${1===t?")":`, ${t})`}`}function Oe(t){return isNaN(t)?1:Math.max(0,Math.min(1,t))}function Be(t){return Math.max(0,Math.min(255,Math.round(t)||0))}function Ye(t){return((t=Be(t))<16?"0":"")+t.toString(16)}function Le(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new Xe(t,n,e,r)}function je(t){if(t instanceof Xe)return new Xe(t.h,t.s,t.l,t.opacity);if(t instanceof ye||(t=ze(t)),!t)return new Xe;if(t instanceof Xe)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),a=NaN,u=o-i,c=(o+i)/2;return u?(a=n===o?(e-r)/u+6*(e0&&c<1?0:a,new Xe(a,u,c,t.opacity)}function He(t,n,e,r){return 1===arguments.length?je(t):new Xe(t,n,e,null==r?1:r)}function Xe(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Ge(t){return(t=(t||0)%360)<0?t+360:t}function Ve(t){return Math.max(0,Math.min(1,t||0))}function We(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}pe(ye,ze,{copy(t){return Object.assign(new this.constructor,this,t)},displayable(){return this.rgb().displayable()},hex:Ce,formatHex:Ce,formatHex8:function(){return this.rgb().formatHex8()},formatHsl:function(){return je(this).formatHsl()},formatRgb:Pe,toString:Pe}),pe(qe,Fe,ge(ye,{brighter(t){return t=null==t?_e:Math.pow(_e,t),new qe(this.r*t,this.g*t,this.b*t,this.opacity)},darker(t){return t=null==t?ve:Math.pow(ve,t),new qe(this.r*t,this.g*t,this.b*t,this.opacity)},rgb(){return this},clamp(){return new qe(Be(this.r),Be(this.g),Be(this.b),Oe(this.opacity))},displayable(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:Ue,formatHex:Ue,formatHex8:function(){return`#${Ye(this.r)}${Ye(this.g)}${Ye(this.b)}${Ye(255*(isNaN(this.opacity)?1:this.opacity))}`},formatRgb:Ie,toString:Ie})),pe(Xe,He,ge(ye,{brighter(t){return t=null==t?_e:Math.pow(_e,t),new 
Xe(this.h,this.s,this.l*t,this.opacity)},darker(t){return t=null==t?ve:Math.pow(ve,t),new Xe(this.h,this.s,this.l*t,this.opacity)},rgb(){var t=this.h%360+360*(this.h<0),n=isNaN(t)||isNaN(this.s)?0:this.s,e=this.l,r=e+(e<.5?e:1-e)*n,i=2*e-r;return new qe(We(t>=240?t-240:t+120,i,r),We(t,i,r),We(t<120?t+240:t-120,i,r),this.opacity)},clamp(){return new Xe(Ge(this.h),Ve(this.s),Ve(this.l),Oe(this.opacity))},displayable(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl(){const t=Oe(this.opacity);return`${1===t?"hsl(":"hsla("}${Ge(this.h)}, ${100*Ve(this.s)}%, ${100*Ve(this.l)}%${1===t?")":`, ${t})`}`}}));const Ze=Math.PI/180,Ke=180/Math.PI,Qe=.96422,Je=1,tr=.82521,nr=4/29,er=6/29,rr=3*er*er,ir=er*er*er;function or(t){if(t instanceof ur)return new ur(t.l,t.a,t.b,t.opacity);if(t instanceof pr)return gr(t);t instanceof qe||(t=Re(t));var n,e,r=lr(t.r),i=lr(t.g),o=lr(t.b),a=cr((.2225045*r+.7168786*i+.0606169*o)/Je);return r===i&&i===o?n=e=a:(n=cr((.4360747*r+.3850649*i+.1430804*o)/Qe),e=cr((.0139322*r+.0971045*i+.7141733*o)/tr)),new ur(116*a-16,500*(n-a),200*(a-e),t.opacity)}function ar(t,n,e,r){return 1===arguments.length?or(t):new ur(t,n,e,null==r?1:r)}function ur(t,n,e,r){this.l=+t,this.a=+n,this.b=+e,this.opacity=+r}function cr(t){return t>ir?Math.pow(t,1/3):t/rr+nr}function fr(t){return t>er?t*t*t:rr*(t-nr)}function sr(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function lr(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function hr(t){if(t instanceof pr)return new pr(t.h,t.c,t.l,t.opacity);if(t instanceof ur||(t=or(t)),0===t.a&&0===t.b)return new pr(NaN,0=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],a=r>0?t[r-1]:2*i-o,u=r()=>t;function Cr(t,n){return function(e){return t+e*n}}function Pr(t,n){var e=n-t;return e?Cr(t,e>180||e<-180?e-360*Math.round(e/360):e):kr(isNaN(t)?n:t)}function zr(t){return 1==(t=+t)?$r:function(n,e){return e-n?function(t,n,e){return 
t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}(n,e,t):kr(isNaN(n)?e:n)}}function $r(t,n){var e=n-t;return e?Cr(t,e):kr(isNaN(t)?n:t)}var Dr=function t(n){var e=zr(n);function r(t,n){var r=e((t=Fe(t)).r,(n=Fe(n)).r),i=e(t.g,n.g),o=e(t.b,n.b),a=$r(t.opacity,n.opacity);return function(n){return t.r=r(n),t.g=i(n),t.b=o(n),t.opacity=a(n),t+""}}return r.gamma=t,r}(1);function Rr(t){return function(n){var e,r,i=n.length,o=new Array(i),a=new Array(i),u=new Array(i);for(e=0;eo&&(i=n.slice(o,i),u[a]?u[a]+=i:u[++a]=i),(e=e[0])===(r=r[0])?u[a]?u[a]+=r:u[++a]=r:(u[++a]=null,c.push({i:a,x:Yr(e,r)})),o=Hr.lastIndex;return o180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:Yr(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}(o.rotate,a.rotate,u,c),function(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:Yr(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}(o.skewX,a.skewX,u,c),function(t,n,e,r,o,a){if(t!==e||n!==r){var u=o.push(i(o)+"scale(",null,",",null,")");a.push({i:u-4,x:Yr(t,e)},{i:u-2,x:Yr(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}(o.scaleX,o.scaleY,a.scaleX,a.scaleY,u,c),o=a=null,function(t){for(var n,e=-1,r=c.length;++e=0&&n._call.call(void 0,t),n=n._next;--yi}function Ci(){xi=(mi=Mi.now())+wi,yi=vi=0;try{ki()}finally{yi=0,function(){var t,n,e=pi,r=1/0;for(;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:pi=n);gi=t,zi(r)}(),xi=0}}function Pi(){var t=Mi.now(),n=t-mi;n>bi&&(wi-=n,mi=t)}function zi(t){yi||(vi&&(vi=clearTimeout(vi)),t-xi>24?(t<1/0&&(vi=setTimeout(Ci,t-Mi.now()-wi)),_i&&(_i=clearInterval(_i))):(_i||(mi=Mi.now(),_i=setInterval(Pi,bi)),yi=1,Ti(Ci)))}function $i(t,n,e){var r=new Ei;return n=null==n?0:+n,r.restart((e=>{r.stop(),t(e+n)}),n,e),r}Ei.prototype=Ni.prototype={constructor:Ei,restart:function(t,n,e){if("function"!=typeof t)throw new TypeError("callback is not a 
function");e=(null==e?Ai():+e)+(null==n?0:+n),this._next||gi===this||(gi?gi._next=this:pi=this,gi=this),this._call=t,this._time=e,zi()},stop:function(){this._call&&(this._call=null,this._time=1/0,zi())}};var Di=$t("start","end","cancel","interrupt"),Ri=[],Fi=0,qi=1,Ui=2,Ii=3,Oi=4,Bi=5,Yi=6;function Li(t,n,e,r,i,o){var a=t.__transition;if(a){if(e in a)return}else t.__transition={};!function(t,n,e){var r,i=t.__transition;function o(t){e.state=qi,e.timer.restart(a,e.delay,e.time),e.delay<=t&&a(t-e.delay)}function a(o){var f,s,l,h;if(e.state!==qi)return c();for(f in i)if((h=i[f]).name===e.name){if(h.state===Ii)return $i(a);h.state===Oi?(h.state=Yi,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete i[f]):+fFi)throw new Error("too late; already scheduled");return e}function Hi(t,n){var e=Xi(t,n);if(e.state>Ii)throw new Error("too late; already running");return e}function Xi(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("transition not found");return e}function Gi(t,n){var e,r,i,o=t.__transition,a=!0;if(o){for(i in n=null==n?null:n+"",o)(e=o[i]).name===n?(r=e.state>Ui&&e.state=0&&(t=t.slice(0,n)),!t||"start"===t}))}(n)?ji:Hi;return function(){var a=o(this,t),u=a.on;u!==r&&(i=(r=u).copy()).on(n,e),a.on=i}}(e,t,n))},attr:function(t,n){var e=It(t),r="transform"===e?ni:Ki;return this.attrTween(t,"function"==typeof n?(e.local?ro:eo)(e,r,Zi(this,"attr."+t,n)):null==n?(e.local?Ji:Qi)(e):(e.local?no:to)(e,r,n))},attrTween:function(t,n){var e="attr."+t;if(arguments.length<2)return(e=this.tween(e))&&e._value;if(null==n)return this.tween(e,null);if("function"!=typeof n)throw new Error;var r=It(t);return this.tween(e,(r.local?io:oo)(r,n))},style:function(t,n,e){var r="transform"==(t+="")?ti:Ki;return null==n?this.styleTween(t,function(t,n){var e,r,i;return function(){var o=_n(this,t),a=(this.style.removeProperty(t),_n(this,t));return o===a?null:o===e&&a===r?i:i=n(e=o,r=a)}}(t,r)).on("end.style."+t,lo(t)):"function"==typeof 
n?this.styleTween(t,function(t,n,e){var r,i,o;return function(){var a=_n(this,t),u=e(this),c=u+"";return null==u&&(this.style.removeProperty(t),c=u=_n(this,t)),a===c?null:a===r&&c===i?o:(i=c,o=n(r=a,u))}}(t,r,Zi(this,"style."+t,n))).each(function(t,n){var e,r,i,o,a="style."+n,u="end."+a;return function(){var c=Hi(this,t),f=c.on,s=null==c.value[a]?o||(o=lo(n)):void 0;f===e&&i===s||(r=(e=f).copy()).on(u,i=s),c.on=r}}(this._id,t)):this.styleTween(t,function(t,n,e){var r,i,o=e+"";return function(){var a=_n(this,t);return a===o?null:a===r?i:i=n(r=a,e)}}(t,r,n),e).on("end.style."+t,null)},styleTween:function(t,n,e){var r="style."+(t+="");if(arguments.length<2)return(r=this.tween(r))&&r._value;if(null==n)return this.tween(r,null);if("function"!=typeof n)throw new Error;return this.tween(r,function(t,n,e){var r,i;function o(){var o=n.apply(this,arguments);return o!==i&&(r=(i=o)&&function(t,n,e){return function(r){this.style.setProperty(t,n.call(this,r),e)}}(t,o,e)),r}return o._value=n,o}(t,n,null==e?"":e))},text:function(t){return this.tween("text","function"==typeof t?function(t){return function(){var n=t(this);this.textContent=null==n?"":n}}(Zi(this,"text",t)):function(t){return function(){this.textContent=t}}(null==t?"":t+""))},textTween:function(t){var n="text";if(arguments.length<1)return(n=this.tween(n))&&n._value;if(null==t)return this.tween(n,null);if("function"!=typeof t)throw new Error;return this.tween(n,function(t){var n,e;function r(){var r=t.apply(this,arguments);return r!==e&&(n=(e=r)&&function(t){return function(n){this.textContent=t.call(this,n)}}(r)),n}return r._value=t,r}(t))},remove:function(){return this.on("end.remove",function(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}(this._id))},tween:function(t,n){var e=this._id;if(t+="",arguments.length<2){for(var r,i=Xi(this.node(),e).tween,o=0,a=i.length;o()=>t;function 
Qo(t,{sourceEvent:n,target:e,selection:r,mode:i,dispatch:o}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:n,enumerable:!0,configurable:!0},target:{value:e,enumerable:!0,configurable:!0},selection:{value:r,enumerable:!0,configurable:!0},mode:{value:i,enumerable:!0,configurable:!0},_:{value:o}})}function Jo(t){t.preventDefault(),t.stopImmediatePropagation()}var ta={name:"drag"},na={name:"space"},ea={name:"handle"},ra={name:"center"};const{abs:ia,max:oa,min:aa}=Math;function ua(t){return[+t[0],+t[1]]}function ca(t){return[ua(t[0]),ua(t[1])]}var fa={name:"x",handles:["w","e"].map(va),input:function(t,n){return null==t?null:[[+t[0],n[0][1]],[+t[1],n[1][1]]]},output:function(t){return t&&[t[0][0],t[1][0]]}},sa={name:"y",handles:["n","s"].map(va),input:function(t,n){return null==t?null:[[n[0][0],+t[0]],[n[1][0],+t[1]]]},output:function(t){return t&&[t[0][1],t[1][1]]}},la={name:"xy",handles:["n","w","e","s","nw","ne","sw","se"].map(va),input:function(t){return null==t?null:ca(t)},output:function(t){return t}},ha={overlay:"crosshair",selection:"move",n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},da={e:"w",w:"e",nw:"ne",ne:"nw",se:"sw",sw:"se"},pa={n:"s",s:"n",nw:"sw",ne:"se",se:"ne",sw:"nw"},ga={overlay:1,selection:1,n:null,e:1,s:null,w:-1,nw:-1,ne:1,se:1,sw:-1},ya={overlay:1,selection:1,n:-1,e:null,s:1,w:null,nw:-1,ne:-1,se:1,sw:1};function va(t){return{type:t}}function _a(t){return!t.ctrlKey&&!t.button}function ba(){var t=this.ownerSVGElement||this;return t.hasAttribute("viewBox")?[[(t=t.viewBox.baseVal).x,t.y],[t.x+t.width,t.y+t.height]]:[[0,0],[t.width.baseVal.value,t.height.baseVal.value]]}function ma(){return navigator.maxTouchPoints||"ontouchstart"in this}function xa(t){for(;!t.__brush;)if(!(t=t.parentNode))return;return t.__brush}function wa(t){var n,e=ba,r=_a,i=ma,o=!0,a=$t("start","brush","end"),u=6;function c(n){var 
e=n.property("__brush",g).selectAll(".overlay").data([va("overlay")]);e.enter().append("rect").attr("class","overlay").attr("pointer-events","all").attr("cursor",ha.overlay).merge(e).each((function(){var t=xa(this).extent;Zn(this).attr("x",t[0][0]).attr("y",t[0][1]).attr("width",t[1][0]-t[0][0]).attr("height",t[1][1]-t[0][1])})),n.selectAll(".selection").data([va("selection")]).enter().append("rect").attr("class","selection").attr("cursor",ha.selection).attr("fill","#777").attr("fill-opacity",.3).attr("stroke","#fff").attr("shape-rendering","crispEdges");var r=n.selectAll(".handle").data(t.handles,(function(t){return t.type}));r.exit().remove(),r.enter().append("rect").attr("class",(function(t){return"handle handle--"+t.type})).attr("cursor",(function(t){return ha[t.type]})),n.each(f).attr("fill","none").attr("pointer-events","all").on("mousedown.brush",h).filter(i).on("touchstart.brush",h).on("touchmove.brush",d).on("touchend.brush touchcancel.brush",p).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function f(){var t=Zn(this),n=xa(this).selection;n?(t.selectAll(".selection").style("display",null).attr("x",n[0][0]).attr("y",n[0][1]).attr("width",n[1][0]-n[0][0]).attr("height",n[1][1]-n[0][1]),t.selectAll(".handle").style("display",null).attr("x",(function(t){return"e"===t.type[t.type.length-1]?n[1][0]-u/2:n[0][0]-u/2})).attr("y",(function(t){return"s"===t.type[0]?n[1][1]-u/2:n[0][1]-u/2})).attr("width",(function(t){return"n"===t.type||"s"===t.type?n[1][0]-n[0][0]+u:u})).attr("height",(function(t){return"e"===t.type||"w"===t.type?n[1][1]-n[0][1]+u:u}))):t.selectAll(".selection,.handle").style("display","none").attr("x",null).attr("y",null).attr("width",null).attr("height",null)}function s(t,n,e){var r=t.__brush.emitter;return!r||e&&r.clean?new l(t,n,e):r}function l(t,n,e){this.that=t,this.args=n,this.state=t.__brush,this.active=0,this.clean=e}function h(e){if((!n||e.touches)&&r.apply(this,arguments)){var 
i,a,u,c,l,h,d,p,g,y,v,_=this,b=e.target.__data__.type,m="selection"===(o&&e.metaKey?b="overlay":b)?ta:o&&e.altKey?ra:ea,x=t===sa?null:ga[b],w=t===fa?null:ya[b],M=xa(_),T=M.extent,A=M.selection,S=T[0][0],E=T[0][1],N=T[1][0],k=T[1][1],C=0,P=0,z=x&&w&&o&&e.shiftKey,$=Array.from(e.touches||[e],(t=>{const n=t.identifier;return(t=ne(t,_)).point0=t.slice(),t.identifier=n,t}));Gi(_);var D=s(_,arguments,!0).beforestart();if("overlay"===b){A&&(g=!0);const n=[$[0],$[1]||$[0]];M.selection=A=[[i=t===sa?S:aa(n[0][0],n[1][0]),u=t===fa?E:aa(n[0][1],n[1][1])],[l=t===sa?N:oa(n[0][0],n[1][0]),d=t===fa?k:oa(n[0][1],n[1][1])]],$.length>1&&I(e)}else i=A[0][0],u=A[0][1],l=A[1][0],d=A[1][1];a=i,c=u,h=l,p=d;var R=Zn(_).attr("pointer-events","none"),F=R.selectAll(".overlay").attr("cursor",ha[b]);if(e.touches)D.moved=U,D.ended=O;else{var q=Zn(e.view).on("mousemove.brush",U,!0).on("mouseup.brush",O,!0);o&&q.on("keydown.brush",(function(t){switch(t.keyCode){case 16:z=x&&w;break;case 18:m===ea&&(x&&(l=h-C*x,i=a+C*x),w&&(d=p-P*w,u=c+P*w),m=ra,I(t));break;case 32:m!==ea&&m!==ra||(x<0?l=h-C:x>0&&(i=a-C),w<0?d=p-P:w>0&&(u=c-P),m=na,F.attr("cursor",ha.selection),I(t));break;default:return}Jo(t)}),!0).on("keyup.brush",(function(t){switch(t.keyCode){case 16:z&&(y=v=z=!1,I(t));break;case 18:m===ra&&(x<0?l=h:x>0&&(i=a),w<0?d=p:w>0&&(u=c),m=ea,I(t));break;case 32:m===na&&(t.altKey?(x&&(l=h-C*x,i=a+C*x),w&&(d=p-P*w,u=c+P*w),m=ra):(x<0?l=h:x>0&&(i=a),w<0?d=p:w>0&&(u=c),m=ea),F.attr("cursor",ha[b]),I(t));break;default:return}Jo(t)}),!0),ae(e.view)}f.call(_),D.start(e,m.name)}function U(t){for(const n of t.changedTouches||[t])for(const t of $)t.identifier===n.identifier&&(t.cur=ne(n,_));if(z&&!y&&!v&&1===$.length){const t=$[0];ia(t.cur[0]-t[0])>ia(t.cur[1]-t[1])?v=!0:y=!0}for(const t of $)t.cur&&(t[0]=t.cur[0],t[1]=t.cur[1]);g=!0,Jo(t),I(t)}function I(t){const n=$[0],e=n.point0;var r;switch(C=n[0]-e[0],P=n[1]-e[1],m){case na:case 
ta:x&&(C=oa(S-i,aa(N-l,C)),a=i+C,h=l+C),w&&(P=oa(E-u,aa(k-d,P)),c=u+P,p=d+P);break;case ea:$[1]?(x&&(a=oa(S,aa(N,$[0][0])),h=oa(S,aa(N,$[1][0])),x=1),w&&(c=oa(E,aa(k,$[0][1])),p=oa(E,aa(k,$[1][1])),w=1)):(x<0?(C=oa(S-i,aa(N-i,C)),a=i+C,h=l):x>0&&(C=oa(S-l,aa(N-l,C)),a=i,h=l+C),w<0?(P=oa(E-u,aa(k-u,P)),c=u+P,p=d):w>0&&(P=oa(E-d,aa(k-d,P)),c=u,p=d+P));break;case ra:x&&(a=oa(S,aa(N,i-C*x)),h=oa(S,aa(N,l+C*x))),w&&(c=oa(E,aa(k,u-P*w)),p=oa(E,aa(k,d+P*w)))}ht+e))}function za(t,n){var e=0,r=null,i=null,o=null;function a(a){var u,c=a.length,f=new Array(c),s=Pa(0,c),l=new Array(c*c),h=new Array(c),d=0;a=Float64Array.from({length:c*c},n?(t,n)=>a[n%c][n/c|0]:(t,n)=>a[n/c|0][n%c]);for(let n=0;nr(f[t],f[n])));for(const e of s){const r=n;if(t){const t=Pa(1+~c,c).filter((t=>t<0?a[~t*c+e]:a[e*c+t]));i&&t.sort(((t,n)=>i(t<0?-a[~t*c+e]:a[e*c+t],n<0?-a[~n*c+e]:a[e*c+n])));for(const r of t)if(r<0){(l[~r*c+e]||(l[~r*c+e]={source:null,target:null})).target={index:e,startAngle:n,endAngle:n+=a[~r*c+e]*d,value:a[~r*c+e]}}else{(l[e*c+r]||(l[e*c+r]={source:null,target:null})).source={index:e,startAngle:n,endAngle:n+=a[e*c+r]*d,value:a[e*c+r]}}h[e]={index:e,startAngle:r,endAngle:n,value:f[e]}}else{const t=Pa(0,c).filter((t=>a[e*c+t]||a[t*c+e]));i&&t.sort(((t,n)=>i(a[e*c+t],a[e*c+n])));for(const r of t){let t;if(e=0))throw new Error(`invalid digits: ${t}`);if(n>15)return qa;const e=10**n;return function(t){this._+=t[0];for(let n=1,r=t.length;nRa)if(Math.abs(s*u-c*f)>Ra&&i){let h=e-o,d=r-a,p=u*u+c*c,g=h*h+d*d,y=Math.sqrt(p),v=Math.sqrt(l),_=i*Math.tan(($a-Math.acos((p+l-g)/(2*y*v)))/2),b=_/v,m=_/y;Math.abs(b-1)>Ra&&this._append`L${t+b*f},${n+b*s}`,this._append`A${i},${i},0,0,${+(s*h>f*d)},${this._x1=t+m*u},${this._y1=n+m*c}`}else this._append`L${this._x1=t},${this._y1=n}`;else;}arc(t,n,e,r,i,o){if(t=+t,n=+n,o=!!o,(e=+e)<0)throw new Error(`negative radius: ${e}`);let 
a=e*Math.cos(r),u=e*Math.sin(r),c=t+a,f=n+u,s=1^o,l=o?r-i:i-r;null===this._x1?this._append`M${c},${f}`:(Math.abs(this._x1-c)>Ra||Math.abs(this._y1-f)>Ra)&&this._append`L${c},${f}`,e&&(l<0&&(l=l%Da+Da),l>Fa?this._append`A${e},${e},0,1,${s},${t-a},${n-u}A${e},${e},0,1,${s},${this._x1=c},${this._y1=f}`:l>Ra&&this._append`A${e},${e},0,${+(l>=$a)},${s},${this._x1=t+e*Math.cos(i)},${this._y1=n+e*Math.sin(i)}`)}rect(t,n,e,r){this._append`M${this._x0=this._x1=+t},${this._y0=this._y1=+n}h${e=+e}v${+r}h${-e}Z`}toString(){return this._}};function Ia(){return new Ua}Ia.prototype=Ua.prototype;var Oa=Array.prototype.slice;function Ba(t){return function(){return t}}function Ya(t){return t.source}function La(t){return t.target}function ja(t){return t.radius}function Ha(t){return t.startAngle}function Xa(t){return t.endAngle}function Ga(){return 0}function Va(){return 10}function Wa(t){var n=Ya,e=La,r=ja,i=ja,o=Ha,a=Xa,u=Ga,c=null;function f(){var f,s=n.apply(this,arguments),l=e.apply(this,arguments),h=u.apply(this,arguments)/2,d=Oa.call(arguments),p=+r.apply(this,(d[0]=s,d)),g=o.apply(this,d)-Ea,y=a.apply(this,d)-Ea,v=+i.apply(this,(d[0]=l,d)),_=o.apply(this,d)-Ea,b=a.apply(this,d)-Ea;if(c||(c=f=Ia()),h>Ca&&(Ma(y-g)>2*h+Ca?y>g?(g+=h,y-=h):(g-=h,y+=h):g=y=(g+y)/2,Ma(b-_)>2*h+Ca?b>_?(_+=h,b-=h):(_-=h,b+=h):_=b=(_+b)/2),c.moveTo(p*Ta(g),p*Aa(g)),c.arc(0,0,p,g,y),g!==_||y!==b)if(t){var m=v-+t.apply(this,arguments),x=(_+b)/2;c.quadraticCurveTo(0,0,m*Ta(_),m*Aa(_)),c.lineTo(v*Ta(x),v*Aa(x)),c.lineTo(m*Ta(b),m*Aa(b))}else c.quadraticCurveTo(0,0,v*Ta(_),v*Aa(_)),c.arc(0,0,v,_,b);if(c.quadraticCurveTo(0,0,p*Ta(g),p*Aa(g)),c.closePath(),f)return c=null,f+""||null}return t&&(f.headRadius=function(n){return arguments.length?(t="function"==typeof n?n:Ba(+n),f):t}),f.radius=function(t){return arguments.length?(r=i="function"==typeof t?t:Ba(+t),f):r},f.sourceRadius=function(t){return arguments.length?(r="function"==typeof t?t:Ba(+t),f):r},f.targetRadius=function(t){return 
arguments.length?(i="function"==typeof t?t:Ba(+t),f):i},f.startAngle=function(t){return arguments.length?(o="function"==typeof t?t:Ba(+t),f):o},f.endAngle=function(t){return arguments.length?(a="function"==typeof t?t:Ba(+t),f):a},f.padAngle=function(t){return arguments.length?(u="function"==typeof t?t:Ba(+t),f):u},f.source=function(t){return arguments.length?(n=t,f):n},f.target=function(t){return arguments.length?(e=t,f):e},f.context=function(t){return arguments.length?(c=null==t?null:t,f):c},f}var Za=Array.prototype.slice;function Ka(t,n){return t-n}var Qa=t=>()=>t;function Ja(t,n){for(var e,r=-1,i=n.length;++rr!=d>r&&e<(h-f)*(r-s)/(d-s)+f&&(i=-i)}return i}function nu(t,n,e){var r,i,o,a;return function(t,n,e){return(n[0]-t[0])*(e[1]-t[1])==(e[0]-t[0])*(n[1]-t[1])}(t,n,e)&&(i=t[r=+(t[0]===n[0])],o=e[r],a=n[r],i<=o&&o<=a||a<=o&&o<=i)}function eu(){}var ru=[[],[[[1,1.5],[.5,1]]],[[[1.5,1],[1,1.5]]],[[[1.5,1],[.5,1]]],[[[1,.5],[1.5,1]]],[[[1,1.5],[.5,1]],[[1,.5],[1.5,1]]],[[[1,.5],[1,1.5]]],[[[1,.5],[.5,1]]],[[[.5,1],[1,.5]]],[[[1,1.5],[1,.5]]],[[[.5,1],[1,.5]],[[1.5,1],[1,1.5]]],[[[1.5,1],[1,.5]]],[[[.5,1],[1.5,1]]],[[[1,1.5],[1.5,1]]],[[[.5,1],[1,1.5]]],[]];function iu(){var t=1,n=1,e=K,r=u;function i(t){var n=e(t);if(Array.isArray(n))n=n.slice().sort(Ka);else{const e=M(t,ou);for(n=G(...Z(e[0],e[1],n),n);n[n.length-1]>=e[1];)n.pop();for(;n[1]o(t,n)))}function o(e,i){const o=null==i?NaN:+i;if(isNaN(o))throw new Error(`invalid value: ${i}`);var u=[],c=[];return function(e,r,i){var o,u,c,f,s,l,h=new Array,d=new Array;o=u=-1,f=au(e[0],r),ru[f<<1].forEach(p);for(;++o=r,ru[s<<2].forEach(p);for(;++o0?u.push([t]):c.push(t)})),c.forEach((function(t){for(var n,e=0,r=u.length;e0&&o0&&a=0&&o>=0))throw new Error("invalid size");return t=r,n=o,i},i.thresholds=function(t){return arguments.length?(e="function"==typeof t?t:Array.isArray(t)?Qa(Za.call(t)):Qa(t),i):e},i.smooth=function(t){return arguments.length?(r=t?u:eu,i):r===u},i}function ou(t){return isFinite(t)?t:NaN}function 
au(t,n){return null!=t&&+t>=n}function uu(t){return null==t||isNaN(t=+t)?-1/0:t}function cu(t,n,e,r){const i=r-n,o=e-n,a=isFinite(i)||isFinite(o)?i/o:Math.sign(i)/Math.sign(o);return isNaN(a)?t:t+a-.5}function fu(t){return t[0]}function su(t){return t[1]}function lu(){return 1}const hu=134217729,du=33306690738754706e-32;function pu(t,n,e,r,i){let o,a,u,c,f=n[0],s=r[0],l=0,h=0;s>f==s>-f?(o=f,f=n[++l]):(o=s,s=r[++h]);let d=0;if(lf==s>-f?(a=f+o,u=o-(a-f),f=n[++l]):(a=s+o,u=o-(a-s),s=r[++h]),o=a,0!==u&&(i[d++]=u);lf==s>-f?(a=o+f,c=a-o,u=o-(a-c)+(f-c),f=n[++l]):(a=o+s,c=a-o,u=o-(a-c)+(s-c),s=r[++h]),o=a,0!==u&&(i[d++]=u);for(;l=33306690738754716e-32*f?c:-function(t,n,e,r,i,o,a){let u,c,f,s,l,h,d,p,g,y,v,_,b,m,x,w,M,T;const A=t-i,S=e-i,E=n-o,N=r-o;m=A*N,h=hu*A,d=h-(h-A),p=A-d,h=hu*N,g=h-(h-N),y=N-g,x=p*y-(m-d*g-p*g-d*y),w=E*S,h=hu*E,d=h-(h-E),p=E-d,h=hu*S,g=h-(h-S),y=S-g,M=p*y-(w-d*g-p*g-d*y),v=x-M,l=x-v,_u[0]=x-(v+l)+(l-M),_=m+v,l=_-m,b=m-(_-l)+(v-l),v=b-w,l=b-v,_u[1]=b-(v+l)+(l-w),T=_+v,l=T-_,_u[2]=_-(T-l)+(v-l),_u[3]=T;let k=function(t,n){let e=n[0];for(let r=1;r=C||-k>=C)return k;if(l=t-A,u=t-(A+l)+(l-i),l=e-S,f=e-(S+l)+(l-i),l=n-E,c=n-(E+l)+(l-o),l=r-N,s=r-(N+l)+(l-o),0===u&&0===c&&0===f&&0===s)return k;if(C=vu*a+du*Math.abs(k),k+=A*s+N*u-(E*f+S*c),k>=C||-k>=C)return k;m=u*N,h=hu*u,d=h-(h-u),p=u-d,h=hu*N,g=h-(h-N),y=N-g,x=p*y-(m-d*g-p*g-d*y),w=c*S,h=hu*c,d=h-(h-c),p=c-d,h=hu*S,g=h-(h-S),y=S-g,M=p*y-(w-d*g-p*g-d*y),v=x-M,l=x-v,wu[0]=x-(v+l)+(l-M),_=m+v,l=_-m,b=m-(_-l)+(v-l),v=b-w,l=b-v,wu[1]=b-(v+l)+(l-w),T=_+v,l=T-_,wu[2]=_-(T-l)+(v-l),wu[3]=T;const P=pu(4,_u,4,wu,bu);m=A*s,h=hu*A,d=h-(h-A),p=A-d,h=hu*s,g=h-(h-s),y=s-g,x=p*y-(m-d*g-p*g-d*y),w=E*f,h=hu*E,d=h-(h-E),p=E-d,h=hu*f,g=h-(h-f),y=f-g,M=p*y-(w-d*g-p*g-d*y),v=x-M,l=x-v,wu[0]=x-(v+l)+(l-M),_=m+v,l=_-m,b=m-(_-l)+(v-l),v=b-w,l=b-v,wu[1]=b-(v+l)+(l-w),T=_+v,l=T-_,wu[2]=_-(T-l)+(v-l),wu[3]=T;const 
z=pu(P,bu,4,wu,mu);m=u*s,h=hu*u,d=h-(h-u),p=u-d,h=hu*s,g=h-(h-s),y=s-g,x=p*y-(m-d*g-p*g-d*y),w=c*f,h=hu*c,d=h-(h-c),p=c-d,h=hu*f,g=h-(h-f),y=f-g,M=p*y-(w-d*g-p*g-d*y),v=x-M,l=x-v,wu[0]=x-(v+l)+(l-M),_=m+v,l=_-m,b=m-(_-l)+(v-l),v=b-w,l=b-v,wu[1]=b-(v+l)+(l-w),T=_+v,l=T-_,wu[2]=_-(T-l)+(v-l),wu[3]=T;const $=pu(z,mu,4,wu,xu);return xu[$-1]}(t,n,e,r,i,o,f)}const Tu=Math.pow(2,-52),Au=new Uint32Array(512);class Su{static from(t,n=zu,e=$u){const r=t.length,i=new Float64Array(2*r);for(let o=0;o>1;if(n>0&&"number"!=typeof t[0])throw new Error("Expected coords to contain numbers.");this.coords=t;const e=Math.max(2*n-5,0);this._triangles=new Uint32Array(3*e),this._halfedges=new Int32Array(3*e),this._hashSize=Math.ceil(Math.sqrt(n)),this._hullPrev=new Uint32Array(n),this._hullNext=new Uint32Array(n),this._hullTri=new Uint32Array(n),this._hullHash=new Int32Array(this._hashSize),this._ids=new Uint32Array(n),this._dists=new Float64Array(n),this.update()}update(){const{coords:t,_hullPrev:n,_hullNext:e,_hullTri:r,_hullHash:i}=this,o=t.length>>1;let a=1/0,u=1/0,c=-1/0,f=-1/0;for(let n=0;nc&&(c=e),r>f&&(f=r),this._ids[n]=n}const s=(a+c)/2,l=(u+f)/2;let h,d,p;for(let n=0,e=1/0;n0&&(d=n,e=r)}let v=t[2*d],_=t[2*d+1],b=1/0;for(let n=0;nr&&(n[e++]=i,r=o)}return this.hull=n.subarray(0,e),this.triangles=new Uint32Array(0),void(this.halfedges=new Uint32Array(0))}if(Mu(g,y,v,_,m,x)<0){const t=d,n=v,e=_;d=p,v=m,_=x,p=t,m=n,x=e}const w=function(t,n,e,r,i,o){const a=e-t,u=r-n,c=i-t,f=o-n,s=a*a+u*u,l=c*c+f*f,h=.5/(a*f-u*c),d=t+(f*s-u*l)*h,p=n+(a*l-c*s)*h;return{x:d,y:p}}(g,y,v,_,m,x);this._cx=w.x,this._cy=w.y;for(let n=0;n0&&Math.abs(f-o)<=Tu&&Math.abs(s-a)<=Tu)continue;if(o=f,a=s,c===h||c===d||c===p)continue;let l=0;for(let t=0,n=this._hashKey(f,s);t=0;)if(y=g,y===l){y=-1;break}if(-1===y)continue;let v=this._addTriangle(y,c,e[y],-1,-1,r[y]);r[c]=this._legalize(v+2),r[y]=v,M++;let 
_=e[y];for(;g=e[_],Mu(f,s,t[2*_],t[2*_+1],t[2*g],t[2*g+1])<0;)v=this._addTriangle(_,c,g,r[c],-1,r[_]),r[c]=this._legalize(v+2),e[_]=_,M--,_=g;if(y===l)for(;g=n[y],Mu(f,s,t[2*g],t[2*g+1],t[2*y],t[2*y+1])<0;)v=this._addTriangle(g,c,y,-1,r[y],r[g]),this._legalize(v+2),r[g]=v,e[y]=y,M--,y=g;this._hullStart=n[c]=y,e[y]=n[_]=c,e[c]=_,i[this._hashKey(f,s)]=c,i[this._hashKey(t[2*y],t[2*y+1])]=y}this.hull=new Uint32Array(M);for(let t=0,n=this._hullStart;t0?3-e:1+e)/4}(t-this._cx,n-this._cy)*this._hashSize)%this._hashSize}_legalize(t){const{_triangles:n,_halfedges:e,coords:r}=this;let i=0,o=0;for(;;){const a=e[t],u=t-t%3;if(o=u+(t+2)%3,-1===a){if(0===i)break;t=Au[--i];continue}const c=a-a%3,f=u+(t+1)%3,s=c+(a+2)%3,l=n[o],h=n[t],d=n[f],p=n[s];if(Nu(r[2*l],r[2*l+1],r[2*h],r[2*h+1],r[2*d],r[2*d+1],r[2*p],r[2*p+1])){n[t]=p,n[a]=l;const r=e[s];if(-1===r){let n=this._hullStart;do{if(this._hullTri[n]===s){this._hullTri[n]=t;break}n=this._hullPrev[n]}while(n!==this._hullStart)}this._link(t,r),this._link(a,e[o]),this._link(o,s);const u=c+(a+1)%3;i=e&&n[t[a]]>o;)t[a+1]=t[a--];t[a+1]=r}else{let i=e+1,o=r;Pu(t,e+r>>1,i),n[t[e]]>n[t[r]]&&Pu(t,e,r),n[t[i]]>n[t[r]]&&Pu(t,i,r),n[t[e]]>n[t[i]]&&Pu(t,e,i);const a=t[i],u=n[a];for(;;){do{i++}while(n[t[i]]u);if(o=o-e?(Cu(t,n,i,r),Cu(t,n,e,o-1)):(Cu(t,n,e,o-1),Cu(t,n,i,r))}}function Pu(t,n,e){const r=t[n];t[n]=t[e],t[e]=r}function zu(t){return t[0]}function $u(t){return t[1]}const Du=1e-6;class Ru{constructor(){this._x0=this._y0=this._x1=this._y1=null,this._=""}moveTo(t,n){this._+=`M${this._x0=this._x1=+t},${this._y0=this._y1=+n}`}closePath(){null!==this._x1&&(this._x1=this._x0,this._y1=this._y0,this._+="Z")}lineTo(t,n){this._+=`L${this._x1=+t},${this._y1=+n}`}arc(t,n,e){const r=(t=+t)+(e=+e),i=n=+n;if(e<0)throw new Error("negative 
radius");null===this._x1?this._+=`M${r},${i}`:(Math.abs(this._x1-r)>Du||Math.abs(this._y1-i)>Du)&&(this._+="L"+r+","+i),e&&(this._+=`A${e},${e},0,1,1,${t-e},${n}A${e},${e},0,1,1,${this._x1=r},${this._y1=i}`)}rect(t,n,e,r){this._+=`M${this._x0=this._x1=+t},${this._y0=this._y1=+n}h${+e}v${+r}h${-e}Z`}value(){return this._||null}}class Fu{constructor(){this._=[]}moveTo(t,n){this._.push([t,n])}closePath(){this._.push(this._[0].slice())}lineTo(t,n){this._.push([t,n])}value(){return this._.length?this._:null}}class qu{constructor(t,[n,e,r,i]=[0,0,960,500]){if(!((r=+r)>=(n=+n)&&(i=+i)>=(e=+e)))throw new Error("invalid bounds");this.delaunay=t,this._circumcenters=new Float64Array(2*t.points.length),this.vectors=new Float64Array(2*t.points.length),this.xmax=r,this.xmin=n,this.ymax=i,this.ymin=e,this._init()}update(){return this.delaunay.update(),this._init(),this}_init(){const{delaunay:{points:t,hull:n,triangles:e},vectors:r}=this;let i,o;const a=this.circumcenters=this._circumcenters.subarray(0,e.length/3*2);for(let r,u,c=0,f=0,s=e.length;c1;)i-=2;for(let t=2;t0){if(n>=this.ymax)return null;(i=(this.ymax-n)/r)0){if(t>=this.xmax)return null;(i=(this.xmax-t)/e)this.xmax?2:0)|(nthis.ymax?8:0)}_simplify(t){if(t&&t.length>4){for(let n=0;n2&&function(t){const{triangles:n,coords:e}=t;for(let t=0;t1e-10)return!1}return!0}(t)){this.collinear=Int32Array.from({length:n.length/2},((t,n)=>n)).sort(((t,e)=>n[2*t]-n[2*e]||n[2*t+1]-n[2*e+1]));const t=this.collinear[0],e=this.collinear[this.collinear.length-1],r=[n[2*t],n[2*t+1],n[2*e],n[2*e+1]],i=1e-8*Math.hypot(r[3]-r[1],r[2]-r[0]);for(let t=0,e=n.length/2;t0&&(this.triangles=new Int32Array(3).fill(-1),this.halfedges=new Int32Array(3).fill(-1),this.triangles[0]=r[0],o[r[0]]=1,2===r.length&&(o[r[1]]=0,this.triangles[1]=r[1],this.triangles[2]=r[1]))}voronoi(t){return new qu(this,t)}*neighbors(t){const{inedges:n,hull:e,_hullIndex:r,halfedges:i,triangles:o,collinear:a}=this;if(a){const n=a.indexOf(t);return n>0&&(yield 
a[n-1]),void(n=0&&i!==e&&i!==r;)e=i;return i}_step(t,n,e){const{inedges:r,hull:i,_hullIndex:o,halfedges:a,triangles:u,points:c}=this;if(-1===r[t]||!c.length)return(t+1)%(c.length>>1);let f=t,s=Iu(n-c[2*t],2)+Iu(e-c[2*t+1],2);const l=r[t];let h=l;do{let r=u[h];const l=Iu(n-c[2*r],2)+Iu(e-c[2*r+1],2);if(l9999?"+"+Ku(n,6):Ku(n,4))+"-"+Ku(t.getUTCMonth()+1,2)+"-"+Ku(t.getUTCDate(),2)+(o?"T"+Ku(e,2)+":"+Ku(r,2)+":"+Ku(i,2)+"."+Ku(o,3)+"Z":i?"T"+Ku(e,2)+":"+Ku(r,2)+":"+Ku(i,2)+"Z":r||e?"T"+Ku(e,2)+":"+Ku(r,2)+"Z":"")}function Ju(t){var n=new RegExp('["'+t+"\n\r]"),e=t.charCodeAt(0);function r(t,n){var r,i=[],o=t.length,a=0,u=0,c=o<=0,f=!1;function s(){if(c)return Hu;if(f)return f=!1,ju;var n,r,i=a;if(t.charCodeAt(i)===Xu){for(;a++=o?c=!0:(r=t.charCodeAt(a++))===Gu?f=!0:r===Vu&&(f=!0,t.charCodeAt(a)===Gu&&++a),t.slice(i+1,n-1).replace(/""/g,'"')}for(;amc(n,e).then((n=>(new DOMParser).parseFromString(n,t)))}var Sc=Ac("application/xml"),Ec=Ac("text/html"),Nc=Ac("image/svg+xml");function kc(t,n,e,r){if(isNaN(n)||isNaN(e))return t;var i,o,a,u,c,f,s,l,h,d=t._root,p={data:r},g=t._x0,y=t._y0,v=t._x1,_=t._y1;if(!d)return t._root=p,t;for(;d.length;)if((f=n>=(o=(g+v)/2))?g=o:v=o,(s=e>=(a=(y+_)/2))?y=a:_=a,i=d,!(d=d[l=s<<1|f]))return i[l]=p,t;if(u=+t._x.call(null,d.data),c=+t._y.call(null,d.data),n===u&&e===c)return p.next=d,i?i[l]=p:t._root=p,t;do{i=i?i[l]=new Array(4):t._root=new Array(4),(f=n>=(o=(g+v)/2))?g=o:v=o,(s=e>=(a=(y+_)/2))?y=a:_=a}while((l=s<<1|f)==(h=(c>=a)<<1|u>=o));return i[h]=d,i[l]=p,t}function Cc(t,n,e,r,i){this.node=t,this.x0=n,this.y0=e,this.x1=r,this.y1=i}function Pc(t){return t[0]}function zc(t){return t[1]}function $c(t,n,e){var r=new Dc(null==n?Pc:n,null==e?zc:e,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function Dc(t,n,e,r,i,o){this._x=t,this._y=n,this._x0=e,this._y0=r,this._x1=i,this._y1=o,this._root=void 0}function Rc(t){for(var n={data:t.data},e=n;t=t.next;)e=e.next={data:t.data};return n}var Fc=$c.prototype=Dc.prototype;function qc(t){return 
function(){return t}}function Uc(t){return 1e-6*(t()-.5)}function Ic(t){return t.x+t.vx}function Oc(t){return t.y+t.vy}function Bc(t){return t.index}function Yc(t,n){var e=t.get(n);if(!e)throw new Error("node not found: "+n);return e}Fc.copy=function(){var t,n,e=new Dc(this._x,this._y,this._x0,this._y0,this._x1,this._y1),r=this._root;if(!r)return e;if(!r.length)return e._root=Rc(r),e;for(t=[{source:r,target:e._root=new Array(4)}];r=t.pop();)for(var i=0;i<4;++i)(n=r.source[i])&&(n.length?t.push({source:n,target:r.target[i]=new Array(4)}):r.target[i]=Rc(n));return e},Fc.add=function(t){const n=+this._x.call(null,t),e=+this._y.call(null,t);return kc(this.cover(n,e),n,e,t)},Fc.addAll=function(t){var n,e,r,i,o=t.length,a=new Array(o),u=new Array(o),c=1/0,f=1/0,s=-1/0,l=-1/0;for(e=0;es&&(s=r),il&&(l=i));if(c>s||f>l)return this;for(this.cover(c,f).cover(s,l),e=0;et||t>=i||r>n||n>=o;)switch(u=(nh||(o=c.y0)>d||(a=c.x1)=v)<<1|t>=y)&&(c=p[p.length-1],p[p.length-1]=p[p.length-1-f],p[p.length-1-f]=c)}else{var _=t-+this._x.call(null,g.data),b=n-+this._y.call(null,g.data),m=_*_+b*b;if(m=(u=(p+y)/2))?p=u:y=u,(s=a>=(c=(g+v)/2))?g=c:v=c,n=d,!(d=d[l=s<<1|f]))return this;if(!d.length)break;(n[l+1&3]||n[l+2&3]||n[l+3&3])&&(e=n,h=l)}for(;d.data!==t;)if(r=d,!(d=d.next))return this;return(i=d.next)&&delete d.next,r?(i?r.next=i:delete r.next,this):n?(i?n[l]=i:delete n[l],(d=n[0]||n[1]||n[2]||n[3])&&d===(n[3]||n[2]||n[1]||n[0])&&!d.length&&(e?e[h]=d:this._root=d),this):(this._root=i,this)},Fc.removeAll=function(t){for(var n=0,e=t.length;n1?r[0]+r.slice(2):r,+t.slice(e+1)]}function Zc(t){return(t=Wc(Math.abs(t)))?t[1]:NaN}var Kc,Qc=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Jc(t){if(!(n=Qc.exec(t)))throw new Error("invalid format: "+t);var n;return new tf({fill:n[1],align:n[2],sign:n[3],symbol:n[4],zero:n[5],width:n[6],comma:n[7],precision:n[8]&&n[8].slice(1),trim:n[9],type:n[10]})}function tf(t){this.fill=void 0===t.fill?" 
":t.fill+"",this.align=void 0===t.align?">":t.align+"",this.sign=void 0===t.sign?"-":t.sign+"",this.symbol=void 0===t.symbol?"":t.symbol+"",this.zero=!!t.zero,this.width=void 0===t.width?void 0:+t.width,this.comma=!!t.comma,this.precision=void 0===t.precision?void 0:+t.precision,this.trim=!!t.trim,this.type=void 0===t.type?"":t.type+""}function nf(t,n){var e=Wc(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}Jc.prototype=tf.prototype,tf.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};var ef={"%":(t,n)=>(100*t).toFixed(n),b:t=>Math.round(t).toString(2),c:t=>t+"",d:function(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)},e:(t,n)=>t.toExponential(n),f:(t,n)=>t.toFixed(n),g:(t,n)=>t.toPrecision(n),o:t=>Math.round(t).toString(8),p:(t,n)=>nf(100*t,n),r:nf,s:function(t,n){var e=Wc(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(Kc=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,a=r.length;return o===a?r:o>a?r+new Array(o-a+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+Wc(t,Math.max(0,n+o-1))[0]},X:t=>Math.round(t).toString(16).toUpperCase(),x:t=>Math.round(t).toString(16)};function rf(t){return t}var of,af=Array.prototype.map,uf=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];function cf(t){var n,e,r=void 0===t.grouping||void 0===t.thousands?rf:(n=af.call(t.grouping,Number),e=t.thousands+"",function(t,r){for(var i=t.length,o=[],a=0,u=n[0],c=0;i>0&&u>0&&(c+u+1>r&&(u=Math.max(1,r-c)),o.push(t.substring(i-=u,i+u)),!((c+=u+1)>r));)u=n[a=(a+1)%n.length];return o.reverse().join(e)}),i=void 0===t.currency?"":t.currency[0]+"",o=void 
0===t.currency?"":t.currency[1]+"",a=void 0===t.decimal?".":t.decimal+"",u=void 0===t.numerals?rf:function(t){return function(n){return n.replace(/[0-9]/g,(function(n){return t[+n]}))}}(af.call(t.numerals,String)),c=void 0===t.percent?"%":t.percent+"",f=void 0===t.minus?"−":t.minus+"",s=void 0===t.nan?"NaN":t.nan+"";function l(t){var n=(t=Jc(t)).fill,e=t.align,l=t.sign,h=t.symbol,d=t.zero,p=t.width,g=t.comma,y=t.precision,v=t.trim,_=t.type;"n"===_?(g=!0,_="g"):ef[_]||(void 0===y&&(y=12),v=!0,_="g"),(d||"0"===n&&"="===e)&&(d=!0,n="0",e="=");var b="$"===h?i:"#"===h&&/[boxX]/.test(_)?"0"+_.toLowerCase():"",m="$"===h?o:/[%p]/.test(_)?c:"",x=ef[_],w=/[defgprs%]/.test(_);function M(t){var i,o,c,h=b,M=m;if("c"===_)M=x(t)+M,t="";else{var T=(t=+t)<0||1/t<0;if(t=isNaN(t)?s:x(Math.abs(t),y),v&&(t=function(t){t:for(var n,e=t.length,r=1,i=-1;r0&&(i=0)}return i>0?t.slice(0,i)+t.slice(n+1):t}(t)),T&&0==+t&&"+"!==l&&(T=!1),h=(T?"("===l?l:f:"-"===l||"("===l?"":l)+h,M=("s"===_?uf[8+Kc/3]:"")+M+(T&&"("===l?")":""),w)for(i=-1,o=t.length;++i(c=t.charCodeAt(i))||c>57){M=(46===c?a+t.slice(i+1):t.slice(i))+M,t=t.slice(0,i);break}}g&&!d&&(t=r(t,1/0));var A=h.length+t.length+M.length,S=A>1)+h+t+M+S.slice(A);break;default:t=S+h+t+M}return u(t)}return y=void 0===y?6:/[gprs]/.test(_)?Math.max(1,Math.min(21,y)):Math.max(0,Math.min(20,y)),M.toString=function(){return t+""},M}return{format:l,formatPrefix:function(t,n){var e=l(((t=Jc(t)).type="f",t)),r=3*Math.max(-8,Math.min(8,Math.floor(Zc(n)/3))),i=Math.pow(10,-r),o=uf[8+r/3];return function(t){return e(i*t)+o}}}}function ff(n){return of=cf(n),t.format=of.format,t.formatPrefix=of.formatPrefix,of}function sf(t){return Math.max(0,-Zc(Math.abs(t)))}function lf(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(Zc(n)/3)))-Zc(Math.abs(t)))}function hf(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,Zc(n)-Zc(t))+1}t.format=void 0,t.formatPrefix=void 0,ff({thousands:",",grouping:[3],currency:["$",""]});var 
df=1e-6,pf=1e-12,gf=Math.PI,yf=gf/2,vf=gf/4,_f=2*gf,bf=180/gf,mf=gf/180,xf=Math.abs,wf=Math.atan,Mf=Math.atan2,Tf=Math.cos,Af=Math.ceil,Sf=Math.exp,Ef=Math.hypot,Nf=Math.log,kf=Math.pow,Cf=Math.sin,Pf=Math.sign||function(t){return t>0?1:t<0?-1:0},zf=Math.sqrt,$f=Math.tan;function Df(t){return t>1?0:t<-1?gf:Math.acos(t)}function Rf(t){return t>1?yf:t<-1?-yf:Math.asin(t)}function Ff(t){return(t=Cf(t/2))*t}function qf(){}function Uf(t,n){t&&Of.hasOwnProperty(t.type)&&Of[t.type](t,n)}var If={Feature:function(t,n){Uf(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++r=0?1:-1,i=r*e,o=Tf(n=(n*=mf)/2+vf),a=Cf(n),u=Vf*a,c=Gf*o+u*Tf(i),f=u*r*Cf(i);as.add(Mf(f,c)),Xf=t,Gf=o,Vf=a}function ds(t){return[Mf(t[1],t[0]),Rf(t[2])]}function ps(t){var n=t[0],e=t[1],r=Tf(e);return[r*Tf(n),r*Cf(n),Cf(e)]}function gs(t,n){return t[0]*n[0]+t[1]*n[1]+t[2]*n[2]}function ys(t,n){return[t[1]*n[2]-t[2]*n[1],t[2]*n[0]-t[0]*n[2],t[0]*n[1]-t[1]*n[0]]}function vs(t,n){t[0]+=n[0],t[1]+=n[1],t[2]+=n[2]}function _s(t,n){return[t[0]*n,t[1]*n,t[2]*n]}function bs(t){var n=zf(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=n,t[1]/=n,t[2]/=n}var ms,xs,ws,Ms,Ts,As,Ss,Es,Ns,ks,Cs,Ps,zs,$s,Ds,Rs,Fs={point:qs,lineStart:Is,lineEnd:Os,polygonStart:function(){Fs.point=Bs,Fs.lineStart=Ys,Fs.lineEnd=Ls,rs=new T,cs.polygonStart()},polygonEnd:function(){cs.polygonEnd(),Fs.point=qs,Fs.lineStart=Is,Fs.lineEnd=Os,as<0?(Wf=-(Kf=180),Zf=-(Qf=90)):rs>df?Qf=90:rs<-df&&(Zf=-90),os[0]=Wf,os[1]=Kf},sphere:function(){Wf=-(Kf=180),Zf=-(Qf=90)}};function qs(t,n){is.push(os=[Wf=t,Kf=t]),nQf&&(Qf=n)}function Us(t,n){var e=ps([t*mf,n*mf]);if(es){var r=ys(es,e),i=ys([r[1],-r[0],0],r);bs(i),i=ds(i);var o,a=t-Jf,u=a>0?1:-1,c=i[0]*bf*u,f=xf(a)>180;f^(u*JfQf&&(Qf=o):f^(u*Jf<(c=(c+360)%360-180)&&cQf&&(Qf=n)),f?tjs(Wf,Kf)&&(Kf=t):js(t,Kf)>js(Wf,Kf)&&(Wf=t):Kf>=Wf?(tKf&&(Kf=t)):t>Jf?js(Wf,t)>js(Wf,Kf)&&(Kf=t):js(t,Kf)>js(Wf,Kf)&&(Wf=t)}else is.push(os=[Wf=t,Kf=t]);nQf&&(Qf=n),es=e,Jf=t}function 
Is(){Fs.point=Us}function Os(){os[0]=Wf,os[1]=Kf,Fs.point=qs,es=null}function Bs(t,n){if(es){var e=t-Jf;rs.add(xf(e)>180?e+(e>0?360:-360):e)}else ts=t,ns=n;cs.point(t,n),Us(t,n)}function Ys(){cs.lineStart()}function Ls(){Bs(ts,ns),cs.lineEnd(),xf(rs)>df&&(Wf=-(Kf=180)),os[0]=Wf,os[1]=Kf,es=null}function js(t,n){return(n-=t)<0?n+360:n}function Hs(t,n){return t[0]-n[0]}function Xs(t,n){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:ngf&&(t-=Math.round(t/_f)*_f),[t,n]}function ul(t,n,e){return(t%=_f)?n||e?ol(fl(t),sl(n,e)):fl(t):n||e?sl(n,e):al}function cl(t){return function(n,e){return xf(n+=t)>gf&&(n-=Math.round(n/_f)*_f),[n,e]}}function fl(t){var n=cl(t);return n.invert=cl(-t),n}function sl(t,n){var e=Tf(t),r=Cf(t),i=Tf(n),o=Cf(n);function a(t,n){var a=Tf(n),u=Tf(t)*a,c=Cf(t)*a,f=Cf(n),s=f*e+u*r;return[Mf(c*i-s*o,u*e-f*r),Rf(s*i+c*o)]}return a.invert=function(t,n){var a=Tf(n),u=Tf(t)*a,c=Cf(t)*a,f=Cf(n),s=f*i-c*o;return[Mf(c*i+f*o,u*e+s*r),Rf(s*e-u*r)]},a}function ll(t){function n(n){return(n=t(n[0]*mf,n[1]*mf))[0]*=bf,n[1]*=bf,n}return t=ul(t[0]*mf,t[1]*mf,t.length>2?t[2]*mf:0),n.invert=function(n){return(n=t.invert(n[0]*mf,n[1]*mf))[0]*=bf,n[1]*=bf,n},n}function hl(t,n,e,r,i,o){if(e){var a=Tf(n),u=Cf(n),c=r*e;null==i?(i=n+r*_f,o=n-c/2):(i=dl(a,i),o=dl(a,o),(r>0?io)&&(i+=r*_f));for(var f,s=i;r>0?s>o:s1&&n.push(n.pop().concat(n.shift()))},result:function(){var e=n;return n=[],t=null,e}}}function gl(t,n){return xf(t[0]-n[0])=0;--o)i.point((s=f[o])[0],s[1]);else r(h.x,h.p.x,-1,i);h=h.p}f=(h=h.o).z,d=!d}while(!h.v);i.lineEnd()}}}function _l(t){if(n=t.length){for(var n,e,r=0,i=t[0];++r=0?1:-1,E=S*A,N=E>gf,k=y*w;if(c.add(Mf(k*S*Cf(E),v*M+k*Tf(E))),a+=N?A+S*_f:A,N^p>=e^m>=e){var C=ys(ps(d),ps(b));bs(C);var P=ys(o,C);bs(P);var z=(N^A>=0?-1:1)*Rf(P[2]);(r>z||r===z&&(C[0]||C[1]))&&(u+=N^A>=0?1:-1)}}return(a<-df||a0){for(l||(i.polygonStart(),l=!0),i.lineStart(),t=0;t1&&2&c&&h.push(h.pop().concat(h.shift())),a.push(h.filter(wl))}return h}}function wl(t){return t.length>1}function 
Ml(t,n){return((t=t.x)[0]<0?t[1]-yf-df:yf-t[1])-((n=n.x)[0]<0?n[1]-yf-df:yf-n[1])}al.invert=al;var Tl=xl((function(){return!0}),(function(t){var n,e=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),n=1},point:function(o,a){var u=o>0?gf:-gf,c=xf(o-e);xf(c-gf)0?yf:-yf),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(u,r),t.point(o,r),n=0):i!==u&&c>=gf&&(xf(e-i)df?wf((Cf(n)*(o=Tf(r))*Cf(e)-Cf(r)*(i=Tf(n))*Cf(t))/(i*o*a)):(n+r)/2}(e,r,o,a),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(u,r),n=0),t.point(e=o,r=a),i=u},lineEnd:function(){t.lineEnd(),e=r=NaN},clean:function(){return 2-n}}}),(function(t,n,e,r){var i;if(null==t)i=e*yf,r.point(-gf,i),r.point(0,i),r.point(gf,i),r.point(gf,0),r.point(gf,-i),r.point(0,-i),r.point(-gf,-i),r.point(-gf,0),r.point(-gf,i);else if(xf(t[0]-n[0])>df){var o=t[0]0,i=xf(n)>df;function o(t,e){return Tf(t)*Tf(e)>n}function a(t,e,r){var i=[1,0,0],o=ys(ps(t),ps(e)),a=gs(o,o),u=o[0],c=a-u*u;if(!c)return!r&&t;var f=n*a/c,s=-n*u/c,l=ys(i,o),h=_s(i,f);vs(h,_s(o,s));var d=l,p=gs(h,d),g=gs(d,d),y=p*p-g*(gs(h,h)-1);if(!(y<0)){var v=zf(y),_=_s(d,(-p-v)/g);if(vs(_,h),_=ds(_),!r)return _;var b,m=t[0],x=e[0],w=t[1],M=e[1];x0^_[1]<(xf(_[0]-m)gf^(m<=_[0]&&_[0]<=x)){var S=_s(d,(-p+v)/g);return vs(S,h),[_,ds(S)]}}}function u(n,e){var i=r?t:gf-t,o=0;return n<-i?o|=1:n>i&&(o|=2),e<-i?o|=4:e>i&&(o|=8),o}return xl(o,(function(t){var n,e,c,f,s;return{lineStart:function(){f=c=!1,s=1},point:function(l,h){var d,p=[l,h],g=o(l,h),y=r?g?0:u(l,h):g?u(l+(l<0?gf:-gf),h):0;if(!n&&(f=c=g)&&t.lineStart(),g!==c&&(!(d=a(n,p))||gl(n,d)||gl(p,d))&&(p[2]=1),g!==c)s=0,g?(t.lineStart(),d=a(p,n),t.point(d[0],d[1])):(d=a(n,p),t.point(d[0],d[1],2),t.lineEnd()),n=d;else if(i&&n&&r^g){var 
v;y&e||!(v=a(p,n,!0))||(s=0,r?(t.lineStart(),t.point(v[0][0],v[0][1]),t.point(v[1][0],v[1][1]),t.lineEnd()):(t.point(v[1][0],v[1][1]),t.lineEnd(),t.lineStart(),t.point(v[0][0],v[0][1],3)))}!g||n&&gl(n,p)||t.point(p[0],p[1]),n=p,c=g,e=y},lineEnd:function(){c&&t.lineEnd(),n=null},clean:function(){return s|(f&&c)<<1}}}),(function(n,r,i,o){hl(o,t,e,i,n,r)}),r?[0,-t]:[-gf,t-gf])}var Sl,El,Nl,kl,Cl=1e9,Pl=-Cl;function zl(t,n,e,r){function i(i,o){return t<=i&&i<=e&&n<=o&&o<=r}function o(i,o,u,f){var s=0,l=0;if(null==i||(s=a(i,u))!==(l=a(o,u))||c(i,o)<0^u>0)do{f.point(0===s||3===s?t:e,s>1?r:n)}while((s=(s+u+4)%4)!==l);else f.point(o[0],o[1])}function a(r,i){return xf(r[0]-t)0?0:3:xf(r[0]-e)0?2:1:xf(r[1]-n)0?1:0:i>0?3:2}function u(t,n){return c(t.x,n.x)}function c(t,n){var e=a(t,1),r=a(n,1);return e!==r?e-r:0===e?n[1]-t[1]:1===e?t[0]-n[0]:2===e?t[1]-n[1]:n[0]-t[0]}return function(a){var c,f,s,l,h,d,p,g,y,v,_,b=a,m=pl(),x={point:w,lineStart:function(){x.point=M,f&&f.push(s=[]);v=!0,y=!1,p=g=NaN},lineEnd:function(){c&&(M(l,h),d&&y&&m.rejoin(),c.push(m.result()));x.point=w,y&&b.lineEnd()},polygonStart:function(){b=m,c=[],f=[],_=!0},polygonEnd:function(){var n=function(){for(var n=0,e=0,i=f.length;er&&(h-o)*(r-a)>(d-a)*(t-o)&&++n:d<=r&&(h-o)*(r-a)<(d-a)*(t-o)&&--n;return n}(),e=_&&n,i=(c=ft(c)).length;(e||i)&&(a.polygonStart(),e&&(a.lineStart(),o(null,null,1,a),a.lineEnd()),i&&vl(c,u,n,o,a),a.polygonEnd());b=a,c=f=s=null}};function w(t,n){i(t,n)&&b.point(t,n)}function M(o,a){var u=i(o,a);if(f&&s.push([o,a]),v)l=o,h=a,d=u,v=!1,u&&(b.lineStart(),b.point(o,a));else if(u&&y)b.point(o,a);else{var c=[p=Math.max(Pl,Math.min(Cl,p)),g=Math.max(Pl,Math.min(Cl,g))],m=[o=Math.max(Pl,Math.min(Cl,o)),a=Math.max(Pl,Math.min(Cl,a))];!function(t,n,e,r,i,o){var a,u=t[0],c=t[1],f=0,s=1,l=n[0]-u,h=n[1]-c;if(a=e-u,l||!(a>0)){if(a/=l,l<0){if(a0){if(a>s)return;a>f&&(f=a)}if(a=i-u,l||!(a<0)){if(a/=l,l<0){if(a>s)return;a>f&&(f=a)}else 
if(l>0){if(a0)){if(a/=h,h<0){if(a0){if(a>s)return;a>f&&(f=a)}if(a=o-c,h||!(a<0)){if(a/=h,h<0){if(a>s)return;a>f&&(f=a)}else if(h>0){if(a0&&(t[0]=u+f*l,t[1]=c+f*h),s<1&&(n[0]=u+s*l,n[1]=c+s*h),!0}}}}}(c,m,t,n,e,r)?u&&(b.lineStart(),b.point(o,a),_=!1):(y||(b.lineStart(),b.point(c[0],c[1])),b.point(m[0],m[1]),u||b.lineEnd(),_=!1)}p=o,g=a,y=u}return x}}var $l={sphere:qf,point:qf,lineStart:function(){$l.point=Rl,$l.lineEnd=Dl},lineEnd:qf,polygonStart:qf,polygonEnd:qf};function Dl(){$l.point=$l.lineEnd=qf}function Rl(t,n){El=t*=mf,Nl=Cf(n*=mf),kl=Tf(n),$l.point=Fl}function Fl(t,n){t*=mf;var e=Cf(n*=mf),r=Tf(n),i=xf(t-El),o=Tf(i),a=r*Cf(i),u=kl*e-Nl*r*o,c=Nl*e+kl*r*o;Sl.add(Mf(zf(a*a+u*u),c)),El=t,Nl=e,kl=r}function ql(t){return Sl=new T,Lf(t,$l),+Sl}var Ul=[null,null],Il={type:"LineString",coordinates:Ul};function Ol(t,n){return Ul[0]=t,Ul[1]=n,ql(Il)}var Bl={Feature:function(t,n){return Ll(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++r0&&(i=Ol(t[o],t[o-1]))>0&&e<=i&&r<=i&&(e+r-i)*(1-Math.pow((e-r)/i,2))df})).map(c)).concat(lt(Af(o/d)*d,i,d).filter((function(t){return xf(t%g)>df})).map(f))}return v.lines=function(){return _().map((function(t){return{type:"LineString",coordinates:t}}))},v.outline=function(){return{type:"Polygon",coordinates:[s(r).concat(l(a).slice(1),s(e).reverse().slice(1),l(u).reverse().slice(1))]}},v.extent=function(t){return arguments.length?v.extentMajor(t).extentMinor(t):v.extentMinor()},v.extentMajor=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],u=+t[0][1],a=+t[1][1],r>e&&(t=r,r=e,e=t),u>a&&(t=u,u=a,a=t),v.precision(y)):[[r,u],[e,a]]},v.extentMinor=function(e){return arguments.length?(n=+e[0][0],t=+e[1][0],o=+e[0][1],i=+e[1][1],n>t&&(e=n,n=t,t=e),o>i&&(e=o,o=i,i=e),v.precision(y)):[[n,o],[t,i]]},v.step=function(t){return arguments.length?v.stepMajor(t).stepMinor(t):v.stepMinor()},v.stepMajor=function(t){return arguments.length?(p=+t[0],g=+t[1],v):[p,g]},v.stepMinor=function(t){return 
arguments.length?(h=+t[0],d=+t[1],v):[h,d]},v.precision=function(h){return arguments.length?(y=+h,c=Wl(o,i,90),f=Zl(n,t,y),s=Wl(u,a,90),l=Zl(r,e,y),v):y},v.extentMajor([[-180,-90+df],[180,90-df]]).extentMinor([[-180,-80-df],[180,80+df]])}var Ql,Jl,th,nh,eh=t=>t,rh=new T,ih=new T,oh={point:qf,lineStart:qf,lineEnd:qf,polygonStart:function(){oh.lineStart=ah,oh.lineEnd=fh},polygonEnd:function(){oh.lineStart=oh.lineEnd=oh.point=qf,rh.add(xf(ih)),ih=new T},result:function(){var t=rh/2;return rh=new T,t}};function ah(){oh.point=uh}function uh(t,n){oh.point=ch,Ql=th=t,Jl=nh=n}function ch(t,n){ih.add(nh*t-th*n),th=t,nh=n}function fh(){ch(Ql,Jl)}var sh=oh,lh=1/0,hh=lh,dh=-lh,ph=dh,gh={point:function(t,n){tdh&&(dh=t);nph&&(ph=n)},lineStart:qf,lineEnd:qf,polygonStart:qf,polygonEnd:qf,result:function(){var t=[[lh,hh],[dh,ph]];return dh=ph=-(hh=lh=1/0),t}};var yh,vh,_h,bh,mh=gh,xh=0,wh=0,Mh=0,Th=0,Ah=0,Sh=0,Eh=0,Nh=0,kh=0,Ch={point:Ph,lineStart:zh,lineEnd:Rh,polygonStart:function(){Ch.lineStart=Fh,Ch.lineEnd=qh},polygonEnd:function(){Ch.point=Ph,Ch.lineStart=zh,Ch.lineEnd=Rh},result:function(){var t=kh?[Eh/kh,Nh/kh]:Sh?[Th/Sh,Ah/Sh]:Mh?[xh/Mh,wh/Mh]:[NaN,NaN];return xh=wh=Mh=Th=Ah=Sh=Eh=Nh=kh=0,t}};function Ph(t,n){xh+=t,wh+=n,++Mh}function zh(){Ch.point=$h}function $h(t,n){Ch.point=Dh,Ph(_h=t,bh=n)}function Dh(t,n){var e=t-_h,r=n-bh,i=zf(e*e+r*r);Th+=i*(_h+t)/2,Ah+=i*(bh+n)/2,Sh+=i,Ph(_h=t,bh=n)}function Rh(){Ch.point=Ph}function Fh(){Ch.point=Uh}function qh(){Ih(yh,vh)}function Uh(t,n){Ch.point=Ih,Ph(yh=_h=t,vh=bh=n)}function Ih(t,n){var e=t-_h,r=n-bh,i=zf(e*e+r*r);Th+=i*(_h+t)/2,Ah+=i*(bh+n)/2,Sh+=i,Eh+=(i=bh*t-_h*n)*(_h+t),Nh+=i*(bh+n),kh+=3*i,Ph(_h=t,bh=n)}var Oh=Ch;function Bh(t){this._context=t}Bh.prototype={_radius:4.5,pointRadius:function(t){return 
this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._context.moveTo(t,n),this._point=1;break;case 1:this._context.lineTo(t,n);break;default:this._context.moveTo(t+this._radius,n),this._context.arc(t,n,this._radius,0,_f)}},result:qf};var Yh,Lh,jh,Hh,Xh,Gh=new T,Vh={point:qf,lineStart:function(){Vh.point=Wh},lineEnd:function(){Yh&&Zh(Lh,jh),Vh.point=qf},polygonStart:function(){Yh=!0},polygonEnd:function(){Yh=null},result:function(){var t=+Gh;return Gh=new T,t}};function Wh(t,n){Vh.point=Zh,Lh=Hh=t,jh=Xh=n}function Zh(t,n){Hh-=t,Xh-=n,Gh.add(zf(Hh*Hh+Xh*Xh)),Hh=t,Xh=n}var Kh=Vh;let Qh,Jh,td,nd;class ed{constructor(t){this._append=null==t?rd:function(t){const n=Math.floor(t);if(!(n>=0))throw new RangeError(`invalid digits: ${t}`);if(n>15)return rd;if(n!==Qh){const t=10**n;Qh=n,Jh=function(n){let e=1;this._+=n[0];for(const r=n.length;e4*n&&g--){var m=a+h,x=u+d,w=c+p,M=zf(m*m+x*x+w*w),T=Rf(w/=M),A=xf(xf(w)-1)n||xf((v*k+_*C)/b-.5)>.3||a*h+u*d+c*p2?t[2]%360*mf:0,k()):[y*bf,v*bf,_*bf]},E.angle=function(t){return arguments.length?(b=t%360*mf,k()):b*bf},E.reflectX=function(t){return arguments.length?(m=t?-1:1,k()):m<0},E.reflectY=function(t){return arguments.length?(x=t?-1:1,k()):x<0},E.precision=function(t){return arguments.length?(a=dd(u,S=t*t),C()):zf(S)},E.fitExtent=function(t,n){return ud(E,t,n)},E.fitSize=function(t,n){return cd(E,t,n)},E.fitWidth=function(t,n){return fd(E,t,n)},E.fitHeight=function(t,n){return sd(E,t,n)},function(){return n=t.apply(this,arguments),E.invert=n.invert&&N,k()}}function _d(t){var n=0,e=gf/3,r=vd(t),i=r(n,e);return i.parallels=function(t){return arguments.length?r(n=t[0]*mf,e=t[1]*mf):[n*bf,e*bf]},i}function bd(t,n){var e=Cf(t),r=(e+Cf(n))/2;if(xf(r)0?n<-yf+df&&(n=-yf+df):n>yf-df&&(n=yf-df);var 
e=i/kf(Nd(n),r);return[e*Cf(r*t),i-e*Tf(r*t)]}return o.invert=function(t,n){var e=i-n,o=Pf(r)*zf(t*t+e*e),a=Mf(t,xf(e))*Pf(e);return e*r<0&&(a-=gf*Pf(t)*Pf(e)),[a/r,2*wf(kf(i/o,1/r))-yf]},o}function Cd(t,n){return[t,n]}function Pd(t,n){var e=Tf(t),r=t===n?Cf(t):(e-Tf(n))/(n-t),i=e/r+t;if(xf(r)=0;)n+=e[r].value;else n=1;t.value=n}function Gd(t,n){t instanceof Map?(t=[void 0,t],void 0===n&&(n=Wd)):void 0===n&&(n=Vd);for(var e,r,i,o,a,u=new Qd(t),c=[u];e=c.pop();)if((i=n(e.data))&&(a=(i=Array.from(i)).length))for(e.children=i,o=a-1;o>=0;--o)c.push(r=i[o]=new Qd(i[o])),r.parent=e,r.depth=e.depth+1;return u.eachBefore(Kd)}function Vd(t){return t.children}function Wd(t){return Array.isArray(t)?t[1]:null}function Zd(t){void 0!==t.data.value&&(t.value=t.data.value),t.data=t.data.data}function Kd(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function Qd(t){this.data=t,this.depth=this.height=0,this.parent=null}function Jd(t){return null==t?null:tp(t)}function tp(t){if("function"!=typeof t)throw new Error;return t}function np(){return 0}function ep(t){return function(){return t}}qd.invert=function(t,n){for(var e,r=n,i=r*r,o=i*i*i,a=0;a<12&&(o=(i=(r-=e=(r*(zd+$d*i+o*(Dd+Rd*i))-n)/(zd+3*$d*i+o*(7*Dd+9*Rd*i)))*r)*i*i,!(xf(e)df&&--i>0);return[t/(.8707+(o=r*r)*(o*(o*o*o*(.003971-.001529*o)-.013791)-.131979)),r]},Od.invert=Md(Rf),Bd.invert=Md((function(t){return 2*wf(t)})),Yd.invert=function(t,n){return[-n,2*wf(Sf(t))-yf]},Qd.prototype=Gd.prototype={constructor:Qd,count:function(){return this.eachAfter(Xd)},each:function(t,n){let e=-1;for(const r of this)t.call(n,r,++e,this);return this},eachAfter:function(t,n){for(var e,r,i,o=this,a=[o],u=[],c=-1;o=a.pop();)if(u.push(o),e=o.children)for(r=0,i=e.length;r=0;--r)o.push(e[r]);return this},find:function(t,n){let e=-1;for(const r of this)if(t.call(n,r,++e,this))return r},sum:function(t){return this.eachAfter((function(n){for(var 
e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e}))},sort:function(t){return this.eachBefore((function(n){n.children&&n.children.sort(t)}))},path:function(t){for(var n=this,e=function(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;t=e.pop(),n=r.pop();for(;t===n;)i=t,t=e.pop(),n=r.pop();return i}(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){return Array.from(this)},leaves:function(){var t=[];return this.eachBefore((function(n){n.children||t.push(n)})),t},links:function(){var t=this,n=[];return t.each((function(e){e!==t&&n.push({source:e.parent,target:e})})),n},copy:function(){return Gd(this).eachBefore(Zd)},[Symbol.iterator]:function*(){var t,n,e,r,i=this,o=[i];do{for(t=o.reverse(),o=[];i=t.pop();)if(yield i,n=i.children)for(e=0,r=n.length;e(t=(rp*t+ip)%op)/op}function up(t,n){for(var e,r,i=0,o=(t=function(t,n){let e,r,i=t.length;for(;i;)r=n()*i--|0,e=t[i],t[i]=t[r],t[r]=e;return t}(Array.from(t),n)).length,a=[];i0&&e*e>r*r+i*i}function lp(t,n){for(var e=0;e1e-6?(E+Math.sqrt(E*E-4*S*N))/(2*S):N/E);return{x:r+w+M*k,y:i+T+A*k,r:k}}function gp(t,n,e){var r,i,o,a,u=t.x-n.x,c=t.y-n.y,f=u*u+c*c;f?(i=n.r+e.r,i*=i,a=t.r+e.r,i>(a*=a)?(r=(f+a-i)/(2*f),o=Math.sqrt(Math.max(0,a/f-r*r)),e.x=t.x-r*u-o*c,e.y=t.y-r*c+o*u):(r=(f+i-a)/(2*f),o=Math.sqrt(Math.max(0,i/f-r*r)),e.x=n.x+r*u-o*c,e.y=n.y+r*c+o*u)):(e.x=n.x+e.r,e.y=n.y)}function yp(t,n){var e=t.r+n.r-1e-6,r=n.x-t.x,i=n.y-t.y;return e>0&&e*e>r*r+i*i}function vp(t){var n=t._,e=t.next._,r=n.r+e.r,i=(n.x*e.r+e.x*n.r)/r,o=(n.y*e.r+e.y*n.r)/r;return i*i+o*o}function _p(t){this._=t,this.next=null,this.previous=null}function bp(t,n){if(!(o=(t=function(t){return"object"==typeof t&&"length"in t?t:Array.from(t)}(t)).length))return 0;var e,r,i,o,a,u,c,f,s,l,h;if((e=t[0]).x=0,e.y=0,!(o>1))return 
e.r;if(r=t[1],e.x=-r.r,r.x=e.r,r.y=0,!(o>2))return e.r+r.r;gp(r,e,i=t[2]),e=new _p(e),r=new _p(r),i=new _p(i),e.next=i.previous=r,r.next=e.previous=i,i.next=r.previous=e;t:for(c=3;c1&&!zp(t,n););return t.slice(0,n)}function zp(t,n){if("/"===t[n]){let e=0;for(;n>0&&"\\"===t[--n];)++e;if(!(1&e))return!0}return!1}function $p(t,n){return t.parent===n.parent?1:2}function Dp(t){var n=t.children;return n?n[0]:t.t}function Rp(t){var n=t.children;return n?n[n.length-1]:t.t}function Fp(t,n,e){var r=e/(n.i-t.i);n.c-=r,n.s+=e,t.c+=r,n.z+=e,n.m+=e}function qp(t,n,e){return t.a.parent===n.parent?t.a:e}function Up(t,n){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=n}function Ip(t,n,e,r,i){for(var o,a=t.children,u=-1,c=a.length,f=t.value&&(i-e)/t.value;++uh&&(h=u),y=s*s*g,(d=Math.max(h/y,y/l))>p){s-=u;break}p=d}v.push(a={value:s,dice:c1?n:1)},e}(Op);var Lp=function t(n){function e(t,e,r,i,o){if((a=t._squarify)&&a.ratio===n)for(var a,u,c,f,s,l=-1,h=a.length,d=t.value;++l1?n:1)},e}(Op);function jp(t,n,e){return(n[0]-t[0])*(e[1]-t[1])-(n[1]-t[1])*(e[0]-t[0])}function Hp(t,n){return t[0]-n[0]||t[1]-n[1]}function Xp(t){const n=t.length,e=[0,1];let r,i=2;for(r=2;r1&&jp(t[e[i-2]],t[e[i-1]],t[r])<=0;)--i;e[i++]=r}return e.slice(0,i)}var Gp=Math.random,Vp=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,1===arguments.length?(e=t,t=0):e-=t,function(){return n()*e+t}}return e.source=t,e}(Gp),Wp=function t(n){function e(t,e){return arguments.length<2&&(e=t,t=0),t=Math.floor(t),e=Math.floor(e)-t,function(){return Math.floor(n()*e+t)}}return e.source=t,e}(Gp),Zp=function t(n){function e(t,e){var r,i;return t=null==t?0:+t,e=null==e?1:+e,function(){var o;if(null!=r)o=r,r=null;else do{r=2*n()-1,o=2*n()-1,i=r*r+o*o}while(!i||i>1);return t+e*o*Math.sqrt(-2*Math.log(i)/i)}}return e.source=t,e}(Gp),Kp=function t(n){var e=Zp.source(n);function r(){var t=e.apply(this,arguments);return function(){return 
Math.exp(t())}}return r.source=t,r}(Gp),Qp=function t(n){function e(t){return(t=+t)<=0?()=>0:function(){for(var e=0,r=t;r>1;--r)e+=n();return e+r*n()}}return e.source=t,e}(Gp),Jp=function t(n){var e=Qp.source(n);function r(t){if(0==(t=+t))return n;var r=e(t);return function(){return r()/t}}return r.source=t,r}(Gp),tg=function t(n){function e(t){return function(){return-Math.log1p(-n())/t}}return e.source=t,e}(Gp),ng=function t(n){function e(t){if((t=+t)<0)throw new RangeError("invalid alpha");return t=1/-t,function(){return Math.pow(1-n(),t)}}return e.source=t,e}(Gp),eg=function t(n){function e(t){if((t=+t)<0||t>1)throw new RangeError("invalid p");return function(){return Math.floor(n()+t)}}return e.source=t,e}(Gp),rg=function t(n){function e(t){if((t=+t)<0||t>1)throw new RangeError("invalid p");return 0===t?()=>1/0:1===t?()=>1:(t=Math.log1p(-t),function(){return 1+Math.floor(Math.log1p(-n())/t)})}return e.source=t,e}(Gp),ig=function t(n){var e=Zp.source(n)();function r(t,r){if((t=+t)<0)throw new RangeError("invalid k");if(0===t)return()=>0;if(r=null==r?1:+r,1===t)return()=>-Math.log1p(-n())*r;var i=(t<1?t+1:t)-1/3,o=1/(3*Math.sqrt(i)),a=t<1?()=>Math.pow(n(),1/t):()=>1;return function(){do{do{var t=e(),u=1+o*t}while(u<=0);u*=u*u;var c=1-n()}while(c>=1-.0331*t*t*t*t&&Math.log(c)>=.5*t*t+i*(1-u+Math.log(u)));return i*u*a()*r}}return r.source=t,r}(Gp),og=function t(n){var e=ig.source(n);function r(t,n){var r=e(t),i=e(n);return function(){var t=r();return 0===t?0:t/(t+i())}}return r.source=t,r}(Gp),ag=function t(n){var e=rg.source(n),r=og.source(n);function i(t,n){return t=+t,(n=+n)>=1?()=>t:n<=0?()=>0:function(){for(var i=0,o=t,a=n;o*a>16&&o*(1-a)>16;){var u=Math.floor((o+1)*a),c=r(u,o-u+1)();c<=a?(i+=u,o-=u,a=(a-c)/(1-c)):(o=u-1,a/=c)}for(var f=a<.5,s=e(f?a:1-a),l=s(),h=0;l<=o;++h)l+=s();return i+(f?h:o-h)}}return i.source=t,i}(Gp),ug=function t(n){function e(t,e,r){var i;return 
0==(t=+t)?i=t=>-Math.log(t):(t=1/t,i=n=>Math.pow(n,t)),e=null==e?0:+e,r=null==r?1:+r,function(){return e+r*i(-Math.log1p(-n()))}}return e.source=t,e}(Gp),cg=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,function(){return t+e*Math.tan(Math.PI*n())}}return e.source=t,e}(Gp),fg=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,function(){var r=n();return t+e*Math.log(r/(1-r))}}return e.source=t,e}(Gp),sg=function t(n){var e=ig.source(n),r=ag.source(n);function i(t){return function(){for(var i=0,o=t;o>16;){var a=Math.floor(.875*o),u=e(a)();if(u>o)return i+r(a-1,o/u)();i+=a,o-=u}for(var c=-Math.log1p(-n()),f=0;c<=o;++f)c-=Math.log1p(-n());return i+f}}return i.source=t,i}(Gp);const lg=1/4294967296;function hg(t,n){switch(arguments.length){case 0:break;case 1:this.range(t);break;default:this.range(n).domain(t)}return this}function dg(t,n){switch(arguments.length){case 0:break;case 1:"function"==typeof t?this.interpolator(t):this.range(t);break;default:this.domain(t),"function"==typeof n?this.interpolator(n):this.range(n)}return this}const pg=Symbol("implicit");function gg(){var t=new InternMap,n=[],e=[],r=pg;function i(i){let o=t.get(i);if(void 0===o){if(r!==pg)return r;t.set(i,o=n.push(i)-1)}return e[o%e.length]}return i.domain=function(e){if(!arguments.length)return n.slice();n=[],t=new InternMap;for(const r of e)t.has(r)||t.set(r,n.push(r)-1);return i},i.range=function(t){return arguments.length?(e=Array.from(t),i):e.slice()},i.unknown=function(t){return arguments.length?(r=t,i):r},i.copy=function(){return gg(n,e).unknown(r)},hg.apply(i,arguments),i}function yg(){var t,n,e=gg().unknown(void 0),r=e.domain,i=e.range,o=0,a=1,u=!1,c=0,f=0,s=.5;function l(){var e=r().length,l=an&&(e=t,t=n,n=e),function(e){return Math.max(t,Math.min(n,e))}}(a[0],a[t-1])),r=t>2?Mg:wg,i=o=null,l}function l(n){return null==n||isNaN(n=+n)?e:(i||(i=r(a.map(t),u,c)))(t(f(n)))}return l.invert=function(e){return 
f(n((o||(o=r(u,a.map(t),Yr)))(e)))},l.domain=function(t){return arguments.length?(a=Array.from(t,_g),s()):a.slice()},l.range=function(t){return arguments.length?(u=Array.from(t),s()):u.slice()},l.rangeRound=function(t){return u=Array.from(t),c=Vr,s()},l.clamp=function(t){return arguments.length?(f=!!t||mg,s()):f!==mg},l.interpolate=function(t){return arguments.length?(c=t,s()):c},l.unknown=function(t){return arguments.length?(e=t,l):e},function(e,r){return t=e,n=r,s()}}function Sg(){return Ag()(mg,mg)}function Eg(n,e,r,i){var o,a=W(n,e,r);switch((i=Jc(null==i?",f":i)).type){case"s":var u=Math.max(Math.abs(n),Math.abs(e));return null!=i.precision||isNaN(o=lf(a,u))||(i.precision=o),t.formatPrefix(i,u);case"":case"e":case"g":case"p":case"r":null!=i.precision||isNaN(o=hf(a,Math.max(Math.abs(n),Math.abs(e))))||(i.precision=o-("e"===i.type));break;case"f":case"%":null!=i.precision||isNaN(o=sf(a))||(i.precision=o-2*("%"===i.type))}return t.format(i)}function Ng(t){var n=t.domain;return t.ticks=function(t){var e=n();return G(e[0],e[e.length-1],null==t?10:t)},t.tickFormat=function(t,e){var r=n();return Eg(r[0],r[r.length-1],null==t?10:t,e)},t.nice=function(e){null==e&&(e=10);var r,i,o=n(),a=0,u=o.length-1,c=o[a],f=o[u],s=10;for(f0;){if((i=V(c,f,e))===r)return o[a]=c,o[u]=f,n(o);if(i>0)c=Math.floor(c/i)*i,f=Math.ceil(f/i)*i;else{if(!(i<0))break;c=Math.ceil(c*i)/i,f=Math.floor(f*i)/i}r=i}return t},t}function kg(t,n){var e,r=0,i=(t=t.slice()).length-1,o=t[r],a=t[i];return a-t(-n,e)}function Fg(n){const e=n(Cg,Pg),r=e.domain;let i,o,a=10;function u(){return i=function(t){return t===Math.E?Math.log:10===t&&Math.log10||2===t&&Math.log2||(t=Math.log(t),n=>Math.log(n)/t)}(a),o=function(t){return 10===t?Dg:t===Math.E?Math.exp:n=>Math.pow(t,n)}(a),r()[0]<0?(i=Rg(i),o=Rg(o),n(zg,$g)):n(Cg,Pg),e}return e.base=function(t){return arguments.length?(a=+t,u()):a},e.domain=function(t){return arguments.length?(r(t),u()):r()},e.ticks=t=>{const n=r();let e=n[0],u=n[n.length-1];const 
c=u0){for(;l<=h;++l)for(f=1;fu)break;p.push(s)}}else for(;l<=h;++l)for(f=a-1;f>=1;--f)if(s=l>0?f/o(-l):f*o(l),!(su)break;p.push(s)}2*p.length{if(null==n&&(n=10),null==r&&(r=10===a?"s":","),"function"!=typeof r&&(a%1||null!=(r=Jc(r)).precision||(r.trim=!0),r=t.format(r)),n===1/0)return r;const u=Math.max(1,a*n/e.ticks().length);return t=>{let n=t/o(Math.round(i(t)));return n*ar(kg(r(),{floor:t=>o(Math.floor(i(t))),ceil:t=>o(Math.ceil(i(t)))})),e}function qg(t){return function(n){return Math.sign(n)*Math.log1p(Math.abs(n/t))}}function Ug(t){return function(n){return Math.sign(n)*Math.expm1(Math.abs(n))*t}}function Ig(t){var n=1,e=t(qg(n),Ug(n));return e.constant=function(e){return arguments.length?t(qg(n=+e),Ug(n)):n},Ng(e)}function Og(t){return function(n){return n<0?-Math.pow(-n,t):Math.pow(n,t)}}function Bg(t){return t<0?-Math.sqrt(-t):Math.sqrt(t)}function Yg(t){return t<0?-t*t:t*t}function Lg(t){var n=t(mg,mg),e=1;return n.exponent=function(n){return arguments.length?1===(e=+n)?t(mg,mg):.5===e?t(Bg,Yg):t(Og(e),Og(1/e)):e},Ng(n)}function jg(){var t=Lg(Ag());return t.copy=function(){return Tg(t,jg()).exponent(t.exponent())},hg.apply(t,arguments),t}function Hg(t){return Math.sign(t)*t*t}const Xg=new Date,Gg=new Date;function Vg(t,n,e,r){function i(n){return t(n=0===arguments.length?new Date:new Date(+n)),n}return i.floor=n=>(t(n=new Date(+n)),n),i.ceil=e=>(t(e=new Date(e-1)),n(e,1),t(e),e),i.round=t=>{const n=i(t),e=i.ceil(t);return t-n(n(t=new Date(+t),null==e?1:Math.floor(e)),t),i.range=(e,r,o)=>{const a=[];if(e=i.ceil(e),o=null==o?1:Math.floor(o),!(e0))return a;let u;do{a.push(u=new Date(+e)),n(e,o),t(e)}while(uVg((n=>{if(n>=n)for(;t(n),!e(n);)n.setTime(n-1)}),((t,r)=>{if(t>=t)if(r<0)for(;++r<=0;)for(;n(t,-1),!e(t););else for(;--r>=0;)for(;n(t,1),!e(t););})),e&&(i.count=(n,r)=>(Xg.setTime(+n),Gg.setTime(+r),t(Xg),t(Gg),Math.floor(e(Xg,Gg))),i.every=t=>(t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?n=>r(n)%t==0:n=>i.count(0,n)%t==0):i:null)),i}const 
Wg=Vg((()=>{}),((t,n)=>{t.setTime(+t+n)}),((t,n)=>n-t));Wg.every=t=>(t=Math.floor(t),isFinite(t)&&t>0?t>1?Vg((n=>{n.setTime(Math.floor(n/t)*t)}),((n,e)=>{n.setTime(+n+e*t)}),((n,e)=>(e-n)/t)):Wg:null);const Zg=Wg.range,Kg=1e3,Qg=6e4,Jg=36e5,ty=864e5,ny=6048e5,ey=2592e6,ry=31536e6,iy=Vg((t=>{t.setTime(t-t.getMilliseconds())}),((t,n)=>{t.setTime(+t+n*Kg)}),((t,n)=>(n-t)/Kg),(t=>t.getUTCSeconds())),oy=iy.range,ay=Vg((t=>{t.setTime(t-t.getMilliseconds()-t.getSeconds()*Kg)}),((t,n)=>{t.setTime(+t+n*Qg)}),((t,n)=>(n-t)/Qg),(t=>t.getMinutes())),uy=ay.range,cy=Vg((t=>{t.setUTCSeconds(0,0)}),((t,n)=>{t.setTime(+t+n*Qg)}),((t,n)=>(n-t)/Qg),(t=>t.getUTCMinutes())),fy=cy.range,sy=Vg((t=>{t.setTime(t-t.getMilliseconds()-t.getSeconds()*Kg-t.getMinutes()*Qg)}),((t,n)=>{t.setTime(+t+n*Jg)}),((t,n)=>(n-t)/Jg),(t=>t.getHours())),ly=sy.range,hy=Vg((t=>{t.setUTCMinutes(0,0,0)}),((t,n)=>{t.setTime(+t+n*Jg)}),((t,n)=>(n-t)/Jg),(t=>t.getUTCHours())),dy=hy.range,py=Vg((t=>t.setHours(0,0,0,0)),((t,n)=>t.setDate(t.getDate()+n)),((t,n)=>(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*Qg)/ty),(t=>t.getDate()-1)),gy=py.range,yy=Vg((t=>{t.setUTCHours(0,0,0,0)}),((t,n)=>{t.setUTCDate(t.getUTCDate()+n)}),((t,n)=>(n-t)/ty),(t=>t.getUTCDate()-1)),vy=yy.range,_y=Vg((t=>{t.setUTCHours(0,0,0,0)}),((t,n)=>{t.setUTCDate(t.getUTCDate()+n)}),((t,n)=>(n-t)/ty),(t=>Math.floor(t/ty))),by=_y.range;function my(t){return Vg((n=>{n.setDate(n.getDate()-(n.getDay()+7-t)%7),n.setHours(0,0,0,0)}),((t,n)=>{t.setDate(t.getDate()+7*n)}),((t,n)=>(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*Qg)/ny))}const xy=my(0),wy=my(1),My=my(2),Ty=my(3),Ay=my(4),Sy=my(5),Ey=my(6),Ny=xy.range,ky=wy.range,Cy=My.range,Py=Ty.range,zy=Ay.range,$y=Sy.range,Dy=Ey.range;function Ry(t){return Vg((n=>{n.setUTCDate(n.getUTCDate()-(n.getUTCDay()+7-t)%7),n.setUTCHours(0,0,0,0)}),((t,n)=>{t.setUTCDate(t.getUTCDate()+7*n)}),((t,n)=>(n-t)/ny))}const 
Fy=Ry(0),qy=Ry(1),Uy=Ry(2),Iy=Ry(3),Oy=Ry(4),By=Ry(5),Yy=Ry(6),Ly=Fy.range,jy=qy.range,Hy=Uy.range,Xy=Iy.range,Gy=Oy.range,Vy=By.range,Wy=Yy.range,Zy=Vg((t=>{t.setDate(1),t.setHours(0,0,0,0)}),((t,n)=>{t.setMonth(t.getMonth()+n)}),((t,n)=>n.getMonth()-t.getMonth()+12*(n.getFullYear()-t.getFullYear())),(t=>t.getMonth())),Ky=Zy.range,Qy=Vg((t=>{t.setUTCDate(1),t.setUTCHours(0,0,0,0)}),((t,n)=>{t.setUTCMonth(t.getUTCMonth()+n)}),((t,n)=>n.getUTCMonth()-t.getUTCMonth()+12*(n.getUTCFullYear()-t.getUTCFullYear())),(t=>t.getUTCMonth())),Jy=Qy.range,tv=Vg((t=>{t.setMonth(0,1),t.setHours(0,0,0,0)}),((t,n)=>{t.setFullYear(t.getFullYear()+n)}),((t,n)=>n.getFullYear()-t.getFullYear()),(t=>t.getFullYear()));tv.every=t=>isFinite(t=Math.floor(t))&&t>0?Vg((n=>{n.setFullYear(Math.floor(n.getFullYear()/t)*t),n.setMonth(0,1),n.setHours(0,0,0,0)}),((n,e)=>{n.setFullYear(n.getFullYear()+e*t)})):null;const nv=tv.range,ev=Vg((t=>{t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)}),((t,n)=>{t.setUTCFullYear(t.getUTCFullYear()+n)}),((t,n)=>n.getUTCFullYear()-t.getUTCFullYear()),(t=>t.getUTCFullYear()));ev.every=t=>isFinite(t=Math.floor(t))&&t>0?Vg((n=>{n.setUTCFullYear(Math.floor(n.getUTCFullYear()/t)*t),n.setUTCMonth(0,1),n.setUTCHours(0,0,0,0)}),((n,e)=>{n.setUTCFullYear(n.getUTCFullYear()+e*t)})):null;const rv=ev.range;function iv(t,n,e,i,o,a){const u=[[iy,1,Kg],[iy,5,5e3],[iy,15,15e3],[iy,30,3e4],[a,1,Qg],[a,5,3e5],[a,15,9e5],[a,30,18e5],[o,1,Jg],[o,3,108e5],[o,6,216e5],[o,12,432e5],[i,1,ty],[i,2,1728e5],[e,1,ny],[n,1,ey],[n,3,7776e6],[t,1,ry]];function c(n,e,i){const o=Math.abs(e-n)/i,a=r((([,,t])=>t)).right(u,o);if(a===u.length)return t.every(W(n/ry,e/ry,i));if(0===a)return Wg.every(Math.max(W(n,e,i),1));const[c,f]=u[o/u[a-1][2]=12)]},q:function(t){return 1+~~(t.getMonth()/3)},Q:k_,s:C_,S:Zv,u:Kv,U:Qv,V:t_,w:n_,W:e_,x:null,X:null,y:r_,Y:o_,Z:u_,"%":N_},m={a:function(t){return a[t.getUTCDay()]},A:function(t){return o[t.getUTCDay()]},b:function(t){return 
c[t.getUTCMonth()]},B:function(t){return u[t.getUTCMonth()]},c:null,d:c_,e:c_,f:d_,g:T_,G:S_,H:f_,I:s_,j:l_,L:h_,m:p_,M:g_,p:function(t){return i[+(t.getUTCHours()>=12)]},q:function(t){return 1+~~(t.getUTCMonth()/3)},Q:k_,s:C_,S:y_,u:v_,U:__,V:m_,w:x_,W:w_,x:null,X:null,y:M_,Y:A_,Z:E_,"%":N_},x={a:function(t,n,e){var r=d.exec(n.slice(e));return r?(t.w=p.get(r[0].toLowerCase()),e+r[0].length):-1},A:function(t,n,e){var r=l.exec(n.slice(e));return r?(t.w=h.get(r[0].toLowerCase()),e+r[0].length):-1},b:function(t,n,e){var r=v.exec(n.slice(e));return r?(t.m=_.get(r[0].toLowerCase()),e+r[0].length):-1},B:function(t,n,e){var r=g.exec(n.slice(e));return r?(t.m=y.get(r[0].toLowerCase()),e+r[0].length):-1},c:function(t,e,r){return T(t,n,e,r)},d:zv,e:zv,f:Uv,g:Nv,G:Ev,H:Dv,I:Dv,j:$v,L:qv,m:Pv,M:Rv,p:function(t,n,e){var r=f.exec(n.slice(e));return r?(t.p=s.get(r[0].toLowerCase()),e+r[0].length):-1},q:Cv,Q:Ov,s:Bv,S:Fv,u:Mv,U:Tv,V:Av,w:wv,W:Sv,x:function(t,n,r){return T(t,e,n,r)},X:function(t,n,e){return T(t,r,n,e)},y:Nv,Y:Ev,Z:kv,"%":Iv};function w(t,n){return function(e){var r,i,o,a=[],u=-1,c=0,f=t.length;for(e instanceof Date||(e=new Date(+e));++u53)return null;"w"in o||(o.w=1),"Z"in o?(i=(r=sv(lv(o.y,0,1))).getUTCDay(),r=i>4||0===i?qy.ceil(r):qy(r),r=yy.offset(r,7*(o.V-1)),o.y=r.getUTCFullYear(),o.m=r.getUTCMonth(),o.d=r.getUTCDate()+(o.w+6)%7):(i=(r=fv(lv(o.y,0,1))).getDay(),r=i>4||0===i?wy.ceil(r):wy(r),r=py.offset(r,7*(o.V-1)),o.y=r.getFullYear(),o.m=r.getMonth(),o.d=r.getDate()+(o.w+6)%7)}else("W"in o||"U"in o)&&("w"in o||(o.w="u"in o?o.u%7:"W"in o?1:0),i="Z"in o?sv(lv(o.y,0,1)).getUTCDay():fv(lv(o.y,0,1)).getDay(),o.m=0,o.d="W"in o?(o.w+6)%7+7*o.W-(i+5)%7:o.w+7*o.U-(i+6)%7);return"Z"in o?(o.H+=o.Z/100|0,o.M+=o.Z%100,sv(o)):fv(o)}}function T(t,n,e,r){for(var i,o,a=0,u=n.length,c=e.length;a=c)return-1;if(37===(i=n.charCodeAt(a++))){if(i=n.charAt(a++),!(o=x[i in pv?n.charAt(a++):i])||(r=o(t,e,r))<0)return-1}else if(i!=e.charCodeAt(r++))return-1}return r}return 
b.x=w(e,b),b.X=w(r,b),b.c=w(n,b),m.x=w(e,m),m.X=w(r,m),m.c=w(n,m),{format:function(t){var n=w(t+="",b);return n.toString=function(){return t},n},parse:function(t){var n=M(t+="",!1);return n.toString=function(){return t},n},utcFormat:function(t){var n=w(t+="",m);return n.toString=function(){return t},n},utcParse:function(t){var n=M(t+="",!0);return n.toString=function(){return t},n}}}var dv,pv={"-":"",_:" ",0:"0"},gv=/^\s*\d+/,yv=/^%/,vv=/[\\^$*+?|[\]().{}]/g;function _v(t,n,e){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return r+(o[t.toLowerCase(),n])))}function wv(t,n,e){var r=gv.exec(n.slice(e,e+1));return r?(t.w=+r[0],e+r[0].length):-1}function Mv(t,n,e){var r=gv.exec(n.slice(e,e+1));return r?(t.u=+r[0],e+r[0].length):-1}function Tv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.U=+r[0],e+r[0].length):-1}function Av(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.V=+r[0],e+r[0].length):-1}function Sv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.W=+r[0],e+r[0].length):-1}function Ev(t,n,e){var r=gv.exec(n.slice(e,e+4));return r?(t.y=+r[0],e+r[0].length):-1}function Nv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.y=+r[0]+(+r[0]>68?1900:2e3),e+r[0].length):-1}function kv(t,n,e){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(n.slice(e,e+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),e+r[0].length):-1}function Cv(t,n,e){var r=gv.exec(n.slice(e,e+1));return r?(t.q=3*r[0]-3,e+r[0].length):-1}function Pv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.m=r[0]-1,e+r[0].length):-1}function zv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.d=+r[0],e+r[0].length):-1}function $v(t,n,e){var r=gv.exec(n.slice(e,e+3));return r?(t.m=0,t.d=+r[0],e+r[0].length):-1}function Dv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.H=+r[0],e+r[0].length):-1}function Rv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.M=+r[0],e+r[0].length):-1}function Fv(t,n,e){var r=gv.exec(n.slice(e,e+2));return r?(t.S=+r[0],e+r[0].length):-1}function qv(t,n,e){var r=gv.exec(n.slice(e,e+3));return 
r?(t.L=+r[0],e+r[0].length):-1}function Uv(t,n,e){var r=gv.exec(n.slice(e,e+6));return r?(t.L=Math.floor(r[0]/1e3),e+r[0].length):-1}function Iv(t,n,e){var r=yv.exec(n.slice(e,e+1));return r?e+r[0].length:-1}function Ov(t,n,e){var r=gv.exec(n.slice(e));return r?(t.Q=+r[0],e+r[0].length):-1}function Bv(t,n,e){var r=gv.exec(n.slice(e));return r?(t.s=+r[0],e+r[0].length):-1}function Yv(t,n){return _v(t.getDate(),n,2)}function Lv(t,n){return _v(t.getHours(),n,2)}function jv(t,n){return _v(t.getHours()%12||12,n,2)}function Hv(t,n){return _v(1+py.count(tv(t),t),n,3)}function Xv(t,n){return _v(t.getMilliseconds(),n,3)}function Gv(t,n){return Xv(t,n)+"000"}function Vv(t,n){return _v(t.getMonth()+1,n,2)}function Wv(t,n){return _v(t.getMinutes(),n,2)}function Zv(t,n){return _v(t.getSeconds(),n,2)}function Kv(t){var n=t.getDay();return 0===n?7:n}function Qv(t,n){return _v(xy.count(tv(t)-1,t),n,2)}function Jv(t){var n=t.getDay();return n>=4||0===n?Ay(t):Ay.ceil(t)}function t_(t,n){return t=Jv(t),_v(Ay.count(tv(t),t)+(4===tv(t).getDay()),n,2)}function n_(t){return t.getDay()}function e_(t,n){return _v(wy.count(tv(t)-1,t),n,2)}function r_(t,n){return _v(t.getFullYear()%100,n,2)}function i_(t,n){return _v((t=Jv(t)).getFullYear()%100,n,2)}function o_(t,n){return _v(t.getFullYear()%1e4,n,4)}function a_(t,n){var e=t.getDay();return _v((t=e>=4||0===e?Ay(t):Ay.ceil(t)).getFullYear()%1e4,n,4)}function u_(t){var n=t.getTimezoneOffset();return(n>0?"-":(n*=-1,"+"))+_v(n/60|0,"0",2)+_v(n%60,"0",2)}function c_(t,n){return _v(t.getUTCDate(),n,2)}function f_(t,n){return _v(t.getUTCHours(),n,2)}function s_(t,n){return _v(t.getUTCHours()%12||12,n,2)}function l_(t,n){return _v(1+yy.count(ev(t),t),n,3)}function h_(t,n){return _v(t.getUTCMilliseconds(),n,3)}function d_(t,n){return h_(t,n)+"000"}function p_(t,n){return _v(t.getUTCMonth()+1,n,2)}function g_(t,n){return _v(t.getUTCMinutes(),n,2)}function y_(t,n){return _v(t.getUTCSeconds(),n,2)}function v_(t){var n=t.getUTCDay();return 
0===n?7:n}function __(t,n){return _v(Fy.count(ev(t)-1,t),n,2)}function b_(t){var n=t.getUTCDay();return n>=4||0===n?Oy(t):Oy.ceil(t)}function m_(t,n){return t=b_(t),_v(Oy.count(ev(t),t)+(4===ev(t).getUTCDay()),n,2)}function x_(t){return t.getUTCDay()}function w_(t,n){return _v(qy.count(ev(t)-1,t),n,2)}function M_(t,n){return _v(t.getUTCFullYear()%100,n,2)}function T_(t,n){return _v((t=b_(t)).getUTCFullYear()%100,n,2)}function A_(t,n){return _v(t.getUTCFullYear()%1e4,n,4)}function S_(t,n){var e=t.getUTCDay();return _v((t=e>=4||0===e?Oy(t):Oy.ceil(t)).getUTCFullYear()%1e4,n,4)}function E_(){return"+0000"}function N_(){return"%"}function k_(t){return+t}function C_(t){return Math.floor(+t/1e3)}function P_(n){return dv=hv(n),t.timeFormat=dv.format,t.timeParse=dv.parse,t.utcFormat=dv.utcFormat,t.utcParse=dv.utcParse,dv}t.timeFormat=void 0,t.timeParse=void 0,t.utcFormat=void 0,t.utcParse=void 0,P_({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var z_="%Y-%m-%dT%H:%M:%S.%LZ";var $_=Date.prototype.toISOString?function(t){return t.toISOString()}:t.utcFormat(z_),D_=$_;var R_=+new Date("2000-01-01T00:00:00.000Z")?function(t){var n=new Date(t);return isNaN(n)?null:n}:t.utcParse(z_),F_=R_;function q_(t){return new Date(t)}function U_(t){return t instanceof Date?+t:+new Date(+t)}function I_(t,n,e,r,i,o,a,u,c,f){var s=Sg(),l=s.invert,h=s.domain,d=f(".%L"),p=f(":%S"),g=f("%I:%M"),y=f("%I %p"),v=f("%a %d"),_=f("%b %d"),b=f("%B"),m=f("%Y");function x(t){return(c(t)Fr(t[t.length-1]),ib=new 
Array(3).concat("d8b365f5f5f55ab4ac","a6611adfc27d80cdc1018571","a6611adfc27df5f5f580cdc1018571","8c510ad8b365f6e8c3c7eae55ab4ac01665e","8c510ad8b365f6e8c3f5f5f5c7eae55ab4ac01665e","8c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e","8c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e","5430058c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e003c30","5430058c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e003c30").map(H_),ob=rb(ib),ab=new Array(3).concat("af8dc3f7f7f77fbf7b","7b3294c2a5cfa6dba0008837","7b3294c2a5cff7f7f7a6dba0008837","762a83af8dc3e7d4e8d9f0d37fbf7b1b7837","762a83af8dc3e7d4e8f7f7f7d9f0d37fbf7b1b7837","762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b7837","762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b7837","40004b762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b783700441b","40004b762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b783700441b").map(H_),ub=rb(ab),cb=new Array(3).concat("e9a3c9f7f7f7a1d76a","d01c8bf1b6dab8e1864dac26","d01c8bf1b6daf7f7f7b8e1864dac26","c51b7de9a3c9fde0efe6f5d0a1d76a4d9221","c51b7de9a3c9fde0eff7f7f7e6f5d0a1d76a4d9221","c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221","c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221","8e0152c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221276419","8e0152c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221276419").map(H_),fb=rb(cb),sb=new Array(3).concat("998ec3f7f7f7f1a340","5e3c99b2abd2fdb863e66101","5e3c99b2abd2f7f7f7fdb863e66101","542788998ec3d8daebfee0b6f1a340b35806","542788998ec3d8daebf7f7f7fee0b6f1a340b35806","5427888073acb2abd2d8daebfee0b6fdb863e08214b35806","5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b35806","2d004b5427888073acb2abd2d8daebfee0b6fdb863e08214b358067f3b08","2d004b5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b358067f3b08").map(H_),lb=rb(sb),hb=new 
Array(3).concat("ef8a62f7f7f767a9cf","ca0020f4a58292c5de0571b0","ca0020f4a582f7f7f792c5de0571b0","b2182bef8a62fddbc7d1e5f067a9cf2166ac","b2182bef8a62fddbc7f7f7f7d1e5f067a9cf2166ac","b2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac","b2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac","67001fb2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac053061","67001fb2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac053061").map(H_),db=rb(hb),pb=new Array(3).concat("ef8a62ffffff999999","ca0020f4a582bababa404040","ca0020f4a582ffffffbababa404040","b2182bef8a62fddbc7e0e0e09999994d4d4d","b2182bef8a62fddbc7ffffffe0e0e09999994d4d4d","b2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d","b2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d","67001fb2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d1a1a1a","67001fb2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d1a1a1a").map(H_),gb=rb(pb),yb=new Array(3).concat("fc8d59ffffbf91bfdb","d7191cfdae61abd9e92c7bb6","d7191cfdae61ffffbfabd9e92c7bb6","d73027fc8d59fee090e0f3f891bfdb4575b4","d73027fc8d59fee090ffffbfe0f3f891bfdb4575b4","d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4","d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4","a50026d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4313695","a50026d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4313695").map(H_),vb=rb(yb),_b=new Array(3).concat("fc8d59ffffbf91cf60","d7191cfdae61a6d96a1a9641","d7191cfdae61ffffbfa6d96a1a9641","d73027fc8d59fee08bd9ef8b91cf601a9850","d73027fc8d59fee08bffffbfd9ef8b91cf601a9850","d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850","d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850","a50026d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850006837","a50026d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850006837").map(H_),bb=rb(_b),mb=new 
Array(3).concat("fc8d59ffffbf99d594","d7191cfdae61abdda42b83ba","d7191cfdae61ffffbfabdda42b83ba","d53e4ffc8d59fee08be6f59899d5943288bd","d53e4ffc8d59fee08bffffbfe6f59899d5943288bd","d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd","d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd","9e0142d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd5e4fa2","9e0142d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd5e4fa2").map(H_),xb=rb(mb),wb=new Array(3).concat("e5f5f999d8c92ca25f","edf8fbb2e2e266c2a4238b45","edf8fbb2e2e266c2a42ca25f006d2c","edf8fbccece699d8c966c2a42ca25f006d2c","edf8fbccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45006d2c00441b").map(H_),Mb=rb(wb),Tb=new Array(3).concat("e0ecf49ebcda8856a7","edf8fbb3cde38c96c688419d","edf8fbb3cde38c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d810f7c4d004b").map(H_),Ab=rb(Tb),Sb=new Array(3).concat("e0f3dba8ddb543a2ca","f0f9e8bae4bc7bccc42b8cbe","f0f9e8bae4bc7bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe0868ac084081").map(H_),Eb=rb(Sb),Nb=new Array(3).concat("fee8c8fdbb84e34a33","fef0d9fdcc8afc8d59d7301f","fef0d9fdcc8afc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301fb300007f0000").map(H_),kb=rb(Nb),Cb=new 
Array(3).concat("ece2f0a6bddb1c9099","f6eff7bdc9e167a9cf02818a","f6eff7bdc9e167a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016c59014636").map(H_),Pb=rb(Cb),zb=new Array(3).concat("ece7f2a6bddb2b8cbe","f1eef6bdc9e174a9cf0570b0","f1eef6bdc9e174a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0045a8d023858").map(H_),$b=rb(zb),Db=new Array(3).concat("e7e1efc994c7dd1c77","f1eef6d7b5d8df65b0ce1256","f1eef6d7b5d8df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125698004367001f").map(H_),Rb=rb(Db),Fb=new Array(3).concat("fde0ddfa9fb5c51b8a","feebe2fbb4b9f768a1ae017e","feebe2fbb4b9f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a017749006a").map(H_),qb=rb(Fb),Ub=new Array(3).concat("edf8b17fcdbb2c7fb8","ffffcca1dab441b6c4225ea8","ffffcca1dab441b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea8253494081d58").map(H_),Ib=rb(Ub),Ob=new Array(3).concat("f7fcb9addd8e31a354","ffffccc2e69978c679238443","ffffccc2e69978c67931a354006837","ffffccd9f0a3addd8e78c67931a354006837","ffffccd9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443006837004529").map(H_),Bb=rb(Ob),Yb=new 
Array(3).concat("fff7bcfec44fd95f0e","ffffd4fed98efe9929cc4c02","ffffd4fed98efe9929d95f0e993404","ffffd4fee391fec44ffe9929d95f0e993404","ffffd4fee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c02993404662506").map(H_),Lb=rb(Yb),jb=new Array(3).concat("ffeda0feb24cf03b20","ffffb2fecc5cfd8d3ce31a1c","ffffb2fecc5cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cbd0026800026").map(H_),Hb=rb(jb),Xb=new Array(3).concat("deebf79ecae13182bd","eff3ffbdd7e76baed62171b5","eff3ffbdd7e76baed63182bd08519c","eff3ffc6dbef9ecae16baed63182bd08519c","eff3ffc6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b508519c08306b").map(H_),Gb=rb(Xb),Vb=new Array(3).concat("e5f5e0a1d99b31a354","edf8e9bae4b374c476238b45","edf8e9bae4b374c47631a354006d2c","edf8e9c7e9c0a1d99b74c47631a354006d2c","edf8e9c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45006d2c00441b").map(H_),Wb=rb(Vb),Zb=new Array(3).concat("f0f0f0bdbdbd636363","f7f7f7cccccc969696525252","f7f7f7cccccc969696636363252525","f7f7f7d9d9d9bdbdbd969696636363252525","f7f7f7d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525000000").map(H_),Kb=rb(Zb),Qb=new Array(3).concat("efedf5bcbddc756bb1","f2f0f7cbc9e29e9ac86a51a3","f2f0f7cbc9e29e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a354278f3f007d").map(H_),Jb=rb(Qb),tm=new 
Array(3).concat("fee0d2fc9272de2d26","fee5d9fcae91fb6a4acb181d","fee5d9fcae91fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181da50f1567000d").map(H_),nm=rb(tm),em=new Array(3).concat("fee6cefdae6be6550d","feeddefdbe85fd8d3cd94701","feeddefdbe85fd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d94801a636037f2704").map(H_),rm=rb(em);var im=hi(Tr(300,.5,0),Tr(-240,.5,1)),om=hi(Tr(-100,.75,.35),Tr(80,1.5,.8)),am=hi(Tr(260,.75,.35),Tr(80,1.5,.8)),um=Tr();var cm=Fe(),fm=Math.PI/3,sm=2*Math.PI/3;function lm(t){var n=t.length;return function(e){return t[Math.max(0,Math.min(n-1,Math.floor(e*n)))]}}var hm=lm(H_("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4c
c26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")),dm=lm(H_("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),pm=lm(H_("
00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),gm=lm(H_("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba5
8e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921"));function ym(t){return function(){return t}}const vm=Math.abs,_m=Math.atan2,bm=Math.cos,mm=Math.max,xm=Math.min,wm=Math.sin,Mm=Math.sqrt,Tm=1e-12,Am=Math.PI,Sm=Am/2,Em=2*Am;function Nm(t){return t>=1?Sm:t<=-1?-Sm:Math.asin(t)}function km(t){let n=3;return t.digits=function(e){if(!arguments.length)return n;if(null==e)n=null;else{const t=Math.floor(e);if(!(t>=0))throw new RangeError(`invalid digits: ${e}`);n=t}return t},()=>new Ua(n)}function Cm(t){return t.innerRadius}function Pm(t){return t.outerRadius}function zm(t){return t.startAngle}function $m(t){return t.endAngle}function Dm(t){return t&&t.padAngle}function Rm(t,n,e,r,i,o,a){var u=t-e,c=n-r,f=(a?o:-o)/Mm(u*u+c*c),s=f*c,l=-f*u,h=t+s,d=n+l,p=e+s,g=r+l,y=(h+p)/2,v=(d+g)/2,_=p-h,b=g-d,m=_*_+b*b,x=i-o,w=h*g-p*d,M=(b<0?-1:1)*Mm(mm(0,x*x*m-w*w)),T=(w*b-_*M)/m,A=(-w*_-b*M)/m,S=(w*b+_*M)/m,E=(-w*_+b*M)/m,N=T-y,k=A-v,C=S-y,P=E-v;return 
N*N+k*k>C*C+P*P&&(T=S,A=E),{cx:T,cy:A,x01:-s,y01:-l,x11:T*(i/x-1),y11:A*(i/x-1)}}var Fm=Array.prototype.slice;function qm(t){return"object"==typeof t&&"length"in t?t:Array.from(t)}function Um(t){this._context=t}function Im(t){return new Um(t)}function Om(t){return t[0]}function Bm(t){return t[1]}function Ym(t,n){var e=ym(!0),r=null,i=Im,o=null,a=km(u);function u(u){var c,f,s,l=(u=qm(u)).length,h=!1;for(null==r&&(o=i(s=a())),c=0;c<=l;++c)!(c=l;--h)u.point(v[h],_[h]);u.lineEnd(),u.areaEnd()}y&&(v[s]=+t(d,s,f),_[s]=+n(d,s,f),u.point(r?+r(d,s,f):v[s],e?+e(d,s,f):_[s]))}if(p)return u=null,p+""||null}function s(){return Ym().defined(i).curve(a).context(o)}return t="function"==typeof t?t:void 0===t?Om:ym(+t),n="function"==typeof n?n:ym(void 0===n?0:+n),e="function"==typeof e?e:void 0===e?Bm:ym(+e),f.x=function(n){return arguments.length?(t="function"==typeof n?n:ym(+n),r=null,f):t},f.x0=function(n){return arguments.length?(t="function"==typeof n?n:ym(+n),f):t},f.x1=function(t){return arguments.length?(r=null==t?null:"function"==typeof t?t:ym(+t),f):r},f.y=function(t){return arguments.length?(n="function"==typeof t?t:ym(+t),e=null,f):n},f.y0=function(t){return arguments.length?(n="function"==typeof t?t:ym(+t),f):n},f.y1=function(t){return arguments.length?(e=null==t?null:"function"==typeof t?t:ym(+t),f):e},f.lineX0=f.lineY0=function(){return s().x(t).y(n)},f.lineY1=function(){return s().x(t).y(e)},f.lineX1=function(){return s().x(r).y(n)},f.defined=function(t){return arguments.length?(i="function"==typeof t?t:ym(!!t),f):i},f.curve=function(t){return arguments.length?(a=t,null!=o&&(u=a(o)),f):a},f.context=function(t){return arguments.length?(null==t?o=u=null:u=a(o=t),f):o},f}function jm(t,n){return nt?1:n>=t?0:NaN}function Hm(t){return 
t}Um.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._context.lineTo(t,n)}}};var Xm=Vm(Im);function Gm(t){this._curve=t}function Vm(t){function n(n){return new Gm(t(n))}return n._curve=t,n}function Wm(t){var n=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?n(Vm(t)):n()._curve},t}function Zm(){return Wm(Ym().curve(Xm))}function Km(){var t=Lm().curve(Xm),n=t.curve,e=t.lineX0,r=t.lineX1,i=t.lineY0,o=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return Wm(e())},delete t.lineX0,t.lineEndAngle=function(){return Wm(r())},delete t.lineX1,t.lineInnerRadius=function(){return Wm(i())},delete t.lineY0,t.lineOuterRadius=function(){return Wm(o())},delete t.lineY1,t.curve=function(t){return arguments.length?n(Vm(t)):n()._curve},t}function Qm(t,n){return[(n=+n)*Math.cos(t-=Math.PI/2),n*Math.sin(t)]}Gm.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,n){this._curve.point(n*Math.sin(t),n*-Math.cos(t))}};class Jm{constructor(t,n){this._context=t,this._x=n}areaStart(){this._line=0}areaEnd(){this._line=NaN}lineStart(){this._point=0}lineEnd(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line}point(t,n){switch(t=+t,n=+n,this._point){case 
0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._x?this._context.bezierCurveTo(this._x0=(this._x0+t)/2,this._y0,this._x0,n,t,n):this._context.bezierCurveTo(this._x0,this._y0=(this._y0+n)/2,t,this._y0,t,n)}this._x0=t,this._y0=n}}class tx{constructor(t){this._context=t}lineStart(){this._point=0}lineEnd(){}point(t,n){if(t=+t,n=+n,0===this._point)this._point=1;else{const e=Qm(this._x0,this._y0),r=Qm(this._x0,this._y0=(this._y0+n)/2),i=Qm(t,this._y0),o=Qm(t,n);this._context.moveTo(...e),this._context.bezierCurveTo(...r,...i,...o)}this._x0=t,this._y0=n}}function nx(t){return new Jm(t,!0)}function ex(t){return new Jm(t,!1)}function rx(t){return new tx(t)}function ix(t){return t.source}function ox(t){return t.target}function ax(t){let n=ix,e=ox,r=Om,i=Bm,o=null,a=null,u=km(c);function c(){let c;const f=Fm.call(arguments),s=n.apply(this,f),l=e.apply(this,f);if(null==o&&(a=t(c=u())),a.lineStart(),f[0]=s,a.point(+r.apply(this,f),+i.apply(this,f)),f[0]=l,a.point(+r.apply(this,f),+i.apply(this,f)),a.lineEnd(),c)return a=null,c+""||null}return c.source=function(t){return arguments.length?(n=t,c):n},c.target=function(t){return arguments.length?(e=t,c):e},c.x=function(t){return arguments.length?(r="function"==typeof t?t:ym(+t),c):r},c.y=function(t){return arguments.length?(i="function"==typeof t?t:ym(+t),c):i},c.context=function(n){return arguments.length?(null==n?o=a=null:a=t(o=n),c):o},c}const ux=Mm(3);var cx={draw(t,n){const e=.59436*Mm(n+xm(n/28,.75)),r=e/2,i=r*ux;t.moveTo(0,e),t.lineTo(0,-e),t.moveTo(-i,-r),t.lineTo(i,r),t.moveTo(-i,r),t.lineTo(i,-r)}},fx={draw(t,n){const e=Mm(n/Am);t.moveTo(e,0),t.arc(0,0,e,0,Em)}},sx={draw(t,n){const e=Mm(n/5)/2;t.moveTo(-3*e,-e),t.lineTo(-e,-e),t.lineTo(-e,-3*e),t.lineTo(e,-3*e),t.lineTo(e,-e),t.lineTo(3*e,-e),t.lineTo(3*e,e),t.lineTo(e,e),t.lineTo(e,3*e),t.lineTo(-e,3*e),t.lineTo(-e,e),t.lineTo(-3*e,e),t.closePath()}};const lx=Mm(1/3),hx=2*lx;var dx={draw(t,n){const 
e=Mm(n/hx),r=e*lx;t.moveTo(0,-e),t.lineTo(r,0),t.lineTo(0,e),t.lineTo(-r,0),t.closePath()}},px={draw(t,n){const e=.62625*Mm(n);t.moveTo(0,-e),t.lineTo(e,0),t.lineTo(0,e),t.lineTo(-e,0),t.closePath()}},gx={draw(t,n){const e=.87559*Mm(n-xm(n/7,2));t.moveTo(-e,0),t.lineTo(e,0),t.moveTo(0,e),t.lineTo(0,-e)}},yx={draw(t,n){const e=Mm(n),r=-e/2;t.rect(r,r,e,e)}},vx={draw(t,n){const e=.4431*Mm(n);t.moveTo(e,e),t.lineTo(e,-e),t.lineTo(-e,-e),t.lineTo(-e,e),t.closePath()}};const _x=wm(Am/10)/wm(7*Am/10),bx=wm(Em/10)*_x,mx=-bm(Em/10)*_x;var xx={draw(t,n){const e=Mm(.8908130915292852*n),r=bx*e,i=mx*e;t.moveTo(0,-e),t.lineTo(r,i);for(let n=1;n<5;++n){const o=Em*n/5,a=bm(o),u=wm(o);t.lineTo(u*e,-a*e),t.lineTo(a*r-u*i,u*r+a*i)}t.closePath()}};const wx=Mm(3);var Mx={draw(t,n){const e=-Mm(n/(3*wx));t.moveTo(0,2*e),t.lineTo(-wx*e,-e),t.lineTo(wx*e,-e),t.closePath()}};const Tx=Mm(3);var Ax={draw(t,n){const e=.6824*Mm(n),r=e/2,i=e*Tx/2;t.moveTo(0,-e),t.lineTo(i,r),t.lineTo(-i,r),t.closePath()}};const Sx=-.5,Ex=Mm(3)/2,Nx=1/Mm(12),kx=3*(Nx/2+1);var Cx={draw(t,n){const e=Mm(n/kx),r=e/2,i=e*Nx,o=r,a=e*Nx+e,u=-o,c=a;t.moveTo(r,i),t.lineTo(o,a),t.lineTo(u,c),t.lineTo(Sx*r-Ex*i,Ex*r+Sx*i),t.lineTo(Sx*o-Ex*a,Ex*o+Sx*a),t.lineTo(Sx*u-Ex*c,Ex*u+Sx*c),t.lineTo(Sx*r+Ex*i,Sx*i-Ex*r),t.lineTo(Sx*o+Ex*a,Sx*a-Ex*o),t.lineTo(Sx*u+Ex*c,Sx*c-Ex*u),t.closePath()}},Px={draw(t,n){const e=.6189*Mm(n-xm(n/6,1.7));t.moveTo(-e,-e),t.lineTo(e,e),t.moveTo(-e,e),t.lineTo(e,-e)}};const zx=[fx,sx,dx,yx,xx,Mx,Cx],$x=[fx,gx,Px,Ax,cx,vx,px];function Dx(){}function Rx(t,n,e){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+n)/6,(t._y0+4*t._y1+e)/6)}function Fx(t){this._context=t}function qx(t){this._context=t}function Ux(t){this._context=t}function Ix(t,n){this._basis=new 
Fx(t),this._beta=n}Fx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:Rx(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:Rx(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},qx.prototype={areaStart:Dx,areaEnd:Dx,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x2=t,this._y2=n;break;case 1:this._point=2,this._x3=t,this._y3=n;break;case 2:this._point=3,this._x4=t,this._y4=n,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+n)/6);break;default:Rx(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},Ux.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 
0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var e=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+n)/6;this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break;case 3:this._point=4;default:Rx(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},Ix.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,n=this._y,e=t.length-1;if(e>0)for(var r,i=t[0],o=n[0],a=t[e]-i,u=n[e]-o,c=-1;++c<=e;)r=c/e,this._basis.point(this._beta*t[c]+(1-this._beta)*(i+r*a),this._beta*n[c]+(1-this._beta)*(o+r*u));this._x=this._y=null,this._basis.lineEnd()},point:function(t,n){this._x.push(+t),this._y.push(+n)}};var Ox=function t(n){function e(t){return 1===n?new Fx(t):new Ix(t,n)}return e.beta=function(n){return t(+n)},e}(.85);function Bx(t,n,e){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-n),t._y2+t._k*(t._y1-e),t._x2,t._y2)}function Yx(t,n){this._context=t,this._k=(1-n)/6}Yx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:Bx(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2,this._x1=t,this._y1=n;break;case 2:this._point=3;default:Bx(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Lx=function t(n){function e(t){return new Yx(t,n)}return e.tension=function(n){return t(+n)},e}(0);function 
jx(t,n){this._context=t,this._k=(1-n)/6}jx.prototype={areaStart:Dx,areaEnd:Dx,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:Bx(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Hx=function t(n){function e(t){return new jx(t,n)}return e.tension=function(n){return t(+n)},e}(0);function Xx(t,n){this._context=t,this._k=(1-n)/6}Xx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:Bx(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Gx=function t(n){function e(t){return new Xx(t,n)}return e.tension=function(n){return t(+n)},e}(0);function Vx(t,n,e){var r=t._x1,i=t._y1,o=t._x2,a=t._y2;if(t._l01_a>Tm){var 
u=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,c=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*u-t._x0*t._l12_2a+t._x2*t._l01_2a)/c,i=(i*u-t._y0*t._l12_2a+t._y2*t._l01_2a)/c}if(t._l23_a>Tm){var f=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,s=3*t._l23_a*(t._l23_a+t._l12_a);o=(o*f+t._x1*t._l23_2a-n*t._l12_2a)/s,a=(a*f+t._y1*t._l23_2a-e*t._l12_2a)/s}t._context.bezierCurveTo(r,i,o,a,t._x2,t._y2)}function Wx(t,n){this._context=t,this._alpha=n}Wx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 2:this._point=3;default:Vx(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Zx=function t(n){function e(t){return n?new Wx(t,n):new Yx(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);function Kx(t,n){this._context=t,this._alpha=n}Kx.prototype={areaStart:Dx,areaEnd:Dx,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 
2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:Vx(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Qx=function t(n){function e(t){return n?new Kx(t,n):new jx(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);function Jx(t,n){this._context=t,this._alpha=n}Jx.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:Vx(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var tw=function t(n){function e(t){return n?new Jx(t,n):new Xx(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);function nw(t){this._context=t}function ew(t){return 
t<0?-1:1}function rw(t,n,e){var r=t._x1-t._x0,i=n-t._x1,o=(t._y1-t._y0)/(r||i<0&&-0),a=(e-t._y1)/(i||r<0&&-0),u=(o*i+a*r)/(r+i);return(ew(o)+ew(a))*Math.min(Math.abs(o),Math.abs(a),.5*Math.abs(u))||0}function iw(t,n){var e=t._x1-t._x0;return e?(3*(t._y1-t._y0)/e-n)/2:n}function ow(t,n,e){var r=t._x0,i=t._y0,o=t._x1,a=t._y1,u=(o-r)/3;t._context.bezierCurveTo(r+u,i+u*n,o-u,a-u*e,o,a)}function aw(t){this._context=t}function uw(t){this._context=new cw(t)}function cw(t){this._context=t}function fw(t){this._context=t}function sw(t){var n,e,r=t.length-1,i=new Array(r),o=new Array(r),a=new Array(r);for(i[0]=0,o[0]=2,a[0]=t[0]+2*t[1],n=1;n=0;--n)i[n]=(a[n]-i[n+1])/o[n];for(o[r-1]=(t[r]+i[r-1])/2,n=0;n1)for(var e,r,i,o=1,a=t[n[0]],u=a.length;o=0;)e[n]=n;return e}function pw(t,n){return t[n]}function gw(t){const n=[];return n.key=t,n}function yw(t){var n=t.map(vw);return dw(t).sort((function(t,e){return n[t]-n[e]}))}function vw(t){for(var n,e=-1,r=0,i=t.length,o=-1/0;++eo&&(o=n,r=e);return r}function _w(t){var n=t.map(bw);return dw(t).sort((function(t,e){return n[t]-n[e]}))}function bw(t){for(var n,e=0,r=-1,i=t.length;++r=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,n),this._context.lineTo(t,n);else{var e=this._x*(1-this._t)+t*this._t;this._context.lineTo(e,this._y),this._context.lineTo(e,n)}}this._x=t,this._y=n}};var mw=t=>()=>t;function xw(t,{sourceEvent:n,target:e,transform:r,dispatch:i}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:n,enumerable:!0,configurable:!0},target:{value:e,enumerable:!0,configurable:!0},transform:{value:r,enumerable:!0,configurable:!0},_:{value:i}})}function ww(t,n,e){this.k=t,this.x=n,this.y=e}ww.prototype={constructor:ww,scale:function(t){return 1===t?this:new 
ww(this.k*t,this.x,this.y)},translate:function(t,n){return 0===t&0===n?this:new ww(this.k,this.x+this.k*t,this.y+this.k*n)},apply:function(t){return[t[0]*this.k+this.x,t[1]*this.k+this.y]},applyX:function(t){return t*this.k+this.x},applyY:function(t){return t*this.k+this.y},invert:function(t){return[(t[0]-this.x)/this.k,(t[1]-this.y)/this.k]},invertX:function(t){return(t-this.x)/this.k},invertY:function(t){return(t-this.y)/this.k},rescaleX:function(t){return t.copy().domain(t.range().map(this.invertX,this).map(t.invert,t))},rescaleY:function(t){return t.copy().domain(t.range().map(this.invertY,this).map(t.invert,t))},toString:function(){return"translate("+this.x+","+this.y+") scale("+this.k+")"}};var Mw=new ww(1,0,0);function Tw(t){for(;!t.__zoom;)if(!(t=t.parentNode))return Mw;return t.__zoom}function Aw(t){t.stopImmediatePropagation()}function Sw(t){t.preventDefault(),t.stopImmediatePropagation()}function Ew(t){return!(t.ctrlKey&&"wheel"!==t.type||t.button)}function Nw(){var t=this;return t instanceof SVGElement?(t=t.ownerSVGElement||t).hasAttribute("viewBox")?[[(t=t.viewBox.baseVal).x,t.y],[t.x+t.width,t.y+t.height]]:[[0,0],[t.width.baseVal.value,t.height.baseVal.value]]:[[0,0],[t.clientWidth,t.clientHeight]]}function kw(){return this.__zoom||Mw}function Cw(t){return-t.deltaY*(1===t.deltaMode?.05:t.deltaMode?1:.002)*(t.ctrlKey?10:1)}function Pw(){return navigator.maxTouchPoints||"ontouchstart"in this}function zw(t,n,e){var r=t.invertX(n[0][0])-e[0][0],i=t.invertX(n[1][0])-e[1][0],o=t.invertY(n[0][1])-e[0][1],a=t.invertY(n[1][1])-e[1][1];return t.translate(i>r?(r+i)/2:Math.min(0,r)||Math.max(0,i),a>o?(o+a)/2:Math.min(0,o)||Math.max(0,a))}Tw.prototype=ww.prototype,t.Adder=T,t.Delaunay=Lu,t.FormatSpecifier=tf,t.InternMap=InternMap,t.InternSet=InternSet,t.Node=Qd,t.Path=Ua,t.Voronoi=qu,t.ZoomTransform=ww,t.active=function(t,n){var e,r,i=t.__transition;if(i)for(r in n=null==n?null:n+"",i)if((e=i[r]).state>qi&&e.name===n)return new po([[t]],Zo,n,+r);return 
null},t.arc=function(){var t=Cm,n=Pm,e=ym(0),r=null,i=zm,o=$m,a=Dm,u=null,c=km(f);function f(){var f,s,l=+t.apply(this,arguments),h=+n.apply(this,arguments),d=i.apply(this,arguments)-Sm,p=o.apply(this,arguments)-Sm,g=vm(p-d),y=p>d;if(u||(u=f=c()),hTm)if(g>Em-Tm)u.moveTo(h*bm(d),h*wm(d)),u.arc(0,0,h,d,p,!y),l>Tm&&(u.moveTo(l*bm(p),l*wm(p)),u.arc(0,0,l,p,d,y));else{var v,_,b=d,m=p,x=d,w=p,M=g,T=g,A=a.apply(this,arguments)/2,S=A>Tm&&(r?+r.apply(this,arguments):Mm(l*l+h*h)),E=xm(vm(h-l)/2,+e.apply(this,arguments)),N=E,k=E;if(S>Tm){var C=Nm(S/l*wm(A)),P=Nm(S/h*wm(A));(M-=2*C)>Tm?(x+=C*=y?1:-1,w-=C):(M=0,x=w=(d+p)/2),(T-=2*P)>Tm?(b+=P*=y?1:-1,m-=P):(T=0,b=m=(d+p)/2)}var z=h*bm(b),$=h*wm(b),D=l*bm(w),R=l*wm(w);if(E>Tm){var F,q=h*bm(m),U=h*wm(m),I=l*bm(x),O=l*wm(x);if(g1?0:t<-1?Am:Math.acos(t)}((B*L+Y*j)/(Mm(B*B+Y*Y)*Mm(L*L+j*j)))/2),X=Mm(F[0]*F[0]+F[1]*F[1]);N=xm(E,(l-X)/(H-1)),k=xm(E,(h-X)/(H+1))}else N=k=0}T>Tm?k>Tm?(v=Rm(I,O,z,$,h,k,y),_=Rm(q,U,D,R,h,k,y),u.moveTo(v.cx+v.x01,v.cy+v.y01),kTm&&M>Tm?N>Tm?(v=Rm(D,R,q,U,l,-N,y),_=Rm(z,$,I,O,l,-N,y),u.lineTo(v.cx+v.x01,v.cy+v.y01),N=0))throw new RangeError("invalid r");let e=t.length;if(!((e=Math.floor(e))>=0))throw new RangeError("invalid length");if(!e||!n)return t;const r=y(n),i=t.slice();return r(t,i,0,e,1),r(i,t,0,e,1),r(t,i,0,e,1),t},t.blur2=l,t.blurImage=h,t.brush=function(){return wa(la)},t.brushSelection=function(t){var n=t.__brush;return n?n.dim.output(n.selection):null},t.brushX=function(){return wa(fa)},t.brushY=function(){return wa(sa)},t.buffer=function(t,n){return fetch(t,n).then(_c)},t.chord=function(){return za(!1,!1)},t.chordDirected=function(){return za(!0,!1)},t.chordTranspose=function(){return za(!1,!0)},t.cluster=function(){var t=Ld,n=1,e=1,r=!1;function i(i){var o,a=0;i.eachAfter((function(n){var e=n.children;e?(n.x=function(t){return t.reduce(jd,0)/t.length}(e),n.y=function(t){return 1+t.reduce(Hd,0)}(e)):(n.x=o?a+=t(n,o):0,n.y=0,o=n)}));var u=function(t){for(var n;n=t.children;)t=n[0];return 
t}(i),c=function(t){for(var n;n=t.children;)t=n[n.length-1];return t}(i),f=u.x-t(u,c)/2,s=c.x+t(c,u)/2;return i.eachAfter(r?function(t){t.x=(t.x-i.x)*n,t.y=(i.y-t.y)*e}:function(t){t.x=(t.x-f)/(s-f)*n,t.y=(1-(i.y?t.y/i.y:1))*e})}return i.separation=function(n){return arguments.length?(t=n,i):t},i.size=function(t){return arguments.length?(r=!1,n=+t[0],e=+t[1],i):r?null:[n,e]},i.nodeSize=function(t){return arguments.length?(r=!0,n=+t[0],e=+t[1],i):r?[n,e]:null},i},t.color=ze,t.contourDensity=function(){var t=fu,n=su,e=lu,r=960,i=500,o=20,a=2,u=3*o,c=r+2*u>>a,f=i+2*u>>a,s=Qa(20);function h(r){var i=new Float32Array(c*f),s=Math.pow(2,-a),h=-1;for(const o of r){var d=(t(o,++h,r)+u)*s,p=(n(o,h,r)+u)*s,g=+e(o,h,r);if(g&&d>=0&&d=0&&pt*r)))(n).map(((t,n)=>(t.value=+e[n],p(t))))}function p(t){return t.coordinates.forEach(g),t}function g(t){t.forEach(y)}function y(t){t.forEach(v)}function v(t){t[0]=t[0]*Math.pow(2,a)-u,t[1]=t[1]*Math.pow(2,a)-u}function _(){return c=r+2*(u=3*o)>>a,f=i+2*u>>a,d}return d.contours=function(t){var n=h(t),e=iu().size([c,f]),r=Math.pow(2,2*a),i=t=>{t=+t;var i=p(e.contour(n,t*r));return i.value=t,i};return Object.defineProperty(i,"max",{get:()=>J(n)/r}),i},d.x=function(n){return arguments.length?(t="function"==typeof n?n:Qa(+n),d):t},d.y=function(t){return arguments.length?(n="function"==typeof t?t:Qa(+t),d):n},d.weight=function(t){return arguments.length?(e="function"==typeof t?t:Qa(+t),d):e},d.size=function(t){if(!arguments.length)return[r,i];var n=+t[0],e=+t[1];if(!(n>=0&&e>=0))throw new Error("invalid size");return r=n,i=e,_()},d.cellSize=function(t){if(!arguments.length)return 1<=1))throw new Error("invalid cell size");return a=Math.floor(Math.log(t)/Math.LN2),_()},d.thresholds=function(t){return arguments.length?(s="function"==typeof t?t:Array.isArray(t)?Qa(Za.call(t)):Qa(t),d):s},d.bandwidth=function(t){if(!arguments.length)return Math.sqrt(o*(o+1));if(!((t=+t)>=0))throw new Error("invalid bandwidth");return 
o=(Math.sqrt(4*t*t+1)-1)/2,_()},d},t.contours=iu,t.count=v,t.create=function(t){return Zn(Yt(t).call(document.documentElement))},t.creator=Yt,t.cross=function(...t){const n="function"==typeof t[t.length-1]&&function(t){return n=>t(...n)}(t.pop()),e=(t=t.map(m)).map(_),r=t.length-1,i=new Array(r+1).fill(0),o=[];if(r<0||e.some(b))return o;for(;;){o.push(i.map(((n,e)=>t[e][n])));let a=r;for(;++i[a]===e[a];){if(0===a)return n?o.map(n):o;i[a--]=0}}},t.csv=wc,t.csvFormat=rc,t.csvFormatBody=ic,t.csvFormatRow=ac,t.csvFormatRows=oc,t.csvFormatValue=uc,t.csvParse=nc,t.csvParseRows=ec,t.cubehelix=Tr,t.cumsum=function(t,n){var e=0,r=0;return Float64Array.from(t,void 0===n?t=>e+=+t||0:i=>e+=+n(i,r++,t)||0)},t.curveBasis=function(t){return new Fx(t)},t.curveBasisClosed=function(t){return new qx(t)},t.curveBasisOpen=function(t){return new Ux(t)},t.curveBumpX=nx,t.curveBumpY=ex,t.curveBundle=Ox,t.curveCardinal=Lx,t.curveCardinalClosed=Hx,t.curveCardinalOpen=Gx,t.curveCatmullRom=Zx,t.curveCatmullRomClosed=Qx,t.curveCatmullRomOpen=tw,t.curveLinear=Im,t.curveLinearClosed=function(t){return new nw(t)},t.curveMonotoneX=function(t){return new aw(t)},t.curveMonotoneY=function(t){return new uw(t)},t.curveNatural=function(t){return new fw(t)},t.curveStep=function(t){return new lw(t,.5)},t.curveStepAfter=function(t){return new lw(t,1)},t.curveStepBefore=function(t){return new lw(t,0)},t.descending=e,t.deviation=w,t.difference=function(t,...n){t=new InternSet(t);for(const e of n)for(const n of e)t.delete(n);return t},t.disjoint=function(t,n){const e=n[Symbol.iterator](),r=new InternSet;for(const n of t){if(r.has(n))return!1;let t,i;for(;({value:t,done:i}=e.next())&&!i;){if(Object.is(n,t))return!1;r.add(t)}}return!0},t.dispatch=$t,t.drag=function(){var t,n,e,r,i=se,o=le,a=he,u=de,c={},f=$t("start","drag","end"),s=0,l=0;function h(t){t.on("mousedown.drag",d).filter(u).on("touchstart.drag",y).on("touchmove.drag",v,ee).on("touchend.drag 
touchcancel.drag",_).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function d(a,u){if(!r&&i.call(this,a,u)){var c=b(this,o.call(this,a,u),a,u,"mouse");c&&(Zn(a.view).on("mousemove.drag",p,re).on("mouseup.drag",g,re),ae(a.view),ie(a),e=!1,t=a.clientX,n=a.clientY,c("start",a))}}function p(r){if(oe(r),!e){var i=r.clientX-t,o=r.clientY-n;e=i*i+o*o>l}c.mouse("drag",r)}function g(t){Zn(t.view).on("mousemove.drag mouseup.drag",null),ue(t.view,e),oe(t),c.mouse("end",t)}function y(t,n){if(i.call(this,t,n)){var e,r,a=t.changedTouches,u=o.call(this,t,n),c=a.length;for(e=0;e+t,t.easePoly=wo,t.easePolyIn=mo,t.easePolyInOut=wo,t.easePolyOut=xo,t.easeQuad=_o,t.easeQuadIn=function(t){return t*t},t.easeQuadInOut=_o,t.easeQuadOut=function(t){return t*(2-t)},t.easeSin=Ao,t.easeSinIn=function(t){return 1==+t?1:1-Math.cos(t*To)},t.easeSinInOut=Ao,t.easeSinOut=function(t){return Math.sin(t*To)},t.every=function(t,n){if("function"!=typeof n)throw new TypeError("test is not a function");let e=-1;for(const r of t)if(!n(r,++e,t))return!1;return!0},t.extent=M,t.fcumsum=function(t,n){const e=new T;let r=-1;return Float64Array.from(t,void 0===n?t=>e.add(+t||0):i=>e.add(+n(i,++r,t)||0))},t.filter=function(t,n){if("function"!=typeof n)throw new TypeError("test is not a function");const e=[];let r=-1;for(const i of t)n(i,++r,t)&&e.push(i);return e},t.flatGroup=function(t,...n){return z(P(t,...n),n)},t.flatRollup=function(t,n,...e){return z(D(t,n,...e),e)},t.forceCenter=function(t,n){var e,r=1;function i(){var i,o,a=e.length,u=0,c=0;for(i=0;if+p||os+p||ac.index){var g=f-u.x-u.vx,y=s-u.y-u.vy,v=g*g+y*y;vt.r&&(t.r=t[n].r)}function c(){if(n){var r,i,o=n.length;for(e=new Array(o),r=0;r[u(t,n,r),t])));for(a=0,i=new Array(f);a=u)){(t.data!==n||t.next)&&(0===l&&(p+=(l=Uc(e))*l),0===h&&(p+=(h=Uc(e))*h),p(t=(Lc*t+jc)%Hc)/Hc}();function l(){h(),f.call("tick",n),e1?(null==e?u.delete(t):u.set(t,p(e)),n):u.get(t)},find:function(n,e,r){var 
i,o,a,u,c,f=0,s=t.length;for(null==r?r=1/0:r*=r,f=0;f1?(f.on(t,e),n):f.on(t)}}},t.forceX=function(t){var n,e,r,i=qc(.1);function o(t){for(var i,o=0,a=n.length;o=.12&&i<.234&&r>=-.425&&r<-.214?u:i>=.166&&i<.234&&r>=-.214&&r<-.115?c:a).invert(t)},s.stream=function(e){return t&&n===e?t:(r=[a.stream(n=e),u.stream(e),c.stream(e)],i=r.length,t={point:function(t,n){for(var e=-1;++ejs(r[0],r[1])&&(r[1]=i[1]),js(i[0],r[1])>js(r[0],r[1])&&(r[0]=i[0])):o.push(r=i);for(a=-1/0,n=0,r=o[e=o.length-1];n<=e;r=i,++n)i=o[n],(u=js(r[1],i[0]))>a&&(a=u,Wf=i[0],Kf=r[1])}return is=os=null,Wf===1/0||Zf===1/0?[[NaN,NaN],[NaN,NaN]]:[[Wf,Zf],[Kf,Qf]]},t.geoCentroid=function(t){ms=xs=ws=Ms=Ts=As=Ss=Es=0,Ns=new T,ks=new T,Cs=new T,Lf(t,Gs);var n=+Ns,e=+ks,r=+Cs,i=Ef(n,e,r);return i=0))throw new RangeError(`invalid digits: ${t}`);i=n}return null===n&&(r=new ed(i)),a},a.projection(t).digits(i).context(n)},t.geoProjection=yd,t.geoProjectionMutator=vd,t.geoRotation=ll,t.geoStereographic=function(){return yd(Bd).scale(250).clipAngle(142)},t.geoStereographicRaw=Bd,t.geoStream=Lf,t.geoTransform=function(t){return{stream:id(t)}},t.geoTransverseMercator=function(){var t=Ed(Yd),n=t.center,e=t.rotate;return t.center=function(t){return arguments.length?n([-t[1],t[0]]):[(t=n())[1],-t[0]]},t.rotate=function(t){return arguments.length?e([t[0],t[1],t.length>2?t[2]+90:90]):[(t=e())[0],t[1],t[2]-90]},e([0,0,90]).scale(159.155)},t.geoTransverseMercatorRaw=Yd,t.gray=function(t,n){return new ur(t,0,0,null==n?1:n)},t.greatest=ot,t.greatestIndex=function(t,e=n){if(1===e.length)return tt(t,e);let r,i=-1,o=-1;for(const n of t)++o,(i<0?0===e(n,n):e(n,r)>0)&&(r=n,i=o);return i},t.group=C,t.groupSort=function(t,e,r){return(2!==e.length?U($(t,e,r),(([t,e],[r,i])=>n(e,i)||n(t,r))):U(C(t,r),(([t,r],[i,o])=>e(r,o)||n(t,i)))).map((([t])=>t))},t.groups=P,t.hcl=dr,t.hierarchy=Gd,t.histogram=Q,t.hsl=He,t.html=Ec,t.image=function(t,n){return new Promise((function(e,r){var i=new Image;for(var o in 
n)i[o]=n[o];i.onerror=r,i.onload=function(){e(i)},i.src=t}))},t.index=function(t,...n){return F(t,k,R,n)},t.indexes=function(t,...n){return F(t,Array.from,R,n)},t.interpolate=Gr,t.interpolateArray=function(t,n){return(Ir(n)?Ur:Or)(t,n)},t.interpolateBasis=Er,t.interpolateBasisClosed=Nr,t.interpolateBlues=Gb,t.interpolateBrBG=ob,t.interpolateBuGn=Mb,t.interpolateBuPu=Ab,t.interpolateCividis=function(t){return t=Math.max(0,Math.min(1,t)),"rgb("+Math.max(0,Math.min(255,Math.round(-4.54-t*(35.34-t*(2381.73-t*(6402.7-t*(7024.72-2710.57*t)))))))+", "+Math.max(0,Math.min(255,Math.round(32.49+t*(170.73+t*(52.82-t*(131.46-t*(176.58-67.37*t)))))))+", "+Math.max(0,Math.min(255,Math.round(81.24+t*(442.36-t*(2482.43-t*(6167.24-t*(6614.94-2475.67*t)))))))+")"},t.interpolateCool=am,t.interpolateCubehelix=li,t.interpolateCubehelixDefault=im,t.interpolateCubehelixLong=hi,t.interpolateDate=Br,t.interpolateDiscrete=function(t){var n=t.length;return function(e){return t[Math.max(0,Math.min(n-1,Math.floor(e*n)))]}},t.interpolateGnBu=Eb,t.interpolateGreens=Wb,t.interpolateGreys=Kb,t.interpolateHcl=ci,t.interpolateHclLong=fi,t.interpolateHsl=oi,t.interpolateHslLong=ai,t.interpolateHue=function(t,n){var e=Pr(+t,+n);return function(t){var n=e(t);return n-360*Math.floor(n/360)}},t.interpolateInferno=pm,t.interpolateLab=function(t,n){var e=$r((t=ar(t)).l,(n=ar(n)).l),r=$r(t.a,n.a),i=$r(t.b,n.b),o=$r(t.opacity,n.opacity);return function(n){return t.l=e(n),t.a=r(n),t.b=i(n),t.opacity=o(n),t+""}},t.interpolateMagma=dm,t.interpolateNumber=Yr,t.interpolateNumberArray=Ur,t.interpolateObject=Lr,t.interpolateOrRd=kb,t.interpolateOranges=rm,t.interpolatePRGn=ub,t.interpolatePiYG=fb,t.interpolatePlasma=gm,t.interpolatePuBu=$b,t.interpolatePuBuGn=Pb,t.interpolatePuOr=lb,t.interpolatePuRd=Rb,t.interpolatePurples=Jb,t.interpolateRainbow=function(t){(t<0||t>1)&&(t-=Math.floor(t));var n=Math.abs(t-.5);return 
um.h=360*t-100,um.s=1.5-1.5*n,um.l=.8-.9*n,um+""},t.interpolateRdBu=db,t.interpolateRdGy=gb,t.interpolateRdPu=qb,t.interpolateRdYlBu=vb,t.interpolateRdYlGn=bb,t.interpolateReds=nm,t.interpolateRgb=Dr,t.interpolateRgbBasis=Fr,t.interpolateRgbBasisClosed=qr,t.interpolateRound=Vr,t.interpolateSinebow=function(t){var n;return t=(.5-t)*Math.PI,cm.r=255*(n=Math.sin(t))*n,cm.g=255*(n=Math.sin(t+fm))*n,cm.b=255*(n=Math.sin(t+sm))*n,cm+""},t.interpolateSpectral=xb,t.interpolateString=Xr,t.interpolateTransformCss=ti,t.interpolateTransformSvg=ni,t.interpolateTurbo=function(t){return t=Math.max(0,Math.min(1,t)),"rgb("+Math.max(0,Math.min(255,Math.round(34.61+t*(1172.33-t*(10793.56-t*(33300.12-t*(38394.49-14825.05*t)))))))+", "+Math.max(0,Math.min(255,Math.round(23.31+t*(557.33+t*(1225.33-t*(3574.96-t*(1073.77+707.56*t)))))))+", "+Math.max(0,Math.min(255,Math.round(27.2+t*(3211.1-t*(15327.97-t*(27814-t*(22569.18-6838.66*t)))))))+")"},t.interpolateViridis=hm,t.interpolateWarm=om,t.interpolateYlGn=Bb,t.interpolateYlGnBu=Ib,t.interpolateYlOrBr=Lb,t.interpolateYlOrRd=Hb,t.interpolateZoom=ri,t.interrupt=Gi,t.intersection=function(t,...n){t=new InternSet(t),n=n.map(vt);t:for(const e of t)for(const r of n)if(!r.has(e)){t.delete(e);continue t}return t},t.interval=function(t,n,e){var r=new Ei,i=n;return null==n?(r.restart(t,n,e),r):(r._restart=r.restart,r.restart=function(t,n,e){n=+n,e=null==e?Ai():+e,r._restart((function o(a){a+=i,r._restart(o,i+=n,e),t(a)}),n,e)},r.restart(t,n,e),r)},t.isoFormat=D_,t.isoParse=F_,t.json=function(t,n){return fetch(t,n).then(Tc)},t.lab=ar,t.lch=function(t,n,e,r){return 1===arguments.length?hr(t):new pr(e,n,t,null==r?1:r)},t.least=function(t,e=n){let r,i=!1;if(1===e.length){let o;for(const a of t){const t=e(a);(i?n(t,o)<0:0===n(t,t))&&(r=a,o=t,i=!0)}}else for(const n of t)(i?e(n,r)<0:0===e(n,n))&&(r=n,i=!0);return r},t.leastIndex=ht,t.line=Ym,t.lineRadial=Zm,t.link=ax,t.linkHorizontal=function(){return ax(nx)},t.linkRadial=function(){const t=ax(rx);return 
t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t},t.linkVertical=function(){return ax(ex)},t.local=Qn,t.map=function(t,n){if("function"!=typeof t[Symbol.iterator])throw new TypeError("values is not iterable");if("function"!=typeof n)throw new TypeError("mapper is not a function");return Array.from(t,((e,r)=>n(e,r,t)))},t.matcher=Vt,t.max=J,t.maxIndex=tt,t.mean=function(t,n){let e=0,r=0;if(void 0===n)for(let n of t)null!=n&&(n=+n)>=n&&(++e,r+=n);else{let i=-1;for(let o of t)null!=(o=n(o,++i,t))&&(o=+o)>=o&&(++e,r+=o)}if(e)return r/e},t.median=function(t,n){return at(t,.5,n)},t.medianIndex=function(t,n){return ct(t,.5,n)},t.merge=ft,t.min=nt,t.minIndex=et,t.mode=function(t,n){const e=new InternMap;if(void 0===n)for(let n of t)null!=n&&n>=n&&e.set(n,(e.get(n)||0)+1);else{let r=-1;for(let i of t)null!=(i=n(i,++r,t))&&i>=i&&e.set(i,(e.get(i)||0)+1)}let r,i=0;for(const[t,n]of e)n>i&&(i=n,r=t);return r},t.namespace=It,t.namespaces=Ut,t.nice=Z,t.now=Ai,t.pack=function(){var t=null,n=1,e=1,r=np;function i(i){const o=ap();return i.x=n/2,i.y=e/2,t?i.eachBefore(xp(t)).eachAfter(wp(r,.5,o)).eachBefore(Mp(1)):i.eachBefore(xp(mp)).eachAfter(wp(np,1,o)).eachAfter(wp(r,i.r/Math.min(n,e),o)).eachBefore(Mp(Math.min(n,e)/(2*i.r))),i}return i.radius=function(n){return arguments.length?(t=Jd(n),i):t},i.size=function(t){return arguments.length?(n=+t[0],e=+t[1],i):[n,e]},i.padding=function(t){return arguments.length?(r="function"==typeof t?t:ep(+t),i):r},i},t.packEnclose=function(t){return up(t,ap())},t.packSiblings=function(t){return bp(t,ap()),t},t.pairs=function(t,n=st){const e=[];let r,i=!1;for(const o of t)i&&e.push(n(r,o)),r=o,i=!0;return e},t.partition=function(){var t=1,n=1,e=0,r=!1;function i(i){var o=i.height+1;return i.x0=i.y0=e,i.x1=t,i.y1=n/o,i.eachBefore(function(t,n){return function(r){r.children&&Ap(r,r.x0,t*(r.depth+1)/n,r.x1,t*(r.depth+2)/n);var i=r.x0,o=r.y0,a=r.x1-e,u=r.y1-e;a0&&(d+=l);for(null!=n?p.sort((function(t,e){return 
n(g[t],g[e])})):null!=e&&p.sort((function(t,n){return e(a[t],a[n])})),u=0,f=d?(v-h*b)/d:0;u0?l*f:0)+b,g[c]={data:a[c],index:u,value:l,startAngle:y,endAngle:s,padAngle:_};return g}return a.value=function(n){return arguments.length?(t="function"==typeof n?n:ym(+n),a):t},a.sortValues=function(t){return arguments.length?(n=t,e=null,a):n},a.sort=function(t){return arguments.length?(e=t,n=null,a):e},a.startAngle=function(t){return arguments.length?(r="function"==typeof t?t:ym(+t),a):r},a.endAngle=function(t){return arguments.length?(i="function"==typeof t?t:ym(+t),a):i},a.padAngle=function(t){return arguments.length?(o="function"==typeof t?t:ym(+t),a):o},a},t.piecewise=di,t.pointRadial=Qm,t.pointer=ne,t.pointers=function(t,n){return t.target&&(t=te(t),void 0===n&&(n=t.currentTarget),t=t.touches||[t]),Array.from(t,(t=>ne(t,n)))},t.polygonArea=function(t){for(var n,e=-1,r=t.length,i=t[r-1],o=0;++eu!=f>u&&a<(c-e)*(u-r)/(f-r)+e&&(s=!s),c=e,f=r;return s},t.polygonHull=function(t){if((e=t.length)<3)return null;var n,e,r=new Array(e),i=new Array(e);for(n=0;n=0;--n)f.push(t[r[o[n]][2]]);for(n=+u;n(n=1664525*n+1013904223|0,lg*(n>>>0))},t.randomLogNormal=Kp,t.randomLogistic=fg,t.randomNormal=Zp,t.randomPareto=ng,t.randomPoisson=sg,t.randomUniform=Vp,t.randomWeibull=ug,t.range=lt,t.rank=function(t,e=n){if("function"!=typeof t[Symbol.iterator])throw new TypeError("values is not iterable");let r=Array.from(t);const i=new Float64Array(r.length);2!==e.length&&(r=r.map(e),e=n);const o=(t,n)=>e(r[t],r[n]);let a,u;return(t=Uint32Array.from(r,((t,n)=>n))).sort(e===n?(t,n)=>O(r[t],r[n]):I(o)),t.forEach(((t,n)=>{const e=o(t,void 0===a?t:a);e>=0?((void 0===a||e>0)&&(a=t,u=n),i[t]=u):i[t]=NaN})),i},t.reduce=function(t,n,e){if("function"!=typeof n)throw new TypeError("reducer is not a function");const r=t[Symbol.iterator]();let i,o,a=-1;if(arguments.length<3){if(({done:i,value:e}=r.next()),i)return;++a}for(;({done:i,value:o}=r.next()),!i;)e=n(e,o,++a,t);return 
e},t.reverse=function(t){if("function"!=typeof t[Symbol.iterator])throw new TypeError("values is not iterable");return Array.from(t).reverse()},t.rgb=Fe,t.ribbon=function(){return Wa()},t.ribbonArrow=function(){return Wa(Va)},t.rollup=$,t.rollups=D,t.scaleBand=yg,t.scaleDiverging=function t(){var n=Ng(L_()(mg));return n.copy=function(){return B_(n,t())},dg.apply(n,arguments)},t.scaleDivergingLog=function t(){var n=Fg(L_()).domain([.1,1,10]);return n.copy=function(){return B_(n,t()).base(n.base())},dg.apply(n,arguments)},t.scaleDivergingPow=j_,t.scaleDivergingSqrt=function(){return j_.apply(null,arguments).exponent(.5)},t.scaleDivergingSymlog=function t(){var n=Ig(L_());return n.copy=function(){return B_(n,t()).constant(n.constant())},dg.apply(n,arguments)},t.scaleIdentity=function t(n){var e;function r(t){return null==t||isNaN(t=+t)?e:t}return r.invert=r,r.domain=r.range=function(t){return arguments.length?(n=Array.from(t,_g),r):n.slice()},r.unknown=function(t){return arguments.length?(e=t,r):e},r.copy=function(){return t(n).unknown(e)},n=arguments.length?Array.from(n,_g):[0,1],Ng(r)},t.scaleImplicit=pg,t.scaleLinear=function t(){var n=Sg();return n.copy=function(){return Tg(n,t())},hg.apply(n,arguments),Ng(n)},t.scaleLog=function t(){const n=Fg(Ag()).domain([1,10]);return n.copy=()=>Tg(n,t()).base(n.base()),hg.apply(n,arguments),n},t.scaleOrdinal=gg,t.scalePoint=function(){return vg(yg.apply(null,arguments).paddingInner(1))},t.scalePow=jg,t.scaleQuantile=function t(){var e,r=[],i=[],o=[];function a(){var t=0,n=Math.max(1,i.length);for(o=new Array(n-1);++t0?o[n-1]:r[0],n=i?[o[i-1],r]:[o[n-1],o[n]]},u.unknown=function(t){return arguments.length?(n=t,u):u},u.thresholds=function(){return o.slice()},u.copy=function(){return t().domain([e,r]).range(a).unknown(n)},hg.apply(Ng(u),arguments)},t.scaleRadial=function t(){var n,e=Sg(),r=[0,1],i=!1;function o(t){var r=function(t){return Math.sign(t)*Math.sqrt(Math.abs(t))}(e(t));return isNaN(r)?n:i?Math.round(r):r}return 
o.invert=function(t){return e.invert(Hg(t))},o.domain=function(t){return arguments.length?(e.domain(t),o):e.domain()},o.range=function(t){return arguments.length?(e.range((r=Array.from(t,_g)).map(Hg)),o):r.slice()},o.rangeRound=function(t){return o.range(t).round(!0)},o.round=function(t){return arguments.length?(i=!!t,o):i},o.clamp=function(t){return arguments.length?(e.clamp(t),o):e.clamp()},o.unknown=function(t){return arguments.length?(n=t,o):n},o.copy=function(){return t(e.domain(),r).round(i).clamp(e.clamp()).unknown(n)},hg.apply(o,arguments),Ng(o)},t.scaleSequential=function t(){var n=Ng(O_()(mg));return n.copy=function(){return B_(n,t())},dg.apply(n,arguments)},t.scaleSequentialLog=function t(){var n=Fg(O_()).domain([1,10]);return n.copy=function(){return B_(n,t()).base(n.base())},dg.apply(n,arguments)},t.scaleSequentialPow=Y_,t.scaleSequentialQuantile=function t(){var e=[],r=mg;function i(t){if(null!=t&&!isNaN(t=+t))return r((s(e,t,1)-1)/(e.length-1))}return i.domain=function(t){if(!arguments.length)return e.slice();e=[];for(let n of t)null==n||isNaN(n=+n)||e.push(n);return e.sort(n),i},i.interpolator=function(t){return arguments.length?(r=t,i):r},i.range=function(){return e.map(((t,n)=>r(n/(e.length-1))))},i.quantiles=function(t){return Array.from({length:t+1},((n,r)=>at(e,r/t)))},i.copy=function(){return t(r).domain(e)},dg.apply(i,arguments)},t.scaleSequentialSqrt=function(){return Y_.apply(null,arguments).exponent(.5)},t.scaleSequentialSymlog=function t(){var n=Ig(O_());return n.copy=function(){return B_(n,t()).constant(n.constant())},dg.apply(n,arguments)},t.scaleSqrt=function(){return jg.apply(null,arguments).exponent(.5)},t.scaleSymlog=function t(){var n=Ig(Ag());return n.copy=function(){return Tg(n,t()).constant(n.constant())},hg.apply(n,arguments)},t.scaleThreshold=function t(){var n,e=[.5],r=[0,1],i=1;function o(t){return null!=t&&t<=t?r[s(e,t,0,i)]:n}return o.domain=function(t){return 
arguments.length?(e=Array.from(t),i=Math.min(e.length,r.length-1),o):e.slice()},o.range=function(t){return arguments.length?(r=Array.from(t),i=Math.min(e.length,r.length-1),o):r.slice()},o.invertExtent=function(t){var n=r.indexOf(t);return[e[n-1],e[n]]},o.unknown=function(t){return arguments.length?(n=t,o):n},o.copy=function(){return t().domain(e).range(r).unknown(n)},hg.apply(o,arguments)},t.scaleTime=function(){return hg.apply(I_(uv,cv,tv,Zy,xy,py,sy,ay,iy,t.timeFormat).domain([new Date(2e3,0,1),new Date(2e3,0,2)]),arguments)},t.scaleUtc=function(){return hg.apply(I_(ov,av,ev,Qy,Fy,yy,hy,cy,iy,t.utcFormat).domain([Date.UTC(2e3,0,1),Date.UTC(2e3,0,2)]),arguments)},t.scan=function(t,n){const e=ht(t,n);return e<0?void 0:e},t.schemeAccent=G_,t.schemeBlues=Xb,t.schemeBrBG=ib,t.schemeBuGn=wb,t.schemeBuPu=Tb,t.schemeCategory10=X_,t.schemeDark2=V_,t.schemeGnBu=Sb,t.schemeGreens=Vb,t.schemeGreys=Zb,t.schemeObservable10=W_,t.schemeOrRd=Nb,t.schemeOranges=em,t.schemePRGn=ab,t.schemePaired=Z_,t.schemePastel1=K_,t.schemePastel2=Q_,t.schemePiYG=cb,t.schemePuBu=zb,t.schemePuBuGn=Cb,t.schemePuOr=sb,t.schemePuRd=Db,t.schemePurples=Qb,t.schemeRdBu=hb,t.schemeRdGy=pb,t.schemeRdPu=Fb,t.schemeRdYlBu=yb,t.schemeRdYlGn=_b,t.schemeReds=tm,t.schemeSet1=J_,t.schemeSet2=tb,t.schemeSet3=nb,t.schemeSpectral=mb,t.schemeTableau10=eb,t.schemeYlGn=Ob,t.schemeYlGnBu=Ub,t.schemeYlOrBr=Yb,t.schemeYlOrRd=jb,t.select=Zn,t.selectAll=function(t){return"string"==typeof t?new Vn([document.querySelectorAll(t)],[document.documentElement]):new Vn([Ht(t)],Gn)},t.selection=Wn,t.selector=jt,t.selectorAll=Gt,t.shuffle=dt,t.shuffler=pt,t.some=function(t,n){if("function"!=typeof n)throw new TypeError("test is not a function");let e=-1;for(const r of t)if(n(r,++e,t))return!0;return!1},t.sort=U,t.stack=function(){var t=ym([]),n=dw,e=hw,r=pw;function i(i){var o,a,u=Array.from(t.apply(this,arguments),gw),c=u.length,f=-1;for(const t of i)for(o=0,++f;o0)for(var 
e,r,i,o,a,u,c=0,f=t[n[0]].length;c0?(r[0]=o,r[1]=o+=i):i<0?(r[1]=a,r[0]=a+=i):(r[0]=0,r[1]=i)},t.stackOffsetExpand=function(t,n){if((r=t.length)>0){for(var e,r,i,o=0,a=t[0].length;o0){for(var e,r=0,i=t[n[0]],o=i.length;r0&&(r=(e=t[n[0]]).length)>0){for(var e,r,i,o=0,a=1;afunction(t){t=`${t}`;let n=t.length;zp(t,n-1)&&!zp(t,n-2)&&(t=t.slice(0,-1));return"/"===t[0]?t:`/${t}`}(t(n,e,r)))),e=n.map(Pp),i=new Set(n).add("");for(const t of e)i.has(t)||(i.add(t),n.push(t),e.push(Pp(t)),h.push(Np));d=(t,e)=>n[e],p=(t,n)=>e[n]}for(a=0,i=h.length;a=0&&(f=h[t]).data===Np;--t)f.data=null}if(u.parent=Sp,u.eachBefore((function(t){t.depth=t.parent.depth+1,--i})).eachBefore(Kd),u.parent=null,i>0)throw new Error("cycle");return u}return r.id=function(t){return arguments.length?(n=Jd(t),r):n},r.parentId=function(t){return arguments.length?(e=Jd(t),r):e},r.path=function(n){return arguments.length?(t=Jd(n),r):t},r},t.style=_n,t.subset=function(t,n){return _t(n,t)},t.sum=function(t,n){let e=0;if(void 0===n)for(let n of t)(n=+n)&&(e+=n);else{let r=-1;for(let i of t)(i=+n(i,++r,t))&&(e+=i)}return e},t.superset=_t,t.svg=Nc,t.symbol=function(t,n){let e=null,r=km(i);function i(){let i;if(e||(e=i=r()),t.apply(this,arguments).draw(e,+n.apply(this,arguments)),i)return e=null,i+""||null}return t="function"==typeof t?t:ym(t||fx),n="function"==typeof n?n:ym(void 0===n?64:+n),i.type=function(n){return arguments.length?(t="function"==typeof n?n:ym(n),i):t},i.size=function(t){return arguments.length?(n="function"==typeof t?t:ym(+t),i):n},i.context=function(t){return arguments.length?(e=null==t?null:t,i):e},i},t.symbolAsterisk=cx,t.symbolCircle=fx,t.symbolCross=sx,t.symbolDiamond=dx,t.symbolDiamond2=px,t.symbolPlus=gx,t.symbolSquare=yx,t.symbolSquare2=vx,t.symbolStar=xx,t.symbolTimes=Px,t.symbolTriangle=Mx,t.symbolTriangle2=Ax,t.symbolWye=Cx,t.symbolX=Px,t.symbols=zx,t.symbolsFill=zx,t.symbolsStroke=$x,t.text=mc,t.thresholdFreedmanDiaconis=function(t,n,e){const r=v(t),i=at(t,.75)-at(t,.25);return 
r&&i?Math.ceil((e-n)/(2*i*Math.pow(r,-1/3))):1},t.thresholdScott=function(t,n,e){const r=v(t),i=w(t);return r&&i?Math.ceil((e-n)*Math.cbrt(r)/(3.49*i)):1},t.thresholdSturges=K,t.tickFormat=Eg,t.tickIncrement=V,t.tickStep=W,t.ticks=G,t.timeDay=py,t.timeDays=gy,t.timeFormatDefaultLocale=P_,t.timeFormatLocale=hv,t.timeFriday=Sy,t.timeFridays=$y,t.timeHour=sy,t.timeHours=ly,t.timeInterval=Vg,t.timeMillisecond=Wg,t.timeMilliseconds=Zg,t.timeMinute=ay,t.timeMinutes=uy,t.timeMonday=wy,t.timeMondays=ky,t.timeMonth=Zy,t.timeMonths=Ky,t.timeSaturday=Ey,t.timeSaturdays=Dy,t.timeSecond=iy,t.timeSeconds=oy,t.timeSunday=xy,t.timeSundays=Ny,t.timeThursday=Ay,t.timeThursdays=zy,t.timeTickInterval=cv,t.timeTicks=uv,t.timeTuesday=My,t.timeTuesdays=Cy,t.timeWednesday=Ty,t.timeWednesdays=Py,t.timeWeek=xy,t.timeWeeks=Ny,t.timeYear=tv,t.timeYears=nv,t.timeout=$i,t.timer=Ni,t.timerFlush=ki,t.transition=go,t.transpose=gt,t.tree=function(){var t=$p,n=1,e=1,r=null;function i(i){var c=function(t){for(var n,e,r,i,o,a=new Up(t,0),u=[a];n=u.pop();)if(r=n._.children)for(n.children=new Array(o=r.length),i=o-1;i>=0;--i)u.push(e=n.children[i]=new Up(r[i],i)),e.parent=n;return(a.parent=new Up(null,0)).children=[a],a}(i);if(c.eachAfter(o),c.parent.m=-c.z,c.eachBefore(a),r)i.eachBefore(u);else{var f=i,s=i,l=i;i.eachBefore((function(t){t.xs.x&&(s=t),t.depth>l.depth&&(l=t)}));var h=f===s?1:t(f,s)/2,d=h-f.x,p=n/(s.x+h+d),g=e/(l.depth||1);i.eachBefore((function(t){t.x=(t.x+d)*p,t.y=t.depth*g}))}return i}function o(n){var e=n.children,r=n.parent.children,i=n.i?r[n.i-1]:null;if(e){!function(t){for(var n,e=0,r=0,i=t.children,o=i.length;--o>=0;)(n=i[o]).z+=e,n.m+=e,e+=n.s+(r+=n.c)}(n);var o=(e[0].z+e[e.length-1].z)/2;i?(n.z=i.z+t(n._,i._),n.m=n.z-o):n.z=o}else i&&(n.z=i.z+t(n._,i._));n.parent.A=function(n,e,r){if(e){for(var 
i,o=n,a=n,u=e,c=o.parent.children[0],f=o.m,s=a.m,l=u.m,h=c.m;u=Rp(u),o=Dp(o),u&&o;)c=Dp(c),(a=Rp(a)).a=n,(i=u.z+l-o.z-f+t(u._,o._))>0&&(Fp(qp(u,n,r),n,i),f+=i,s+=i),l+=u.m,f+=o.m,h+=c.m,s+=a.m;u&&!Rp(a)&&(a.t=u,a.m+=l-s),o&&!Dp(c)&&(c.t=o,c.m+=f-h,r=n)}return r}(n,i,n.parent.A||r[0])}function a(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function u(t){t.x*=n,t.y=t.depth*e}return i.separation=function(n){return arguments.length?(t=n,i):t},i.size=function(t){return arguments.length?(r=!1,n=+t[0],e=+t[1],i):r?null:[n,e]},i.nodeSize=function(t){return arguments.length?(r=!0,n=+t[0],e=+t[1],i):r?[n,e]:null},i},t.treemap=function(){var t=Yp,n=!1,e=1,r=1,i=[0],o=np,a=np,u=np,c=np,f=np;function s(t){return t.x0=t.y0=0,t.x1=e,t.y1=r,t.eachBefore(l),i=[0],n&&t.eachBefore(Tp),t}function l(n){var e=i[n.depth],r=n.x0+e,s=n.y0+e,l=n.x1-e,h=n.y1-e;l=e-1){var s=u[n];return s.x0=i,s.y0=o,s.x1=a,void(s.y1=c)}var l=f[n],h=r/2+l,d=n+1,p=e-1;for(;d>>1;f[g]c-o){var _=r?(i*v+a*y)/r:a;t(n,d,y,i,o,_,c),t(d,e,v,_,o,a,c)}else{var b=r?(o*v+c*y)/r:c;t(n,d,y,i,o,a,b),t(d,e,v,i,b,a,c)}}(0,c,t.value,n,e,r,i)},t.treemapDice=Ap,t.treemapResquarify=Lp,t.treemapSlice=Ip,t.treemapSliceDice=function(t,n,e,r,i){(1&t.depth?Ip:Ap)(t,n,e,r,i)},t.treemapSquarify=Yp,t.tsv=Mc,t.tsvFormat=lc,t.tsvFormatBody=hc,t.tsvFormatRow=pc,t.tsvFormatRows=dc,t.tsvFormatValue=gc,t.tsvParse=fc,t.tsvParseRows=sc,t.union=function(...t){const n=new InternSet;for(const e of t)for(const t of e)n.add(t);return 
n},t.unixDay=_y,t.unixDays=by,t.utcDay=yy,t.utcDays=vy,t.utcFriday=By,t.utcFridays=Vy,t.utcHour=hy,t.utcHours=dy,t.utcMillisecond=Wg,t.utcMilliseconds=Zg,t.utcMinute=cy,t.utcMinutes=fy,t.utcMonday=qy,t.utcMondays=jy,t.utcMonth=Qy,t.utcMonths=Jy,t.utcSaturday=Yy,t.utcSaturdays=Wy,t.utcSecond=iy,t.utcSeconds=oy,t.utcSunday=Fy,t.utcSundays=Ly,t.utcThursday=Oy,t.utcThursdays=Gy,t.utcTickInterval=av,t.utcTicks=ov,t.utcTuesday=Uy,t.utcTuesdays=Hy,t.utcWednesday=Iy,t.utcWednesdays=Xy,t.utcWeek=Fy,t.utcWeeks=Ly,t.utcYear=ev,t.utcYears=rv,t.variance=x,t.version="7.9.0",t.window=pn,t.xml=Sc,t.zip=function(){return gt(arguments)},t.zoom=function(){var t,n,e,r=Ew,i=Nw,o=zw,a=Cw,u=Pw,c=[0,1/0],f=[[-1/0,-1/0],[1/0,1/0]],s=250,l=ri,h=$t("start","zoom","end"),d=500,p=150,g=0,y=10;function v(t){t.property("__zoom",kw).on("wheel.zoom",T,{passive:!1}).on("mousedown.zoom",A).on("dblclick.zoom",S).filter(u).on("touchstart.zoom",E).on("touchmove.zoom",N).on("touchend.zoom touchcancel.zoom",k).style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function _(t,n){return(n=Math.max(c[0],Math.min(c[1],n)))===t.k?t:new ww(n,t.x,t.y)}function b(t,n,e){var r=n[0]-e[0]*t.k,i=n[1]-e[1]*t.k;return r===t.x&&i===t.y?t:new ww(t.k,r,i)}function m(t){return[(+t[0][0]+ +t[1][0])/2,(+t[0][1]+ +t[1][1])/2]}function x(t,n,e,r){t.on("start.zoom",(function(){w(this,arguments).event(r).start()})).on("interrupt.zoom end.zoom",(function(){w(this,arguments).event(r).end()})).tween("zoom",(function(){var t=this,o=arguments,a=w(t,o).event(r),u=i.apply(t,o),c=null==e?m(u):"function"==typeof e?e.apply(t,o):e,f=Math.max(u[1][0]-u[0][0],u[1][1]-u[0][1]),s=t.__zoom,h="function"==typeof n?n.apply(t,o):n,d=l(s.invert(c).concat(f/s.k),h.invert(c).concat(f/h.k));return function(t){if(1===t)t=h;else{var n=d(t),e=f/n[2];t=new ww(e,c[0]-n[0]*e,c[1]-n[1]*e)}a.zoom(null,t)}}))}function w(t,n,e){return!e&&t.__zooming||new M(t,n)}function 
M(t,n){this.that=t,this.args=n,this.active=0,this.sourceEvent=null,this.extent=i.apply(t,n),this.taps=0}function T(t,...n){if(r.apply(this,arguments)){var e=w(this,n).event(t),i=this.__zoom,u=Math.max(c[0],Math.min(c[1],i.k*Math.pow(2,a.apply(this,arguments)))),s=ne(t);if(e.wheel)e.mouse[0][0]===s[0]&&e.mouse[0][1]===s[1]||(e.mouse[1]=i.invert(e.mouse[0]=s)),clearTimeout(e.wheel);else{if(i.k===u)return;e.mouse=[s,i.invert(s)],Gi(this),e.start()}Sw(t),e.wheel=setTimeout((function(){e.wheel=null,e.end()}),p),e.zoom("mouse",o(b(_(i,u),e.mouse[0],e.mouse[1]),e.extent,f))}}function A(t,...n){if(!e&&r.apply(this,arguments)){var i=t.currentTarget,a=w(this,n,!0).event(t),u=Zn(t.view).on("mousemove.zoom",(function(t){if(Sw(t),!a.moved){var n=t.clientX-s,e=t.clientY-l;a.moved=n*n+e*e>g}a.event(t).zoom("mouse",o(b(a.that.__zoom,a.mouse[0]=ne(t,i),a.mouse[1]),a.extent,f))}),!0).on("mouseup.zoom",(function(t){u.on("mousemove.zoom mouseup.zoom",null),ue(t.view,a.moved),Sw(t),a.event(t).end()}),!0),c=ne(t,i),s=t.clientX,l=t.clientY;ae(t.view),Aw(t),a.mouse=[c,this.__zoom.invert(c)],Gi(this),a.start()}}function S(t,...n){if(r.apply(this,arguments)){var e=this.__zoom,a=ne(t.changedTouches?t.changedTouches[0]:t,this),u=e.invert(a),c=e.k*(t.shiftKey?.5:2),l=o(b(_(e,c),a,u),i.apply(this,n),f);Sw(t),s>0?Zn(this).transition().duration(s).call(x,l,a,t):Zn(this).call(v.transform,l,a,t)}}function E(e,...i){if(r.apply(this,arguments)){var o,a,u,c,f=e.touches,s=f.length,l=w(this,i,e.changedTouches.length===s).event(e);for(Aw(e),a=0;acode,kbd){white-space:nowrap}:where(pre){direction:ltr;max-inline-size:max-content;min-inline-size:0;white-space:pre;writing-mode:lr}:where(:not(pre)>code){background:var(--surface-2);border-radius:var(--radius-2);padding:var(--size-1) var(--size-2);writing-mode:lr}:where(kbd,var){border-color:var(--surface-4);border-radius:var(--radius-2);border-width:var(--border-size-1);padding:var(--size-1) 
var(--size-2)}:where(mark){border-radius:var(--radius-2);padding-inline:var(--size-1)}:where(ol,ul){padding-inline-start:var(--size-8)}:where(li){padding-inline-start:var(--size-2)}:where(li,dd,figcaption){max-inline-size:var(--size-content-2)}:where(p){text-wrap:pretty;max-inline-size:var(--size-content-3)}:where(dt,summary){font-weight:var(--font-weight-7)}:where(dt:not(:first-of-type)){margin-block-start:var(--size-5)}:where(small){font-size:max(.5em,var(--font-size-0));max-inline-size:var(--size-content-1)}:where(hr){background-color:var(--surface-3);height:var(--border-size-2);margin-block:var(--size-fluid-5)}:where(figure){display:grid;gap:var(--size-2);place-items:center}:where(figure)>:where(figcaption){text-wrap:balance;font-size:var(--font-size-1)}:where(blockquote,:not(blockquote)>cite){border-inline-start-width:var(--border-size-3)}:where(blockquote){display:grid;gap:var(--size-3);max-inline-size:var(--size-content-2);padding-block:var(--size-3);padding-inline:var(--size-4)}:where(:not(blockquote)>cite){padding-inline-start:var(--size-2)}:where(summary){background:var(--surface-3);border-radius:var(--radius-2);margin:calc(var(--size-2)*-1) calc(var(--size-3)*-1);padding:var(--size-2) var(--size-3)}:where(details){background:var(--surface-2);border-radius:var(--radius-2);padding-block:var(--size-2);padding-inline:var(--size-3)}:where(details[open]>summary){border-end-end-radius:0;border-end-start-radius:0;margin-bottom:var(--size-2)}:where(fieldset){border:var(--border-size-1) solid var(--surface-4);border-radius:var(--radius-2)}:where(del){background:var(--red-9);color:var(--red-2)}:where(ins){background:var(--green-9);color:var(--green-1)}:where(abbr){text-decoration-color:var(--blue-5)}:where(dialog){background-color:var(--surface-1);border-radius:var(--radius-3);box-shadow:var(--shadow-6);color:inherit}:where(menu){display:flex;gap:var(--size-3);padding-inline-start:0}:where(sup){font-size:.5em}:where(table){--nice-inner-radius:calc(var(--radius-3) - 
2px);background:var(--surface-2);border:1px solid var(--surface-2);border-radius:var(--radius-3);width:fit-content}:where(table:not(:has(tfoot)) tr:last-child td:first-child){border-end-start-radius:var(--nice-inner-radius)}:where(table:not(:has(tfoot)) tr:last-child td:last-child){border-end-end-radius:var(--nice-inner-radius)}:where(table thead tr:first-child th:first-child){border-start-start-radius:var(--nice-inner-radius)}:where(table thead tr:first-child th:last-child){border-start-end-radius:var(--nice-inner-radius)}:where(tfoot tr:last-child th:first-of-type){border-end-start-radius:var(--nice-inner-radius)}:where(tfoot tr:last-child td:first-of-type){border-end-start-radius:var(--nice-inner-radius)}:where(tfoot tr:last-child th:last-of-type){border-end-end-radius:var(--nice-inner-radius)}:where(tfoot tr:last-child td:last-of-type){border-end-end-radius:var(--nice-inner-radius)}:where(th){background-color:var(--surface-2);color:var(--text-1)}:where(table a:not(.does-not-exist):focus-visible){outline-offset:-2px}:where(table button:not(.does-not-exist):focus-visible){outline-offset:-2px}:where(table [contenteditable]:focus-visible){outline-offset:-2px}:where(td){text-wrap:pretty;background:var(--surface-1);max-inline-size:var(--size-content-2)}:where(td,th){padding:var(--size-2);text-align:left}:where(td:not([align])){text-align:center}:where(th:not([align])){text-align:center}:where(thead){border-collapse:collapse}:where(table tr:hover td),:where(tbody tr:nth-child(2n):hover td){background-color:var(--surface-3)}:where(table>caption){margin:var(--size-3)}:where(tfoot button){padding-block:var(--size-1);padding-inline:var(--size-3)}@media (prefers-color-scheme:dark){:where(textarea,select,input:not([type=button],[type=submit],[type=reset])){background-color:#171a1c}:where(dialog){background-color:var(--surface-2)}::placeholder{color:var(--gray-6)}} \ No newline at end of file diff --git a/crates/echo-session-ws-gateway/assets/vendor/open-props.LICENSE 
b/crates/echo-session-ws-gateway/assets/vendor/open-props.LICENSE deleted file mode 100644 index 57d11b0b..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/open-props.LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2021 Adam Argyle - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/crates/echo-session-ws-gateway/assets/vendor/open-props.min.css b/crates/echo-session-ws-gateway/assets/vendor/open-props.min.css deleted file mode 100644 index 177cb68c..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/open-props.min.css +++ /dev/null @@ -1 +0,0 @@ -:where(html){--font-system-ui:system-ui,-apple-system,Segoe UI,Roboto,Ubuntu,Cantarell,Noto Sans,sans-serif;--font-transitional:Charter,Bitstream Charter,Sitka Text,Cambria,serif;--font-old-style:Iowan Old Style,Palatino Linotype,URW Palladio L,P052,serif;--font-humanist:Seravek,Gill Sans Nova,Ubuntu,Calibri,DejaVu Sans,source-sans-pro,sans-serif;--font-geometric-humanist:Avenir,Montserrat,Corbel,URW Gothic,source-sans-pro,sans-serif;--font-classical-humanist:Optima,Candara,Noto Sans,source-sans-pro,sans-serif;--font-neo-grotesque:Inter,Roboto,Helvetica Neue,Arial Nova,Nimbus Sans,Arial,sans-serif;--font-monospace-slab-serif:Nimbus Mono PS,Courier New,monospace;--font-monospace-code:Dank Mono,Operator Mono,Inconsolata,Fira Mono,ui-monospace,SF Mono,Monaco,Droid Sans Mono,Source Code Pro,Cascadia Code,Menlo,Consolas,DejaVu Sans Mono,monospace;--font-industrial:Bahnschrift,DIN Alternate,Franklin Gothic Medium,Nimbus Sans Narrow,sans-serif-condensed,sans-serif;--font-rounded-sans:ui-rounded,Hiragino Maru Gothic ProN,Quicksand,Comfortaa,Manjari,Arial Rounded MT,Arial Rounded MT Bold,Calibri,source-sans-pro,sans-serif;--font-slab-serif:Rockwell,Rockwell Nova,Roboto Slab,DejaVu Serif,Sitka Small,serif;--font-antique:Superclarendon,Bookman Old Style,URW Bookman,URW Bookman L,Georgia Pro,Georgia,serif;--font-didone:Didot,Bodoni MT,Noto Serif Display,URW Palladio L,P052,Sylfaen,serif;--font-handwritten:Segoe Print,Bradley 
Hand,Chilanka,TSCu_Comic,casual,cursive;--font-sans:var(--font-system-ui);--font-serif:ui-serif,serif;--font-mono:var(--font-monospace-code);--font-weight-1:100;--font-weight-2:200;--font-weight-3:300;--font-weight-4:400;--font-weight-5:500;--font-weight-6:600;--font-weight-7:700;--font-weight-8:800;--font-weight-9:900;--font-lineheight-00:.95;--font-lineheight-0:1.1;--font-lineheight-1:1.25;--font-lineheight-2:1.375;--font-lineheight-3:1.5;--font-lineheight-4:1.75;--font-lineheight-5:2;--font-letterspacing-0:-.05em;--font-letterspacing-1:.025em;--font-letterspacing-2:.050em;--font-letterspacing-3:.075em;--font-letterspacing-4:.150em;--font-letterspacing-5:.500em;--font-letterspacing-6:.750em;--font-letterspacing-7:1em;--font-size-00:.5rem;--font-size-0:.75rem;--font-size-1:1rem;--font-size-2:1.1rem;--font-size-3:1.25rem;--font-size-4:1.5rem;--font-size-5:2rem;--font-size-6:2.5rem;--font-size-7:3rem;--font-size-8:3.5rem;--font-size-fluid-0:max(.75rem,min(2vw,1rem));--font-size-fluid-1:max(1rem,min(4vw,1.5rem));--font-size-fluid-2:max(1.5rem,min(6vw,2.5rem));--font-size-fluid-3:max(2rem,min(9vw,3.5rem));--size-000:-.5rem;--size-00:-.25rem;--size-1:.25rem;--size-2:.5rem;--size-3:1rem;--size-4:1.25rem;--size-5:1.5rem;--size-6:1.75rem;--size-7:2rem;--size-8:3rem;--size-9:4rem;--size-10:5rem;--size-11:7.5rem;--size-12:10rem;--size-13:15rem;--size-14:20rem;--size-15:30rem;--size-px-000:-8px;--size-px-00:-4px;--size-px-1:4px;--size-px-2:8px;--size-px-3:16px;--size-px-4:20px;--size-px-5:24px;--size-px-6:28px;--size-px-7:32px;--size-px-8:48px;--size-px-9:64px;--size-px-10:80px;--size-px-11:120px;--size-px-12:160px;--size-px-13:240px;--size-px-14:320px;--size-px-15:480px;--size-fluid-1:max(.5rem,min(1vw,1rem));--size-fluid-2:max(1rem,min(2vw,1.5rem));--size-fluid-3:max(1.5rem,min(3vw,2rem));--size-fluid-4:max(2rem,min(4vw,3rem));--size-fluid-5:max(4rem,min(5vw,5rem));--size-fluid-6:max(5rem,min(7vw,7.5rem));--size-fluid-7:max(7.5rem,min(10vw,10rem));--size-fluid-8:max(10rem,m
in(20vw,15rem));--size-fluid-9:max(15rem,min(30vw,20rem));--size-fluid-10:max(20rem,min(40vw,30rem));--size-content-1:20ch;--size-content-2:45ch;--size-content-3:60ch;--size-header-1:20ch;--size-header-2:25ch;--size-header-3:35ch;--size-xxs:240px;--size-xs:360px;--size-sm:480px;--size-md:768px;--size-lg:1024px;--size-xl:1440px;--size-xxl:1920px;--size-relative-000:-.5ch;--size-relative-00:-.25ch;--size-relative-1:.25ch;--size-relative-2:.5ch;--size-relative-3:1ch;--size-relative-4:1.25ch;--size-relative-5:1.5ch;--size-relative-6:1.75ch;--size-relative-7:2ch;--size-relative-8:3ch;--size-relative-9:4ch;--size-relative-10:5ch;--size-relative-11:7.5ch;--size-relative-12:10ch;--size-relative-13:15ch;--size-relative-14:20ch;--size-relative-15:30ch;--ease-1:cubic-bezier(.25,0,.5,1);--ease-2:cubic-bezier(.25,0,.4,1);--ease-3:cubic-bezier(.25,0,.3,1);--ease-4:cubic-bezier(.25,0,.2,1);--ease-5:cubic-bezier(.25,0,.1,1);--ease-in-1:cubic-bezier(.25,0,1,1);--ease-in-2:cubic-bezier(.50,0,1,1);--ease-in-3:cubic-bezier(.70,0,1,1);--ease-in-4:cubic-bezier(.90,0,1,1);--ease-in-5:cubic-bezier(1,0,1,1);--ease-out-1:cubic-bezier(0,0,.75,1);--ease-out-2:cubic-bezier(0,0,.50,1);--ease-out-3:cubic-bezier(0,0,.3,1);--ease-out-4:cubic-bezier(0,0,.1,1);--ease-out-5:cubic-bezier(0,0,0,1);--ease-in-out-1:cubic-bezier(.1,0,.9,1);--ease-in-out-2:cubic-bezier(.3,0,.7,1);--ease-in-out-3:cubic-bezier(.5,0,.5,1);--ease-in-out-4:cubic-bezier(.7,0,.3,1);--ease-in-out-5:cubic-bezier(.9,0,.1,1);--ease-elastic-out-1:cubic-bezier(.5,.75,.75,1.25);--ease-elastic-out-2:cubic-bezier(.5,1,.75,1.25);--ease-elastic-out-3:cubic-bezier(.5,1.25,.75,1.25);--ease-elastic-out-4:cubic-bezier(.5,1.5,.75,1.25);--ease-elastic-out-5:cubic-bezier(.5,1.75,.75,1.25);--ease-elastic-in-1:cubic-bezier(.5,-0.25,.75,1);--ease-elastic-in-2:cubic-bezier(.5,-0.50,.75,1);--ease-elastic-in-3:cubic-bezier(.5,-0.75,.75,1);--ease-elastic-in-4:cubic-bezier(.5,-1.00,.75,1);--ease-elastic-in-5:cubic-bezier(.5,-1.25,.75,1);--ease-elastic-in-o
ut-1:cubic-bezier(.5,-.1,.1,1.5);--ease-elastic-in-out-2:cubic-bezier(.5,-.3,.1,1.5);--ease-elastic-in-out-3:cubic-bezier(.5,-.5,.1,1.5);--ease-elastic-in-out-4:cubic-bezier(.5,-.7,.1,1.5);--ease-elastic-in-out-5:cubic-bezier(.5,-.9,.1,1.5);--ease-step-1:steps(2);--ease-step-2:steps(3);--ease-step-3:steps(4);--ease-step-4:steps(7);--ease-step-5:steps(10);--ease-elastic-1:var(--ease-elastic-out-1);--ease-elastic-2:var(--ease-elastic-out-2);--ease-elastic-3:var(--ease-elastic-out-3);--ease-elastic-4:var(--ease-elastic-out-4);--ease-elastic-5:var(--ease-elastic-out-5);--ease-squish-1:var(--ease-elastic-in-out-1);--ease-squish-2:var(--ease-elastic-in-out-2);--ease-squish-3:var(--ease-elastic-in-out-3);--ease-squish-4:var(--ease-elastic-in-out-4);--ease-squish-5:var(--ease-elastic-in-out-5);--ease-spring-1:linear(0,0.006,0.025 2.8%,0.101 6.1%,0.539 18.9%,0.721 25.3%,0.849 31.5%,0.937 38.1%,0.968 41.8%,0.991 45.7%,1.006 50.1%,1.015 55%,1.017 63.9%,1.001);--ease-spring-2:linear(0,0.007,0.029 2.2%,0.118 4.7%,0.625 14.4%,0.826 19%,0.902,0.962,1.008 26.1%,1.041 28.7%,1.064 32.1%,1.07 36%,1.061 40.5%,1.015 53.4%,0.999 61.6%,0.995 71.2%,1);--ease-spring-3:linear(0,0.009,0.035 2.1%,0.141 4.4%,0.723 12.9%,0.938 16.7%,1.017,1.077,1.121,1.149 24.3%,1.159,1.163,1.161,1.154 29.9%,1.129 32.8%,1.051 39.6%,1.017 43.1%,0.991,0.977 51%,0.974 53.8%,0.975 57.1%,0.997 69.8%,1.003 76.9%,1);--ease-spring-4:linear(0,0.009,0.037 1.7%,0.153 3.6%,0.776 10.3%,1.001,1.142 16%,1.185,1.209 19%,1.215 19.9% 20.8%,1.199,1.165 25%,1.056 30.3%,1.008 33%,0.973,0.955 39.2%,0.953 41.1%,0.957 43.3%,0.998 53.3%,1.009 59.1% 63.7%,0.998 78.9%,1);--ease-spring-5:linear(0,0.01,0.04 1.6%,0.161 3.3%,0.816 9.4%,1.046,1.189 14.4%,1.231,1.254 17%,1.259,1.257 18.6%,1.236,1.194 22.3%,1.057 27%,0.999 29.4%,0.955 32.1%,0.942,0.935 34.9%,0.933,0.939 38.4%,1 47.3%,1.011,1.017 52.6%,1.016 56.4%,1 65.2%,0.996 70.2%,1.001 87.2%,1);--ease-bounce-1:linear(0,0.004,0.016,0.035,0.063,0.098,0.141,0.191,0.25,0.316,0.391 
36.8%,0.563,0.766,1 58.8%,0.946,0.908 69.1%,0.895,0.885,0.879,0.878,0.879,0.885,0.895,0.908 89.7%,0.946,1);--ease-bounce-2:linear(0,0.004,0.016,0.035,0.063,0.098,0.141 15.1%,0.25,0.391,0.562,0.765,1,0.892 45.2%,0.849,0.815,0.788,0.769,0.757,0.753,0.757,0.769,0.788,0.815,0.85,0.892 75.2%,1 80.2%,0.973,0.954,0.943,0.939,0.943,0.954,0.973,1);--ease-bounce-3:linear(0,0.004,0.016,0.035,0.062,0.098,0.141 11.4%,0.25,0.39,0.562,0.764,1 30.3%,0.847 34.8%,0.787,0.737,0.699,0.672,0.655,0.65,0.656,0.672,0.699,0.738,0.787,0.847 61.7%,1 66.2%,0.946,0.908,0.885 74.2%,0.879,0.878,0.879,0.885 79.5%,0.908,0.946,1 87.4%,0.981,0.968,0.96,0.957,0.96,0.968,0.981,1);--ease-bounce-4:linear(0,0.004,0.016 3%,0.062,0.141,0.25,0.391,0.562 18.2%,1 24.3%,0.81,0.676 32.3%,0.629,0.595,0.575,0.568,0.575,0.595,0.629,0.676 48.2%,0.811,1 56.2%,0.918,0.86,0.825,0.814,0.825,0.86,0.918,1 77.2%,0.94 80.6%,0.925,0.92,0.925,0.94 87.5%,1 90.9%,0.974,0.965,0.974,1);--ease-bounce-5:linear(0,0.004,0.016 2.5%,0.063,0.141,0.25 10.1%,0.562,1 20.2%,0.783,0.627,0.534 30.9%,0.511,0.503,0.511,0.534 38%,0.627,0.782,1 48.7%,0.892,0.815,0.769 56.3%,0.757,0.753,0.757,0.769 61.3%,0.815,0.892,1 68.8%,0.908 72.4%,0.885,0.878,0.885,0.908 79.4%,1 83%,0.954 85.5%,0.943,0.939,0.943,0.954 90.5%,1 
93%,0.977,0.97,0.977,1);--ease-circ-in:cubic-bezier(.6,.04,.98,.335);--ease-circ-in-out:cubic-bezier(.785,.135,.15,.86);--ease-circ-out:cubic-bezier(.075,.82,.165,1);--ease-cubic-in:cubic-bezier(.55,.055,.675,.19);--ease-cubic-in-out:cubic-bezier(.645,.045,.355,1);--ease-cubic-out:cubic-bezier(.215,.61,.355,1);--ease-expo-in:cubic-bezier(.95,.05,.795,.035);--ease-expo-in-out:cubic-bezier(1,0,0,1);--ease-expo-out:cubic-bezier(.19,1,.22,1);--ease-quad-in:cubic-bezier(.55,.085,.68,.53);--ease-quad-in-out:cubic-bezier(.455,.03,.515,.955);--ease-quad-out:cubic-bezier(.25,.46,.45,.94);--ease-quart-in:cubic-bezier(.895,.03,.685,.22);--ease-quart-in-out:cubic-bezier(.77,0,.175,1);--ease-quart-out:cubic-bezier(.165,.84,.44,1);--ease-quint-in:cubic-bezier(.755,.05,.855,.06);--ease-quint-in-out:cubic-bezier(.86,0,.07,1);--ease-quint-out:cubic-bezier(.23,1,.32,1);--ease-sine-in:cubic-bezier(.47,0,.745,.715);--ease-sine-in-out:cubic-bezier(.445,.05,.55,.95);--ease-sine-out:cubic-bezier(.39,.575,.565,1);--layer-1:1;--layer-2:2;--layer-3:3;--layer-4:4;--layer-5:5;--layer-important:2147483647;--shadow-color:220 3% 15%;--shadow-strength:1%;--inner-shadow-highlight:inset 0 -.5px 0 0 #fff,inset 0 .5px 0 0 rgba(0,0,0,.067);--shadow-1:0 1px 2px -1px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 9%));--shadow-2:0 3px 5px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 7px 14px -5px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 5%));--shadow-3:0 -1px 3px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 2%)),0 1px 2px -5px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 2%)),0 2px 5px -5px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 4%)),0 4px 12px -5px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 5%)),0 12px 15px -5px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 7%));--shadow-4:0 -2px 5px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 2%)),0 1px 1px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) 
+ 3%)),0 2px 2px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 5px 5px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 4%)),0 9px 9px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 5%)),0 16px 16px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 6%));--shadow-5:0 -1px 2px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 2%)),0 2px 1px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 5px 5px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 10px 10px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 4%)),0 20px 20px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 5%)),0 40px 40px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 7%));--shadow-6:0 -1px 2px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 2%)),0 3px 2px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 7px 5px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 3%)),0 12px 10px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 4%)),0 22px 18px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 5%)),0 41px 33px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 6%)),0 100px 80px -2px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 7%));--inner-shadow-0:inset 0 0 0 1px hsl(var(--shadow-color)/calc(var(--shadow-strength) + 9%));--inner-shadow-1:inset 0 1px 2px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 9%)),var(--inner-shadow-highlight);--inner-shadow-2:inset 0 1px 4px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 9%)),var(--inner-shadow-highlight);--inner-shadow-3:inset 0 2px 8px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 9%)),var(--inner-shadow-highlight);--inner-shadow-4:inset 0 2px 14px 0 hsl(var(--shadow-color)/calc(var(--shadow-strength) + 
9%)),var(--inner-shadow-highlight);--ratio-square:1;--ratio-landscape:4/3;--ratio-portrait:3/4;--ratio-widescreen:16/9;--ratio-ultrawide:18/5;--ratio-golden:1.6180/1;--gray-0:#f8f9fa;--gray-1:#f1f3f5;--gray-2:#e9ecef;--gray-3:#dee2e6;--gray-4:#ced4da;--gray-5:#adb5bd;--gray-6:#868e96;--gray-7:#495057;--gray-8:#343a40;--gray-9:#212529;--gray-10:#16191d;--gray-11:#0d0f12;--gray-12:#030507;--stone-0:#f8fafb;--stone-1:#f2f4f6;--stone-2:#ebedef;--stone-3:#e0e4e5;--stone-4:#d1d6d8;--stone-5:#b1b6b9;--stone-6:#979b9d;--stone-7:#7e8282;--stone-8:#666968;--stone-9:#50514f;--stone-10:#3a3a37;--stone-11:#252521;--stone-12:#121210;--red-0:#fff5f5;--red-1:#ffe3e3;--red-2:#ffc9c9;--red-3:#ffa8a8;--red-4:#ff8787;--red-5:#ff6b6b;--red-6:#fa5252;--red-7:#f03e3e;--red-8:#e03131;--red-9:#c92a2a;--red-10:#b02525;--red-11:#962020;--red-12:#7d1a1a;--pink-0:#fff0f6;--pink-1:#ffdeeb;--pink-2:#fcc2d7;--pink-3:#faa2c1;--pink-4:#f783ac;--pink-5:#f06595;--pink-6:#e64980;--pink-7:#d6336c;--pink-8:#c2255c;--pink-9:#a61e4d;--pink-10:#8c1941;--pink-11:#731536;--pink-12:#59102a;--purple-0:#f8f0fc;--purple-1:#f3d9fa;--purple-2:#eebefa;--purple-3:#e599f7;--purple-4:#da77f2;--purple-5:#cc5de8;--purple-6:#be4bdb;--purple-7:#ae3ec9;--purple-8:#9c36b5;--purple-9:#862e9c;--purple-10:#702682;--purple-11:#5a1e69;--purple-12:#44174f;--violet-0:#f3f0ff;--violet-1:#e5dbff;--violet-2:#d0bfff;--violet-3:#b197fc;--violet-4:#9775fa;--violet-5:#845ef7;--violet-6:#7950f2;--violet-7:#7048e8;--violet-8:#6741d9;--violet-9:#5f3dc4;--violet-10:#5235ab;--violet-11:#462d91;--violet-12:#3a2578;--indigo-0:#edf2ff;--indigo-1:#dbe4ff;--indigo-2:#bac8ff;--indigo-3:#91a7ff;--indigo-4:#748ffc;--indigo-5:#5c7cfa;--indigo-6:#4c6ef5;--indigo-7:#4263eb;--indigo-8:#3b5bdb;--indigo-9:#364fc7;--indigo-10:#2f44ad;--indigo-11:#283a94;--indigo-12:#21307a;--blue-0:#e7f5ff;--blue-1:#d0ebff;--blue-2:#a5d8ff;--blue-3:#74c0fc;--blue-4:#4dabf7;--blue-5:#339af0;--blue-6:#228be6;--blue-7:#1c7ed6;--blue-8:#1971c2;--blue-9:#1864ab;--blue-10:#145591;
--blue-11:#114678;--blue-12:#0d375e;--cyan-0:#e3fafc;--cyan-1:#c5f6fa;--cyan-2:#99e9f2;--cyan-3:#66d9e8;--cyan-4:#3bc9db;--cyan-5:#22b8cf;--cyan-6:#15aabf;--cyan-7:#1098ad;--cyan-8:#0c8599;--cyan-9:#0b7285;--cyan-10:#095c6b;--cyan-11:#074652;--cyan-12:#053038;--teal-0:#e6fcf5;--teal-1:#c3fae8;--teal-2:#96f2d7;--teal-3:#63e6be;--teal-4:#38d9a9;--teal-5:#20c997;--teal-6:#12b886;--teal-7:#0ca678;--teal-8:#099268;--teal-9:#087f5b;--teal-10:#066649;--teal-11:#054d37;--teal-12:#033325;--green-0:#ebfbee;--green-1:#d3f9d8;--green-2:#b2f2bb;--green-3:#8ce99a;--green-4:#69db7c;--green-5:#51cf66;--green-6:#40c057;--green-7:#37b24d;--green-8:#2f9e44;--green-9:#2b8a3e;--green-10:#237032;--green-11:#1b5727;--green-12:#133d1b;--lime-0:#f4fce3;--lime-1:#e9fac8;--lime-2:#d8f5a2;--lime-3:#c0eb75;--lime-4:#a9e34b;--lime-5:#94d82d;--lime-6:#82c91e;--lime-7:#74b816;--lime-8:#66a80f;--lime-9:#5c940d;--lime-10:#4c7a0b;--lime-11:#3c6109;--lime-12:#2c4706;--yellow-0:#fff9db;--yellow-1:#fff3bf;--yellow-2:#ffec99;--yellow-3:#ffe066;--yellow-4:#ffd43b;--yellow-5:#fcc419;--yellow-6:#fab005;--yellow-7:#f59f00;--yellow-8:#f08c00;--yellow-9:#e67700;--yellow-10:#b35c00;--yellow-11:#804200;--yellow-12:#663500;--orange-0:#fff4e6;--orange-1:#ffe8cc;--orange-2:#ffd8a8;--orange-3:#ffc078;--orange-4:#ffa94d;--orange-5:#ff922b;--orange-6:#fd7e14;--orange-7:#f76707;--orange-8:#e8590c;--orange-9:#d9480f;--orange-10:#bf400d;--orange-11:#99330b;--orange-12:#802b09;--choco-0:#fff8dc;--choco-1:#fce1bc;--choco-2:#f7ca9e;--choco-3:#f1b280;--choco-4:#e99b62;--choco-5:#df8545;--choco-6:#d46e25;--choco-7:#bd5f1b;--choco-8:#a45117;--choco-9:#8a4513;--choco-10:#703a13;--choco-11:#572f12;--choco-12:#3d210d;--brown-0:#faf4eb;--brown-1:#ede0d1;--brown-2:#e0cab7;--brown-3:#d3b79e;--brown-4:#c5a285;--brown-5:#b78f6d;--brown-6:#a87c56;--brown-7:#956b47;--brown-8:#825b3a;--brown-9:#6f4b2d;--brown-10:#5e3a21;--brown-11:#4e2b15;--brown-12:#422412;--sand-0:#f8fafb;--sand-1:#e6e4dc;--sand-2:#d5cfbd;--sand-3:#c2b9a0;--sand-4:#aea
58c;--sand-5:#9a9178;--sand-6:#867c65;--sand-7:#736a53;--sand-8:#5f5746;--sand-9:#4b4639;--sand-10:#38352d;--sand-11:#252521;--sand-12:#121210;--camo-0:#f9fbe7;--camo-1:#e8ed9c;--camo-2:#d2df4e;--camo-3:#c2ce34;--camo-4:#b5bb2e;--camo-5:#a7a827;--camo-6:#999621;--camo-7:#8c851c;--camo-8:#7e7416;--camo-9:#6d6414;--camo-10:#5d5411;--camo-11:#4d460e;--camo-12:#36300a;--jungle-0:#ecfeb0;--jungle-1:#def39a;--jungle-2:#d0e884;--jungle-3:#c2dd6e;--jungle-4:#b5d15b;--jungle-5:#a8c648;--jungle-6:#9bbb36;--jungle-7:#8fb024;--jungle-8:#84a513;--jungle-9:#7a9908;--jungle-10:#658006;--jungle-11:#516605;--jungle-12:#3d4d04;--gradient-space: ;--gradient-1:linear-gradient(to bottom right var(--gradient-space),#1f005c,#5b0060,#870160,#ac255e,#ca485c,#e16b5c,#f39060,#ffb56b);--gradient-2:linear-gradient(to bottom right var(--gradient-space),#48005c,#8300e2,#a269ff);--gradient-3:radial-gradient(circle at top right var(--gradient-space),#0ff,rgba(0,255,255,0)),radial-gradient(circle at bottom left var(--gradient-space),#ff1492,rgba(255,20,146,0));--gradient-4:linear-gradient(to bottom right var(--gradient-space),#00f5a0,#00d9f5);--gradient-5:conic-gradient(from -270deg at 75% 110% var(--gradient-space),#f0f,#fffaf0);--gradient-6:conic-gradient(from -90deg at top left var(--gradient-space),#000,#fff);--gradient-7:linear-gradient(to bottom right var(--gradient-space),#72c6ef,#004e8f);--gradient-8:conic-gradient(from 90deg at 50% 0% var(--gradient-space),#111,50%,#222,#111);--gradient-9:conic-gradient(from .5turn at bottom center var(--gradient-space),#add8e6,#fff);--gradient-10:conic-gradient(from 90deg at 40% -25% var(--gradient-space),gold,#f79d03,#ee6907,#e6390a,#de0d0d,#d61039,#cf1261,#c71585,#cf1261,#d61039,#de0d0d,#ee6907,#f79d03,gold,gold,gold);--gradient-11:conic-gradient(at bottom left var(--gradient-space),#ff1493,cyan);--gradient-12:conic-gradient(from 90deg at 25% -10% var(--gradient-space),#ff4500,#d3f340,#7bee85,#afeeee,#7bee85);--gradient-13:radial-gradient(circle at 50% 
200% var(--gradient-space),#000142,#3b0083,#b300c3,#ff059f,#ff4661,#ffad86,#fff3c7);--gradient-14:conic-gradient(at top right var(--gradient-space),lime,cyan);--gradient-15:linear-gradient(to bottom right var(--gradient-space),#c7d2fe,#fecaca,#fef3c7);--gradient-16:radial-gradient(circle at 50% -250% var(--gradient-space),#374151,#111827,#000);--gradient-17:conic-gradient(from -90deg at 50% -25% var(--gradient-space),blue,#8a2be2);--gradient-18:linear-gradient(0deg var(--gradient-space),rgba(255,0,0,.8),rgba(255,0,0,0) 75%),linear-gradient(60deg var(--gradient-space),rgba(255,255,0,.8),rgba(255,255,0,0) 75%),linear-gradient(120deg var(--gradient-space),rgba(0,255,0,.8),rgba(0,255,0,0) 75%),linear-gradient(180deg var(--gradient-space),rgba(0,255,255,.8),rgba(0,255,255,0) 75%),linear-gradient(240deg var(--gradient-space),rgba(0,0,255,.8),rgba(0,0,255,0) 75%),linear-gradient(300deg var(--gradient-space),rgba(255,0,255,.8),rgba(255,0,255,0) 75%);--gradient-19:linear-gradient(to bottom right var(--gradient-space),#ffe259,#ffa751);--gradient-20:conic-gradient(from -135deg at -10% center var(--gradient-space),orange,#ff7715,#ff522a,#ff3f47,#ff5482,#ff69b4);--gradient-21:conic-gradient(from -90deg at 25% 115% var(--gradient-space),red,#f06,#f0c,#c0f,#60f,#00f,#00f,#00f,#00f);--gradient-22:linear-gradient(to bottom right var(--gradient-space),#acb6e5,#86fde8);--gradient-23:linear-gradient(to bottom right var(--gradient-space),#536976,#292e49);--gradient-24:conic-gradient(from .5turn at 0% 0% var(--gradient-space),#00c476,10%,#82b0ff,90%,#00c476);--gradient-25:conic-gradient(at 125% 50% var(--gradient-space),#b78cf7,#ff7c94,#ffcf0d,#ff7c94,#b78cf7);--gradient-26:linear-gradient(to bottom right var(--gradient-space),#9796f0,#fbc7d4);--gradient-27:conic-gradient(from .5turn at bottom left var(--gradient-space),#ff1493,#639);--gradient-28:conic-gradient(from -90deg at 50% 105% var(--gradient-space),#fff,orchid);--gradient-29:radial-gradient(circle at top right 
var(--gradient-space),#bfb3ff,rgba(191,179,255,0)),radial-gradient(circle at bottom left var(--gradient-space),#86acf9,rgba(134,172,249,0));--gradient-30:radial-gradient(circle at top right var(--gradient-space),#00ff80,rgba(0,255,128,0)),radial-gradient(circle at bottom left var(--gradient-space),#adffd6,rgba(173,255,214,0));--noise-1:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 200 200' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='a'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.005' numOctaves='2' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23a)'/%3E%3C/svg%3E");--noise-2:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 300 300' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='a'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.05' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23a)'/%3E%3C/svg%3E");--noise-3:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 1024 1024' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='a'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.25' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23a)'/%3E%3C/svg%3E");--noise-4:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 2056 2056' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='a'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.5' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23a)'/%3E%3C/svg%3E");--noise-5:url("data:image/svg+xml;charset=utf-8,%3Csvg viewBox='0 0 2056 2056' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='a'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='.75' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23a)'/%3E%3C/svg%3E");--noise-filter-1:contrast(300%) brightness(100%);--noise-filter-2:contrast(200%) brightness(150%);--noise-filter-3:contrast(200%) 
brightness(250%);--noise-filter-4:contrast(200%) brightness(500%);--noise-filter-5:contrast(200%) brightness(1000%);--animation-fade-in:fade-in .5s var(--ease-3);--animation-fade-in-bloom:fade-in-bloom 2s var(--ease-3);--animation-fade-out:fade-out .5s var(--ease-3);--animation-fade-out-bloom:fade-out-bloom 2s var(--ease-3);--animation-scale-up:scale-up .5s var(--ease-3);--animation-scale-down:scale-down .5s var(--ease-3);--animation-slide-out-up:slide-out-up .5s var(--ease-3);--animation-slide-out-down:slide-out-down .5s var(--ease-3);--animation-slide-out-right:slide-out-right .5s var(--ease-3);--animation-slide-out-left:slide-out-left .5s var(--ease-3);--animation-slide-in-up:slide-in-up .5s var(--ease-3);--animation-slide-in-down:slide-in-down .5s var(--ease-3);--animation-slide-in-right:slide-in-right .5s var(--ease-3);--animation-slide-in-left:slide-in-left .5s var(--ease-3);--animation-shake-x:shake-x .75s var(--ease-out-5);--animation-shake-y:shake-y .75s var(--ease-out-5);--animation-shake-z:shake-z 1s var(--ease-in-out-3);--animation-spin:spin 2s linear infinite;--animation-ping:ping 5s var(--ease-out-3) infinite;--animation-blink:blink 1s var(--ease-out-3) infinite;--animation-float:float 3s var(--ease-in-out-3) infinite;--animation-bounce:bounce 2s var(--ease-squish-2) infinite;--animation-pulse:pulse 2s var(--ease-out-3) infinite;--border-size-1:1px;--border-size-2:2px;--border-size-3:5px;--border-size-4:10px;--border-size-5:25px;--radius-1:2px;--radius-2:5px;--radius-3:1rem;--radius-4:2rem;--radius-5:4rem;--radius-6:8rem;--radius-drawn-1:255px 15px 225px 15px/15px 225px 15px 255px;--radius-drawn-2:125px 10px 20px 185px/25px 205px 205px 25px;--radius-drawn-3:15px 255px 15px 225px/225px 15px 255px 15px;--radius-drawn-4:15px 25px 155px 25px/225px 150px 25px 115px;--radius-drawn-5:250px 25px 15px 20px/15px 80px 105px 115px;--radius-drawn-6:28px 100px 20px 15px/150px 30px 205px 225px;--radius-round:1e5px;--radius-blob-1:30% 70% 70% 30%/53% 30% 70% 
47%;--radius-blob-2:53% 47% 34% 66%/63% 46% 54% 37%;--radius-blob-3:37% 63% 56% 44%/49% 56% 44% 51%;--radius-blob-4:63% 37% 37% 63%/43% 37% 63% 57%;--radius-blob-5:49% 51% 48% 52%/57% 44% 56% 43%;--radius-conditional-1:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-1));--radius-conditional-2:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-2));--radius-conditional-3:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-3));--radius-conditional-4:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-4));--radius-conditional-5:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-5));--radius-conditional-6:clamp(0px,calc(100vw - 100%) * 1e5,var(--radius-6))}@media (prefers-color-scheme:dark){:where(html){--shadow-color:220 40% 2%;--shadow-strength:25%;--inner-shadow-highlight:inset 0 -.5px 0 0 hsla(0,0%,100%,.067),inset 0 .5px 0 0 rgba(0,0,0,.467)}}@supports (background:linear-gradient(to right in oklab,#000,#fff)){:where(html){--gradient-space:in oklab}}@keyframes fade-in{to{opacity:1}}@keyframes fade-in-bloom{0%{filter:brightness(1) blur(20px);opacity:0}10%{filter:brightness(2) blur(10px);opacity:1}to{filter:brightness(1) blur(0);opacity:1}}@keyframes fade-out{to{opacity:0}}@keyframes fade-out-bloom{to{filter:brightness(1) blur(20px);opacity:0}10%{filter:brightness(2) blur(10px);opacity:1}0%{filter:brightness(1) blur(0);opacity:1}}@keyframes scale-up{to{transform:scale(1.25)}}@keyframes scale-down{to{transform:scale(.75)}}@keyframes slide-out-up{to{transform:translateY(-100%)}}@keyframes slide-out-down{to{transform:translateY(100%)}}@keyframes slide-out-right{to{transform:translateX(100%)}}@keyframes slide-out-left{to{transform:translateX(-100%)}}@keyframes slide-in-up{0%{transform:translateY(100%)}}@keyframes slide-in-down{0%{transform:translateY(-100%)}}@keyframes slide-in-right{0%{transform:translateX(-100%)}}@keyframes slide-in-left{0%{transform:translateX(100%)}}@keyframes 
shake-x{0%,to{transform:translateX(0)}20%{transform:translateX(-5%)}40%{transform:translateX(5%)}60%{transform:translateX(-5%)}80%{transform:translateX(5%)}}@keyframes shake-y{0%,to{transform:translateY(0)}20%{transform:translateY(-5%)}40%{transform:translateY(5%)}60%{transform:translateY(-5%)}80%{transform:translateY(5%)}}@keyframes shake-z{0%,to{transform:rotate(0deg)}20%{transform:rotate(-2deg)}40%{transform:rotate(2deg)}60%{transform:rotate(-2deg)}80%{transform:rotate(2deg)}}@keyframes spin{to{transform:rotate(1turn)}}@keyframes ping{90%,to{opacity:0;transform:scale(2)}}@keyframes blink{0%,to{opacity:1}50%{opacity:.5}}@keyframes float{50%{transform:translateY(-25%)}}@keyframes bounce{25%{transform:translateY(-20%)}40%{transform:translateY(-3%)}0%,60%,to{transform:translateY(0)}}@keyframes pulse{50%{transform:scale(.9)}}@media (prefers-color-scheme:dark){@keyframes fade-in-bloom{0%{filter:brightness(1) blur(20px);opacity:0}10%{filter:brightness(.5) blur(10px);opacity:1}to{filter:brightness(1) blur(0);opacity:1}}}@media (prefers-color-scheme:dark){@keyframes fade-out-bloom{to{filter:brightness(1) blur(20px);opacity:0}10%{filter:brightness(.5) blur(10px);opacity:1}0%{filter:brightness(1) blur(0);opacity:1}}} \ No newline at end of file diff --git a/crates/echo-session-ws-gateway/assets/vendor/theme.dark.switch.min.css b/crates/echo-session-ws-gateway/assets/vendor/theme.dark.switch.min.css deleted file mode 100644 index 3c1917d7..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/theme.dark.switch.min.css +++ /dev/null @@ -1 +0,0 @@ -:where([data-theme=dark],.dark,.dark-theme){--csstools-color-scheme--light: ;--link:var(--indigo-3);--link-visited:var(--purple-3);--text-1:var(--gray-0);--text-2:var(--gray-4);--surface-1:var(--gray-9);--surface-2:var(--gray-8);--surface-3:var(--gray-7);--surface-4:var(--gray-6);--scrollthumb-color:var(--gray-6);--shadow-strength:10%;--shadow-color:220 40% 2%;--inner-shadow-highlight:inset 0 -.5px 0 0 
hsla(0,0%,100%,.067),inset 0 .5px 0 0 rgba(0,0,0,.467);color-scheme:dark}:where([data-theme=dark],.dark,.dark-theme) :where(dialog){background-color:var(--surface-2)}:where([data-theme=dark],.dark,.dark-theme) :where(button,.btn){--_highlight:var(--_highlight-dark);--_bg:var(--_bg-dark);--_ink-shadow:var(--_ink-shadow-dark)}:where(.dark,.dark-theme,[data-theme=dark]) :where(button,.btn):where([type=reset]){--_text:var(--red-2);--_border:var(--surface-3)}:where(.dark,.dark-theme,[data-theme=dark]) [disabled]:where(button,input[type=button],.btn){--_text:var(--gray-5)}:where(.dark,.dark-theme,[data-theme=dark]) [disabled]:where(button,input[type=submit],.btn){--_text:var(--gray-5)}:where(.dark,.dark-theme,[data-theme=dark]) [disabled]:where(button,input[type=reset],.btn){--_text:var(--gray-5)}:where([data-theme=dark],.dark,.dark-theme) :where(textarea,select,input:not([type=button],[type=submit],[type=reset])){background-color:#171a1c}:where([data-theme=dark],.dark,.dark-theme) :where([disabled]),:where([data-theme=dark],.dark,.dark-theme) :where([type=reset]),:where([data-theme=dark],.dark,.dark-theme) :where([type=submit]),:where([data-theme=dark],.dark,.dark-theme) :where(form button:not([type=button])){--_bg:var(--surface-1)} \ No newline at end of file diff --git a/crates/echo-session-ws-gateway/assets/vendor/theme.light.switch.min.css b/crates/echo-session-ws-gateway/assets/vendor/theme.light.switch.min.css deleted file mode 100644 index 2c014abf..00000000 --- a/crates/echo-session-ws-gateway/assets/vendor/theme.light.switch.min.css +++ /dev/null @@ -1 +0,0 @@ -:where([data-theme=light],.light,.light-theme){--csstools-color-scheme--light:initial;--link:var(--indigo-7);--link-visited:var(--purple-7);--text-1:var(--gray-12);--text-2:var(--gray-7);--surface-1:var(--gray-0);--surface-2:var(--gray-2);--surface-3:var(--gray-3);--surface-4:var(--gray-4);--scrollthumb-color:var(--gray-7);--shadow-color:220 3% 15%;--shadow-strength:1%;--inner-shadow-highlight:inset 0 
-.5px 0 0 #fff,inset 0 .5px 0 0 rgba(0,0,0,.067);color-scheme:light}@media (dynamic-range:high) or (color-gamut:p3){@supports (color:color(display-p3 0 0 0)){:where([data-theme=light],.light,.light-theme){--link:color(display-p3 .1 .39 1);--link-visited:color(display-p3 .6 .2 1)}}}:where([data-theme=light],.light,.light-theme) :where(dialog){background-color:var(--surface-1)}:where([data-theme=light],.light,.light-theme) :where(button,.btn){--_highlight:var(--_highlight-light);--_bg:var(--_bg-light);--_ink-shadow:var(--_ink-shadow-light)}:where(.light,.light-theme,[data-theme=light]) :where(button,.btn):where([type=reset]){--_text:var(--red-6);--_border:var(--red-3)}:where(.light,.light-theme,[data-theme=light]) [disabled]:where(button,input[type=button],.btn){--_text:var(--gray-6)}:where(.light,.light-theme,[data-theme=light]) [disabled]:where(button,input[type=submit],.btn){--_text:var(--gray-6)}:where(.light,.light-theme,[data-theme=light]) [disabled]:where(button,input[type=reset],.btn){--_text:var(--gray-6)}:where([data-theme=light],.light,.light-theme) :where(textarea,select,input:not([type=button],[type=submit],[type=reset])){background-color:var(--surface-2)} \ No newline at end of file diff --git a/crates/echo-wasm-abi/Cargo.toml b/crates/echo-wasm-abi/Cargo.toml index acd4f2c7..4f2829bb 100644 --- a/crates/echo-wasm-abi/Cargo.toml +++ b/crates/echo-wasm-abi/Cargo.toml @@ -18,6 +18,7 @@ ciborium = { version = "0.2", default-features = false } serde-value = { version = "0.7" } half = { version = "2.4", default-features = false, features = ["alloc"] } thiserror = { version = "2.0" } +blake3 = { version = "1.5", default-features = false } [features] default = ["std"] diff --git a/crates/echo-wasm-abi/src/kernel_port.rs b/crates/echo-wasm-abi/src/kernel_port.rs index c8877033..409e621e 100644 --- a/crates/echo-wasm-abi/src/kernel_port.rs +++ b/crates/echo-wasm-abi/src/kernel_port.rs @@ -10,7 +10,7 @@ //! //! # ABI Version //! -//! 
The current ABI version is [`ABI_VERSION`] (8). All response types are +//! The current ABI version is [`ABI_VERSION`] (9). All response types are //! CBOR-encoded using the canonical rules defined in `docs/spec/js-cbor-mapping.md`. //! Breaking changes to response shapes or error codes require a bump to the //! ABI version. @@ -26,6 +26,8 @@ extern crate alloc; +use alloc::boxed::Box; +use alloc::format; use alloc::string::String; use alloc::vec::Vec; use core::fmt; @@ -38,7 +40,7 @@ use serde::{ /// /// Increment when response types, error codes, or method signatures change /// in a backward-incompatible way. -pub const ABI_VERSION: u32 = 8; +pub const ABI_VERSION: u32 = 9; fn deserialize_opaque_id<'de, D>(deserializer: D) -> Result<[u8; 32], D::Error> where @@ -186,6 +188,92 @@ logical_counter!( RunId ); +opaque_id!( + /// Opaque stable identifier for an Echo optic descriptor. + OpticId +); + +opaque_id!( + /// Opaque stable identifier for a generic braid. + BraidId +); + +opaque_id!( + /// Opaque stable identifier for a retained reading key. + RetainedReadingKey +); + +opaque_id!( + /// Opaque stable identifier for the encoding used by a retained reading payload. + RetainedReadingCodecId +); + +opaque_id!( + /// Opaque stable identifier for an intent family allowed through an optic. + IntentFamilyId +); + +opaque_id!( + /// Opaque stable identifier for an admission law used by optic dispatch. + AdmissionLawId +); + +opaque_id!( + /// Opaque stable identifier for an optic capability basis. + OpticCapabilityId +); + +opaque_id!( + /// Opaque stable identifier for an actor opening or using an optic. + OpticActorId +); + +opaque_id!( + /// Opaque stable identifier for an authored or kernel observer plan. + ObserverPlanId +); + +opaque_id!( + /// Opaque stable identifier for a hosted observer instance. + ObserverInstanceId +); + +opaque_id!( + /// Opaque stable identifier for a WARP instance. 
+ WarpId +); + +opaque_id!( + /// Opaque stable identifier for a node within a WARP instance. + NodeId +); + +opaque_id!( + /// Opaque stable identifier for an edge within a WARP instance. + EdgeId +); + +opaque_id!( + /// Opaque stable identifier for a materialization channel. + ChannelId +); + +/// Version of the projection law used by an optic read. +#[repr(transparent)] +#[derive( + Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize, +)] +#[serde(transparent)] +pub struct ProjectionVersion(pub u32); + +/// Version of the reducer law used by an optic read, when a reducer is present. +#[repr(transparent)] +#[derive( + Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize, +)] +#[serde(transparent)] +pub struct ReducerVersion(pub u32); + // --------------------------------------------------------------------------- // Error codes // --------------------------------------------------------------------------- @@ -220,6 +308,14 @@ pub mod error_codes { pub const INVALID_CONTROL: u32 = 13; /// The requested strand is not registered. pub const INVALID_STRAND: u32 = 14; + /// The requested observer plan is not available in this kernel. + pub const UNSUPPORTED_OBSERVER_PLAN: u32 = 15; + /// The requested observer instance is not available in this kernel. + pub const UNSUPPORTED_OBSERVER_INSTANCE: u32 = 16; + /// The requested observation rights posture is not available in this kernel. + pub const UNSUPPORTED_OBSERVATION_RIGHTS: u32 = 17; + /// The requested observation exceeded its explicit read budget. + pub const OBSERVATION_BUDGET_EXCEEDED: u32 = 18; } // --------------------------------------------------------------------------- @@ -436,6 +532,664 @@ pub struct ChannelData { pub data: Vec, } +/// Attachment plane selector for optic boundary reads. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AttachmentPlane { + /// Vertex/node attachment plane. + Alpha, + /// Edge attachment plane. + Beta, +} + +/// Attachment owner reference for optic boundary reads. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum AttachmentOwnerRef { + /// Node-owned attachment. + Node { + /// WARP instance containing the node. + warp_id: WarpId, + /// Node identity within that WARP instance. + node_id: NodeId, + }, + /// Edge-owned attachment. + Edge { + /// WARP instance containing the edge. + warp_id: WarpId, + /// Edge identity within that WARP instance. + edge_id: EdgeId, + }, +} + +/// First-class reference to an attachment boundary. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AttachmentKey { + /// Owner of the attachment slot. + pub owner: AttachmentOwnerRef, + /// Attachment plane selector. + pub plane: AttachmentPlane, +} + +/// Lawful subject named by an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum OpticFocus { + /// A whole worldline. + Worldline { + /// Target worldline. + worldline_id: WorldlineId, + }, + /// A live or retained strand. + Strand { + /// Target strand. + strand_id: StrandId, + }, + /// A generic braid projection. + Braid { + /// Target braid. + braid_id: BraidId, + }, + /// A previously retained reading. + RetainedReading { + /// Retained reading key. + key: RetainedReadingKey, + }, + /// An explicit attachment boundary. + AttachmentBoundary { + /// Attachment boundary key. + key: AttachmentKey, + }, +} + +/// Coordinate selector used by generic optics. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum CoordinateAt { + /// Current frontier at observation or dispatch time. 
+ Frontier, + /// Specific committed tick. + Tick { + /// Per-worldline append identity. + worldline_tick: WorldlineTick, + }, + /// Full provenance coordinate. + Provenance { + /// Provenance coordinate reference. + reference: ProvenanceRef, + }, +} + +/// Causal coordinate named by an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum EchoCoordinate { + /// Coordinate on a worldline. + Worldline { + /// Target worldline. + worldline_id: WorldlineId, + /// Requested position. + at: CoordinateAt, + }, + /// Coordinate on a strand. + Strand { + /// Target strand. + strand_id: StrandId, + /// Requested position. + at: CoordinateAt, + /// Optional parent basis that makes the strand read honest. + parent_basis: Option, + }, + /// Coordinate on a braid projection. + Braid { + /// Target braid. + braid_id: BraidId, + /// Projection digest at the named member frontier. + projection_digest: Vec, + /// Number of members included in the projection. + member_count: u64, + }, + /// Coordinate of a retained reading. + RetainedReading { + /// Retained reading key. + key: RetainedReadingKey, + }, +} + +/// Attachment recursion policy for an optic aperture. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AttachmentDescentPolicy { + /// Stop at the attachment boundary and expose only the boundary reference. + BoundaryOnly, + /// Recursive descent was explicitly requested and remains budget/capability checked. + Explicit, +} + +/// Budget bound for an optic read. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +pub struct OpticReadBudget { + /// Maximum payload bytes to produce. + pub max_bytes: Option, + /// Maximum graph nodes or entities to visit. + pub max_nodes: Option, + /// Maximum causal ticks to reduce. + pub max_ticks: Option, + /// Maximum attachment boundaries to descend through. 
+ pub max_attachments: Option, +} + +/// Bounded aperture shape selected by an optic read. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum OpticApertureShape { + /// Head/frontier metadata only. + Head, + /// Snapshot metadata only. + SnapshotMetadata, + /// Recorded truth channels. + TruthChannels { + /// Optional channel filter. `None` means all recorded channels within budget. + channels: Option>, + }, + /// Contract query bytes identified by query id and vars digest. + QueryBytes { + /// Stable query identifier. + query_id: u32, + /// Hash of canonical query variables. + vars_digest: Vec, + }, + /// Bounded byte range aperture. + ByteRange { + /// Start byte offset. + start: u64, + /// Maximum byte length to return. + len: u64, + }, + /// Explicit attachment boundary. + AttachmentBoundary, +} + +/// Complete aperture for one optic read. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticAperture { + /// Shape of the read aperture. + pub shape: OpticApertureShape, + /// Read budget. + pub budget: OpticReadBudget, + /// Attachment recursion policy. + pub attachment_descent: AttachmentDescentPolicy, +} + +/// Opened optic descriptor. This is not a mutable handle. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct EchoOptic { + /// Stable optic identity derived by the core host. + pub optic_id: OpticId, + /// Lawful subject being observed or targeted by intent dispatch. + pub focus: OpticFocus, + /// Explicit causal coordinate. + pub coordinate: EchoCoordinate, + /// Projection law version. + pub projection_version: ProjectionVersion, + /// Reducer law version, if a reducer participates. + pub reducer_version: Option, + /// Intent family allowed through this optic. + pub intent_family: IntentFamilyId, + /// Capability basis under which the optic was opened. 
+ pub capability: OpticCapabilityId, +} + +/// Reason an optic read identity cannot name a complete witness basis yet. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum MissingWitnessBasisReason { + /// Required witness evidence is unavailable. + EvidenceUnavailable, + /// The requested read exceeded its declared budget. + BudgetLimited, + /// The current capability does not permit revealing the basis. + RightsLimited, + /// The requested basis posture is not supported by this projection law. + UnsupportedBasis, +} + +/// Witness basis named by a read identity. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum WitnessBasis { + /// One resolved provenance commit witnesses the reading. + ResolvedCommit { + /// Provenance coordinate that witnesses the reading. + reference: ProvenanceRef, + /// State root at the witness coordinate. + state_root: Vec, + /// Commit hash at the witness coordinate. + commit_hash: Vec, + }, + /// A checkpoint plus explicit live-tail witness set witnesses the reading. + CheckpointPlusTail { + /// Checkpoint coordinate used as the cold basis. + checkpoint_ref: ProvenanceRef, + /// Checkpoint content hash. + checkpoint_hash: Vec, + /// Live-tail provenance refs reduced after the checkpoint. + tail_witness_refs: Vec, + /// Digest of the live-tail witness set. + tail_digest: Vec, + }, + /// A witness set whose exact semantics are named by the contained refs and digest. + WitnessSet { + /// Witness refs supporting the read. + refs: Vec, + /// Digest over the witness set. + witness_set_hash: Vec, + }, + /// The basis is missing; callers must treat the read as obstructed or incomplete. + Missing { + /// Deterministic reason the basis is missing. + reason: MissingWitnessBasisReason, + }, +} + +/// Stable identity of the question an optic read answered. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ReadIdentity { + /// Stable hash over all identity fields. + pub read_identity_hash: Vec, + /// Optic being observed. + pub optic_id: OpticId, + /// Digest of the focus named by the read. + pub focus_digest: Vec, + /// Coordinate named by the read. + pub coordinate: EchoCoordinate, + /// Digest of the aperture named by the read. + pub aperture_digest: Vec, + /// Projection law version. + pub projection_version: ProjectionVersion, + /// Reducer law version, if present. + pub reducer_version: Option, + /// Witness basis used by the read. + pub witness_basis: WitnessBasis, + /// Rights posture of the emitted reading. + pub rights_posture: ReadingRightsPosture, + /// Budget posture of the emitted reading. + pub budget_posture: ReadingBudgetPosture, + /// Residual posture of the emitted reading. + pub residual_posture: ReadingResidualPosture, +} + +/// Existing reading envelope plus first-class optic read identity. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticReadingEnvelope { + /// Existing observation reading envelope. + pub reading: ReadingEnvelope, + /// Stable read identity for the question this reading answered. + pub read_identity: ReadIdentity, +} + +/// Descriptor for a retained reading payload. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct RetainedReadingDescriptor { + /// Stable key derived from semantic read identity and byte identity. + pub key: RetainedReadingKey, + /// Semantic identity of the question answered by the retained payload. + pub read_identity: ReadIdentity, + /// Content hash of the retained payload bytes. + pub content_hash: Vec, + /// Codec used for the retained payload bytes. + pub codec_id: RetainedReadingCodecId, + /// Retained payload byte length. + pub byte_len: u64, +} + +/// Bounded read request through an Echo optic. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ObserveOpticRequest { + /// Optic being observed. + pub optic_id: OpticId, + /// Focus being observed. + pub focus: OpticFocus, + /// Explicit causal coordinate for the read. + pub coordinate: EchoCoordinate, + /// Bounded aperture selected by the read. + pub aperture: OpticAperture, + /// Projection law version requested by the read. + pub projection_version: ProjectionVersion, + /// Reducer law version requested by the read, when present. + pub reducer_version: Option, + /// Capability basis for the read. + pub capability: OpticCapabilityId, +} + +/// Intent payload dispatched through an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case", deny_unknown_fields)] +pub enum OpticIntentPayload { + /// Canonical Echo intent v1 bytes. + EintV1 { + /// Complete EINT v1 envelope bytes. + bytes: Vec, + }, +} + +/// Write-side proposal request through an Echo optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct DispatchOpticIntentRequest { + /// Optic being used as the proposal boundary. + pub optic_id: OpticId, + /// Explicit causal basis for the proposal. + pub base_coordinate: EchoCoordinate, + /// Intent family being proposed. + pub intent_family: IntentFamilyId, + /// Focus targeted by the proposal. + pub focus: OpticFocus, + /// Actor/cause associated with the proposal. + pub cause: OpticCause, + /// Capability basis for the proposal. + pub capability: OpticCapability, + /// Admission law requested for the proposal. + pub admission_law: AdmissionLawId, + /// Intent payload carried by the proposal. + pub payload: OpticIntentPayload, +} + +/// Deterministic reason an optic read or dispatch could not lawfully proceed. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum OpticObstructionKind { + /// Required witness evidence is unavailable. + MissingWitness, + /// A retained reading named by the optic cannot be found or revealed. + MissingRetainedReading, + /// The dispatch named a base coordinate that is no longer the admitted basis. + StaleBasis, + /// The capability basis does not authorize the requested read or dispatch. + CapabilityDenied, + /// The declared read or dispatch budget was exceeded. + BudgetExceeded, + /// The requested aperture is not supported by this optic or projection law. + UnsupportedAperture, + /// The requested projection law/version is not available. + UnsupportedProjectionLaw, + /// The requested intent family is not available through this optic. + UnsupportedIntentFamily, + /// The read reached an attachment boundary and explicit descent is required. + AttachmentDescentRequired, + /// The requested attachment descent is not authorized. + AttachmentDescentDenied, + /// A live-tail read requires additional bounded reduction before it is honest. + LiveTailRequiresReduction, + /// The requested coordinate names an incompatible frontier. + ConflictingFrontier, + /// The request would collapse plurality without an explicit policy. + PluralityRequiresExplicitPolicy, +} + +/// Typed obstruction returned instead of a hidden fallback or fake success. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticObstruction { + /// Deterministic obstruction kind. + pub kind: OpticObstructionKind, + /// Optic implicated by the obstruction, when known. + pub optic_id: Option, + /// Focus implicated by the obstruction, when known. + pub focus: Option, + /// Coordinate implicated by the obstruction, when known. + pub coordinate: Option, + /// Witness basis posture that explains evidence availability, when known. + pub witness_basis: Option, + /// Human-readable diagnostic text. 
+ pub message: String, +} + +/// Admission result for an optic intent that Echo accepted into witnessed history. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AdmittedIntent { + /// Optic through which the intent was dispatched. + pub optic_id: OpticId, + /// Explicit causal basis named by the dispatch. + pub base_coordinate: EchoCoordinate, + /// Intent family admitted through the optic. + pub intent_family: IntentFamilyId, + /// Provenance coordinate produced or identified by admission. + pub admitted_ref: ProvenanceRef, + /// Receipt digest witnessing the admission outcome. + pub receipt_hash: Vec, +} + +/// Reason an optic intent is staged instead of admitted immediately. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum StagedIntentReason { + /// The proposal needs an explicit rebase before admission can proceed. + RebaseRequired, + /// The proposal is waiting for additional capability evidence. + AwaitingCapability, + /// The proposal is waiting for additional witness evidence. + AwaitingWitness, + /// The proposal was deliberately staged for later explicit admission. + AwaitingExplicitAdmission, +} + +/// Admission result for an optic intent retained without mutating the frontier. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct StagedIntent { + /// Optic through which the intent was dispatched. + pub optic_id: OpticId, + /// Explicit causal basis named by the dispatch. + pub base_coordinate: EchoCoordinate, + /// Intent family proposed through the optic. + pub intent_family: IntentFamilyId, + /// Stable digest or storage key for the staged proposal. + pub stage_ref: Vec, + /// Deterministic reason the proposal is staged. + pub reason: StagedIntentReason, +} + +/// Admission result that preserves lawful plurality instead of selecting one winner. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct PluralIntent { + /// Optic through which the intent was dispatched. + pub optic_id: OpticId, + /// Explicit causal basis named by the dispatch. + pub base_coordinate: EchoCoordinate, + /// Intent family proposed through the optic. + pub intent_family: IntentFamilyId, + /// Candidate coordinates that remain lawful plural outcomes. + pub candidate_refs: Vec, + /// Residual posture associated with the preserved plurality. + pub residual_posture: ReadingResidualPosture, +} + +/// Deterministic conflict reason for an optic intent dispatch. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum IntentConflictReason { + /// The named base coordinate is no longer the applicable basis. + StaleBasis, + /// The request conflicts with the named or observed frontier. + ConflictingFrontier, + /// Capability evidence conflicts with the requested operation. + CapabilityConflict, + /// The verified footprint conflicts with concurrent causal claims. + FootprintConflict, + /// The requested admission law conflicts with the available host law. + AdmissionLawConflict, + /// The request needs an explicit plurality policy before admission. + UnsupportedPluralityPolicy, +} + +/// Admission result for incompatible causal claims under an optic dispatch. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct IntentConflict { + /// Optic through which the intent was dispatched. + pub optic_id: OpticId, + /// Explicit causal basis named by the dispatch. + pub base_coordinate: EchoCoordinate, + /// Intent family proposed through the optic. + pub intent_family: IntentFamilyId, + /// Deterministic conflict reason. + pub reason: IntentConflictReason, + /// Provenance coordinate implicated by the conflict, when known. + pub conflict_ref: Option, + /// Digest of compact conflict evidence. 
+ pub evidence_digest: Vec, + /// Human-readable diagnostic text. + pub message: String, +} + +/// Typed top-level result for dispatching an intent through an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", content = "outcome", rename_all = "snake_case")] +pub enum IntentDispatchResult { + /// Echo accepted the intent into witnessed history. + Admitted(AdmittedIntent), + /// Echo retained the proposal without mutating the named frontier. + Staged(StagedIntent), + /// Echo preserved lawful plurality instead of selecting a single result. + Plural(PluralIntent), + /// Echo found incompatible causal claims under the named admission law. + Conflict(IntentConflict), + /// Echo could not lawfully proceed because basis, evidence, rights, or law is missing. + Obstructed(OpticObstruction), +} + +/// Auditable cause for opening, closing, observing, or dispatching through an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticCause { + /// Actor associated with the cause. + pub actor: OpticActorId, + /// Stable digest of the host-level cause or request. + pub cause_hash: Vec, + /// Optional diagnostic label for humans. + pub label: Option, +} + +/// Capability grant used while validating an optic descriptor. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticCapability { + /// Stable capability identity retained in opened optic descriptors. + pub capability_id: OpticCapabilityId, + /// Actor to which the capability was issued. + pub actor: OpticActorId, + /// Provenance ref for the issuer or policy source, when available. + pub issuer_ref: Option, + /// Stable digest of the capability policy. + pub policy_hash: Vec, + /// Focus this minimal capability authorizes. + pub allowed_focus: OpticFocus, + /// Projection law version this capability authorizes. + pub projection_version: ProjectionVersion, + /// Reducer law version this capability authorizes, when required. 
+ pub reducer_version: Option, + /// Intent family this capability authorizes. + pub allowed_intent_family: IntentFamilyId, + /// Maximum read budget authorized by this capability. + pub max_budget: OpticReadBudget, +} + +/// Capability posture returned after successfully validating an optic descriptor. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum CapabilityPosture { + /// The descriptor is authorized by the named capability grant. + Granted { + /// Capability identity retained in the opened descriptor. + capability_id: OpticCapabilityId, + /// Actor to which the capability was issued. + actor: OpticActorId, + /// Provenance ref for the issuer or policy source, when available. + issuer_ref: Option, + /// Stable digest of the capability policy. + policy_hash: Vec, + }, +} + +/// Descriptor-validation request for opening a session-local optic resource. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpenOpticRequest { + /// Lawful subject being observed or targeted by intent dispatch. + pub focus: OpticFocus, + /// Explicit causal coordinate for the optic descriptor. + pub coordinate: EchoCoordinate, + /// Projection law version requested by the descriptor. + pub projection_version: ProjectionVersion, + /// Reducer law version requested by the descriptor, when present. + pub reducer_version: Option, + /// Intent family allowed through the opened optic. + pub intent_family: IntentFamilyId, + /// Capability grant used to validate this descriptor. + pub capability: OpticCapability, + /// Auditable cause for opening the descriptor. + pub cause: OpticCause, +} + +/// Successful descriptor-validation result for opening an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpenOpticResult { + /// Opened optic descriptor. This is not a mutable subject handle. 
+ pub optic: EchoOptic, + /// Capability posture that authorized the descriptor. + pub capability_posture: CapabilityPosture, +} + +/// Error returned while opening an optic descriptor. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", content = "obstruction", rename_all = "snake_case")] +pub enum OpticOpenError { + /// Opening failed with a typed obstruction. + Obstructed(OpticObstruction), +} + +/// Request for releasing a session-local optic descriptor resource. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct CloseOpticRequest { + /// Optic descriptor to release from the session. + pub optic_id: OpticId, + /// Auditable cause for closing the descriptor. + pub cause: OpticCause, +} + +/// Result for releasing a session-local optic descriptor resource. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub struct CloseOpticResult { + /// Optic descriptor released from the session. + pub optic_id: OpticId, +} + +/// Error returned while closing an optic descriptor. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", content = "obstruction", rename_all = "snake_case")] +pub enum OpticCloseError { + /// Closing failed with a typed obstruction. + Obstructed(OpticObstruction), +} + +/// Successful bounded reading returned through an optic. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct OpticReading { + /// Reading-envelope metadata. + pub envelope: ReadingEnvelope, + /// Stable read identity for the question this reading answered. + pub read_identity: ReadIdentity, + /// Observation payload emitted by the observer. + pub payload: ObservationPayload, + /// Retained reading key, when the payload was retained. + pub retained: Option, +} + +/// Result of observing an optic. 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", content = "value", rename_all = "snake_case")] +pub enum ObserveOpticResult { + /// The optic emitted a bounded reading. + Reading(Box), + /// The optic could not lawfully emit a reading. + Obstructed(Box), +} + /// Coordinate selector for an observation request. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct ObservationCoordinate { @@ -494,6 +1248,46 @@ pub enum ObservationProjection { }, } +/// Lightweight projection kind used in frame/projection validation errors. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ObservationProjectionKind { + /// Head metadata projection. + Head, + /// Snapshot metadata projection. + Snapshot, + /// Recorded-truth channels projection. + TruthChannels, + /// Query byte projection. + Query, +} + +impl ObservationProjection { + /// Returns the projection kind without retaining projection payload bytes. + #[must_use] + pub fn kind(&self) -> ObservationProjectionKind { + match self { + Self::Head => ObservationProjectionKind::Head, + Self::Snapshot => ObservationProjectionKind::Snapshot, + Self::TruthChannels { .. } => ObservationProjectionKind::TruthChannels, + Self::Query { .. } => ObservationProjectionKind::Query, + } + } +} + +/// Invalid one-shot built-in observation request. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum ObservationRequestError { + /// The declared frame does not support the requested projection kind. + UnsupportedFrameProjection { + /// Declared frame. + frame: ObservationFrame, + /// Requested projection kind. + projection: ObservationProjectionKind, + }, +} + /// Canonical observation request DTO. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct ObservationRequest { @@ -503,6 +1297,87 @@ pub struct ObservationRequest { pub frame: ObservationFrame, /// Requested projection within that frame. pub projection: ObservationProjection, + /// Observer plan the caller is explicitly invoking. + pub observer_plan: ReadingObserverPlan, + /// Hosted observer instance state, when this is not a one-shot read. + pub observer_instance: Option, + /// Declared read budget. + pub budget: ObservationReadBudget, + /// Declared rights posture for the read. + pub rights: ObservationRights, +} + +impl ObservationRequest { + /// Builds a one-shot built-in observation request for the frame/projection pair. + pub fn builtin_one_shot( + coordinate: ObservationCoordinate, + frame: ObservationFrame, + projection: ObservationProjection, + ) -> Result { + let observer_plan = ReadingObserverPlan::Builtin { + plan: builtin_observer_plan_for(&frame, &projection)?, + }; + Ok(Self { + coordinate, + frame, + projection, + observer_plan, + observer_instance: None, + budget: ObservationReadBudget::UnboundedOneShot, + rights: ObservationRights::KernelPublic, + }) + } +} + +fn builtin_observer_plan_for( + frame: &ObservationFrame, + projection: &ObservationProjection, +) -> Result { + match (frame, projection) { + (&ObservationFrame::CommitBoundary, ObservationProjection::Head) => { + Ok(BuiltinObserverPlan::CommitBoundaryHead) + } + (&ObservationFrame::CommitBoundary, ObservationProjection::Snapshot) => { + Ok(BuiltinObserverPlan::CommitBoundarySnapshot) + } + (&ObservationFrame::RecordedTruth, ObservationProjection::TruthChannels { .. }) => { + Ok(BuiltinObserverPlan::RecordedTruthChannels) + } + (&ObservationFrame::QueryView, ObservationProjection::Query { .. 
}) => { + Ok(BuiltinObserverPlan::QueryBytes) + } + _ => Err(ObservationRequestError::UnsupportedFrameProjection { + frame: frame.clone(), + projection: projection.kind(), + }), + } +} + +#[cfg(test)] +mod observation_request_tests { + use super::{ + ObservationAt, ObservationCoordinate, ObservationFrame, ObservationProjection, + ObservationRequest, WorldlineId, + }; + + #[test] + fn builtin_one_shot_rejects_invalid_frame_projection() { + let result = ObservationRequest::builtin_one_shot( + ObservationCoordinate { + worldline_id: WorldlineId::from_bytes([1; 32]), + at: ObservationAt::Frontier, + }, + ObservationFrame::RecordedTruth, + ObservationProjection::Head, + ); + assert!(matches!( + result, + Err(super::ObservationRequestError::UnsupportedFrameProjection { + frame: ObservationFrame::RecordedTruth, + projection: super::ObservationProjectionKind::Head, + }) + )); + } } /// Resolved coordinate returned with every observation artifact. @@ -593,6 +1468,23 @@ pub enum BuiltinObserverPlan { QueryBytes, } +/// Authored observer plan identity. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct AuthoredObserverPlan { + /// Stable plan identity. + pub plan_id: ObserverPlanId, + /// Hash of the generated or installed observer artifact. + pub artifact_hash: Vec, + /// Hash of the authored schema or contract family. + pub schema_hash: Vec, + /// Hash of the observer state schema. + pub state_schema_hash: Vec, + /// Hash of the observer update law. + pub update_law_hash: Vec, + /// Hash of the observer emission law. + pub emission_law_hash: Vec, +} + /// Observer plan identity for a reading artifact. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "snake_case")] @@ -602,6 +1494,22 @@ pub enum ReadingObserverPlan { /// Built-in plan selected by the observation frame/projection pair. plan: BuiltinObserverPlan, }, + /// Authored/generated observer plan. 
+ Authored { + /// Authored plan identity and law hashes. + plan: Box, + }, +} + +/// Hosted observer instance identity. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ObserverInstanceRef { + /// Runtime instance identity. + pub instance_id: ObserverInstanceId, + /// Plan that owns this instance. + pub plan_id: ObserverPlanId, + /// Hash of the accumulated observer state. + pub state_hash: Vec, } /// Native observer basis used by the emitted reading. @@ -616,6 +1524,35 @@ pub enum ReadingObserverBasis { QueryView, } +/// Read budget requested by an observation caller. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum ObservationReadBudget { + /// One-shot built-in observer with no caller-specified slice budget. + UnboundedOneShot, + /// Caller-bounded read budget. + Bounded { + /// Maximum encoded payload bytes the caller is willing to receive. + max_payload_bytes: u64, + /// Maximum witness references the caller is willing to accept. + max_witness_refs: u64, + }, +} + +/// Rights posture requested by an observation caller. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(tag = "kind", rename_all = "snake_case")] +pub enum ObservationRights { + /// Kernel-public read. + KernelPublic, + /// Capability-scoped read. Echo carries this now but does not execute it + /// until a capability checker is installed for the observer family. + CapabilityScoped { + /// Capability basis named by the caller. + capability: OpticCapabilityId, + }, +} + /// Witness reference carried by a reading artifact. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "snake_case")] @@ -642,6 +1579,17 @@ pub enum ReadingWitnessRef { pub enum ReadingBudgetPosture { /// One-shot built-in observer with no caller-specified slice budget. UnboundedOneShot, + /// Caller-bounded reading that remained within budget. 
+ Bounded { + /// Requested encoded payload byte limit. + max_payload_bytes: u64, + /// Encoded payload bytes emitted. + payload_bytes: u64, + /// Requested witness-reference limit. + max_witness_refs: u64, + /// Witness references emitted. + witness_refs: u64, + }, } /// Rights posture for a reading artifact. @@ -671,6 +1619,8 @@ pub enum ReadingResidualPosture { pub struct ReadingEnvelope { /// Observer plan identity. pub observer_plan: ReadingObserverPlan, + /// Hosted observer instance, when the reading used accumulated observer state. + pub observer_instance: Option, /// Native observer basis used by the reading. pub observer_basis: ReadingObserverBasis, /// Witnesses or shell references that support the reading. @@ -1107,6 +2057,71 @@ pub struct WitnessedSuffixAdmissionResponse { pub outcome: WitnessedSuffixAdmissionOutcome, } +/// Request to export a witnessed causal suffix rooted at a known source frontier. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ExportSuffixRequest { + /// Source worldline carrying the suffix. + pub source_worldline_id: WorldlineId, + /// Known source basis before the suffix begins. + pub base_frontier: ProvenanceRef, + /// Optional requested source frontier to export through. + pub target_frontier: Option, + /// Optional basis-relative settlement evidence reused by the exported shell. + pub basis_report: Option, +} + +/// Witnessed suffix bundle exchanged across a hot/cold runtime boundary. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct CausalSuffixBundle { + /// Known source basis before the suffix begins. + pub base_frontier: ProvenanceRef, + /// Source frontier reached by this exported suffix shell. + pub target_frontier: ProvenanceRef, + /// Compact source suffix and its witness digest. + pub source_suffix: WitnessedSuffixShell, + /// Deterministic digest of the bundle identity. 
+ pub bundle_digest: Vec, +} + +/// Obstruction returned when Echo cannot produce a witnessed suffix bundle. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ExportSuffixObstruction { + /// Source coordinate implicated in the obstruction. + pub source_ref: ProvenanceRef, + /// Read-side residual posture associated with the obstruction. + pub residual_posture: ReadingResidualPosture, + /// Deterministic digest of compact obstruction evidence. + pub evidence_digest: Vec, +} + +/// Request to import one witnessed causal suffix bundle by classifying it +/// against a target basis. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ImportSuffixRequest { + /// Source bundle being judged. + pub bundle: CausalSuffixBundle, + /// Worldline receiving the proposed admission. + pub target_worldline_id: WorldlineId, + /// Target basis used while judging admission. + pub target_basis: ProvenanceRef, + /// Optional target-basis evidence for strand/parent realization cases. + pub basis_report: Option, +} + +/// Result of importing one witnessed causal suffix bundle into local admission. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct ImportSuffixResult { + /// Bundle identity retained for shell-equivalence and loop-prevention checks. + pub bundle_digest: Vec, + /// Admission classifier response for the bundle's source suffix. + pub admission: WitnessedSuffixAdmissionResponse, +} + /// Top-level witnessed suffix admission posture. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(tag = "kind", rename_all = "snake_case", deny_unknown_fields)] @@ -1228,6 +2243,171 @@ impl ErrEnvelope { // KernelPort trait // --------------------------------------------------------------------------- +fn optic_focus_matches_coordinate(focus: &OpticFocus, coordinate: &EchoCoordinate) -> bool { + match (focus, coordinate) { + ( + OpticFocus::Worldline { worldline_id }, + EchoCoordinate::Worldline { + worldline_id: coordinate_worldline, + .. + }, + ) => worldline_id == coordinate_worldline, + ( + OpticFocus::Strand { strand_id }, + EchoCoordinate::Strand { + strand_id: coordinate_strand, + .. + }, + ) => strand_id == coordinate_strand, + ( + OpticFocus::Braid { braid_id }, + EchoCoordinate::Braid { + braid_id: coordinate_braid, + .. + }, + ) => braid_id == coordinate_braid, + ( + OpticFocus::RetainedReading { key }, + EchoCoordinate::RetainedReading { + key: coordinate_key, + }, + ) => key == coordinate_key, + (OpticFocus::AttachmentBoundary { .. 
}, _) => true, + _ => false, + } +} + +fn optic_dispatch_obstruction( + request: &DispatchOpticIntentRequest, + kind: OpticObstructionKind, + message: impl Into, +) -> IntentDispatchResult { + IntentDispatchResult::Obstructed(OpticObstruction { + kind, + optic_id: Some(request.optic_id), + focus: Some(request.focus.clone()), + coordinate: Some(request.base_coordinate.clone()), + witness_basis: None, + message: message.into(), + }) +} + +fn validate_optic_dispatch_request( + request: &DispatchOpticIntentRequest, + current_coordinate: Option<&EchoCoordinate>, +) -> Option { + if !optic_focus_matches_coordinate(&request.focus, &request.base_coordinate) { + return Some(optic_dispatch_obstruction( + request, + OpticObstructionKind::ConflictingFrontier, + "optic dispatch focus and base coordinate name different subjects", + )); + } + + if request.capability.actor != request.cause.actor { + return Some(optic_dispatch_obstruction( + request, + OpticObstructionKind::CapabilityDenied, + "optic dispatch capability actor does not match cause actor", + )); + } + + if request.capability.allowed_focus != request.focus { + return Some(optic_dispatch_obstruction( + request, + OpticObstructionKind::CapabilityDenied, + "optic dispatch capability does not authorize focus", + )); + } + + if request.capability.allowed_intent_family != request.intent_family { + return Some(optic_dispatch_obstruction( + request, + OpticObstructionKind::UnsupportedIntentFamily, + "optic dispatch capability does not authorize intent family", + )); + } + + if let Some(current_coordinate) = current_coordinate { + if !coordinates_name_same_subject(&request.base_coordinate, current_coordinate) { + return Some(optic_dispatch_obstruction( + request, + OpticObstructionKind::ConflictingFrontier, + "optic dispatch current coordinate names a different subject", + )); + } + + if base_coordinate_is_stale(&request.base_coordinate, current_coordinate) { + return Some(optic_dispatch_obstruction( + request, + 
OpticObstructionKind::StaleBasis, + "optic dispatch base coordinate is stale relative to current frontier", + )); + } + } + + None +} + +fn coordinates_name_same_subject(base: &EchoCoordinate, current: &EchoCoordinate) -> bool { + match (base, current) { + ( + EchoCoordinate::Worldline { worldline_id, .. }, + EchoCoordinate::Worldline { + worldline_id: current_worldline, + .. + }, + ) => worldline_id == current_worldline, + ( + EchoCoordinate::Strand { strand_id, .. }, + EchoCoordinate::Strand { + strand_id: current_strand, + .. + }, + ) => strand_id == current_strand, + ( + EchoCoordinate::Braid { braid_id, .. }, + EchoCoordinate::Braid { + braid_id: current_braid, + .. + }, + ) => braid_id == current_braid, + ( + EchoCoordinate::RetainedReading { key }, + EchoCoordinate::RetainedReading { key: current_key }, + ) => key == current_key, + _ => false, + } +} + +fn base_coordinate_is_stale(base: &EchoCoordinate, current: &EchoCoordinate) -> bool { + match (base, current) { + ( + EchoCoordinate::Worldline { at, .. } | EchoCoordinate::Strand { at, .. }, + EchoCoordinate::Worldline { at: current_at, .. } + | EchoCoordinate::Strand { at: current_at, .. }, + ) => coordinate_at_tick(at).is_some_and(|base_tick| { + coordinate_at_tick(current_at).is_some_and(|current_tick| base_tick < current_tick) + }), + ( + EchoCoordinate::Braid { member_count, .. }, + EchoCoordinate::Braid { + member_count: current_member_count, + .. + }, + ) => member_count < current_member_count, + _ => false, + } +} + +fn coordinate_at_tick(at: &CoordinateAt) -> Option { + match at { + CoordinateAt::Frontier => None, + CoordinateAt::Tick { worldline_tick } => Some(worldline_tick.0), + CoordinateAt::Provenance { reference } => Some(reference.worldline_tick.0), + } +} + /// App-agnostic kernel boundary for WASM host adapters. /// /// Implementors wrap a specific simulation engine and expose the byte-level @@ -1253,6 +2433,69 @@ pub trait KernelPort { /// newly accepted or a duplicate. 
fn dispatch_intent(&mut self, intent_bytes: &[u8]) -> Result; + /// Returns the current coordinate for an optic focus when the implementation + /// can resolve it cheaply enough to validate stale bases. + fn current_optic_coordinate( + &self, + _focus: &OpticFocus, + ) -> Result, AbiError> { + Ok(None) + } + + /// Propose an intent through an explicit optic dispatch request. + /// + /// The default implementation validates the generic optic/capability + /// request and routes `EintV1` payloads into [`KernelPort::dispatch_intent`]. + /// Because that existing path only ingests an intent into the runtime inbox, + /// the resulting optic outcome is `Staged`, not a fabricated admitted tick. + fn dispatch_optic_intent( + &mut self, + request: DispatchOpticIntentRequest, + ) -> Result { + if let Some(obstruction) = validate_optic_dispatch_request(&request, None) { + return Ok(obstruction); + } + + let current_coordinate = self.current_optic_coordinate(&request.focus)?; + if let Some(obstruction) = + validate_optic_dispatch_request(&request, current_coordinate.as_ref()) + { + return Ok(obstruction); + } + + match &request.payload { + OpticIntentPayload::EintV1 { bytes } => { + if let Err(error) = crate::unpack_intent_v1(bytes) { + return Ok(optic_dispatch_obstruction( + &request, + OpticObstructionKind::UnsupportedIntentFamily, + format!("optic dispatch EINT v1 payload is malformed: {error}"), + )); + } + + let dispatch = self.dispatch_intent(bytes)?; + Ok(IntentDispatchResult::Staged(StagedIntent { + optic_id: request.optic_id, + base_coordinate: request.base_coordinate, + intent_family: request.intent_family, + stage_ref: dispatch.intent_id, + reason: StagedIntentReason::AwaitingExplicitAdmission, + })) + } + } + } + + /// Observe through an explicit optic request. + /// + /// The default implementation reports that optic reads are not supported by + /// this kernel implementation. 
+ fn observe_optic(&self, _request: ObserveOpticRequest) -> Result { + Err(AbiError { + code: error_codes::NOT_SUPPORTED, + message: "observe_optic is not supported by this kernel".into(), + }) + } + /// Observe a worldline at an explicit coordinate and frame. /// /// This is the canonical world-state read entrypoint. The diff --git a/crates/echo-wasm-abi/src/lib.rs b/crates/echo-wasm-abi/src/lib.rs index 6ba299a2..7c1e974c 100644 --- a/crates/echo-wasm-abi/src/lib.rs +++ b/crates/echo-wasm-abi/src/lib.rs @@ -28,6 +28,33 @@ use alloc::string::{String, ToString}; use alloc::vec::Vec; use serde::{Deserialize, Serialize}; +/// Domain separator for query variable digests used by generated optic helpers. +pub const QUERY_VARS_DIGEST_V1_DOMAIN: &[u8] = b"echo-wesley-query-vars/v1\0"; + +/// Hash canonical query variables for `QueryBytes` optic apertures. +#[must_use] +pub fn query_vars_digest_v1(vars_bytes: &[u8]) -> Vec { + let mut hasher = blake3::Hasher::new(); + hasher.update(QUERY_VARS_DIGEST_V1_DOMAIN); + hasher.update(vars_bytes); + hasher.finalize().as_bytes().to_vec() +} + +#[cfg(test)] +mod query_vars_digest_tests { + use super::{QUERY_VARS_DIGEST_V1_DOMAIN, query_vars_digest_v1}; + + #[test] + fn query_vars_digest_is_domain_separated_blake3() { + let vars = b"\xa1evalue\x18*"; + let mut hasher = blake3::Hasher::new(); + hasher.update(QUERY_VARS_DIGEST_V1_DOMAIN); + hasher.update(vars); + + assert_eq!(query_vars_digest_v1(vars), hasher.finalize().as_bytes()); + } +} + pub mod canonical; pub use canonical::{CanonError, decode_value, encode_value}; @@ -50,6 +77,12 @@ pub mod codec; /// Reserved EINT op id for privileged control intents. pub const CONTROL_INTENT_V1_OP_ID: u32 = u32::MAX; +/// Reserved Echo-owned EINT op id for proposing witnessed suffix import. +/// +/// Transport arrival is host I/O until the bundle is wrapped in this canonical +/// intent envelope and admitted through Echo scheduling. 
+pub const IMPORT_SUFFIX_INTENT_V1_OP_ID: u32 = u32::MAX - 1; + /// Errors produced by the Intent Envelope parser. #[derive(Debug, PartialEq, Eq)] pub enum EnvelopeError { @@ -182,6 +215,45 @@ pub fn unpack_control_intent_v1( decode_cbor(vars).map_err(|_| EnvelopeError::Malformed) } +/// Packs a witnessed suffix import proposal into an Echo-owned EINT envelope. +/// +/// The payload is the canonical byte representation of +/// [`kernel_port::ImportSuffixRequest`]. Decoding this envelope only validates +/// the proposal shape; admission still happens through Echo's causal scheduler. +/// +/// # Errors +/// +/// Returns [`EnvelopeError::Malformed`] if the request cannot be encoded as +/// canonical CBOR. Returns [`EnvelopeError::PayloadTooLarge`] if the encoded +/// payload exceeds the EINT v1 `u32` length field. +pub fn pack_import_suffix_intent_v1( + request: &kernel_port::ImportSuffixRequest, +) -> Result, EnvelopeError> { + let bytes = encode_cbor(request).map_err(|_| EnvelopeError::Malformed)?; + pack_intent_v1(IMPORT_SUFFIX_INTENT_V1_OP_ID, &bytes) +} + +/// Unpacks and validates a witnessed suffix import proposal from EINT v1 bytes. +/// +/// The envelope must use [`IMPORT_SUFFIX_INTENT_V1_OP_ID`]. Successful decoding +/// means the payload is a canonical import request, not that the proposed suffix +/// has been admitted. +/// +/// # Errors +/// +/// Returns envelope parse errors from [`unpack_intent_v1`]. Returns +/// [`EnvelopeError::Malformed`] when the op id is not the import op id or the +/// payload is not valid canonical CBOR for [`kernel_port::ImportSuffixRequest`]. 
+pub fn unpack_import_suffix_intent_v1( + bytes: &[u8], +) -> Result { + let (op_id, vars) = unpack_intent_v1(bytes)?; + if op_id != IMPORT_SUFFIX_INTENT_V1_OP_ID { + return Err(EnvelopeError::Malformed); + } + decode_cbor(vars).map_err(|_| EnvelopeError::Malformed) +} + // ----------------------------------------------------------------------------- // Legacy DTOs (Retained for cross-repo compatibility, to be purged later) // ----------------------------------------------------------------------------- @@ -484,6 +556,74 @@ mod tests { ); } + fn sample_provenance_ref(seed: u8, tick: u64) -> kernel_port::ProvenanceRef { + kernel_port::ProvenanceRef { + worldline_id: kernel_port::WorldlineId::from_bytes([seed; 32]), + worldline_tick: kernel_port::WorldlineTick(tick), + commit_hash: vec![seed.wrapping_add(1); 32], + } + } + + fn sample_import_suffix_request() -> kernel_port::ImportSuffixRequest { + let base_frontier = sample_provenance_ref(1, 0); + let target_frontier = sample_provenance_ref(1, 2); + let source_suffix = kernel_port::WitnessedSuffixShell { + source_worldline_id: kernel_port::WorldlineId::from_bytes([1; 32]), + source_suffix_start_tick: kernel_port::WorldlineTick(1), + source_suffix_end_tick: Some(kernel_port::WorldlineTick(2)), + source_entries: vec![sample_provenance_ref(1, 1), target_frontier.clone()], + boundary_witness: Some(base_frontier.clone()), + witness_digest: vec![7; 32], + basis_report: None, + }; + + kernel_port::ImportSuffixRequest { + bundle: kernel_port::CausalSuffixBundle { + base_frontier, + target_frontier, + source_suffix, + bundle_digest: vec![8; 32], + }, + target_worldline_id: kernel_port::WorldlineId::from_bytes([9; 32]), + target_basis: sample_provenance_ref(9, 0), + basis_report: None, + } + } + + #[test] + fn test_import_suffix_intent_round_trip() { + let request = sample_import_suffix_request(); + let packed = pack_import_suffix_intent_v1(&request).unwrap(); + + let (op_id, vars) = unpack_intent_v1(&packed).unwrap(); + 
assert_eq!(op_id, IMPORT_SUFFIX_INTENT_V1_OP_ID); + assert!(!vars.is_empty()); + + let unpacked = unpack_import_suffix_intent_v1(&packed).unwrap(); + assert_eq!(unpacked, request); + } + + #[test] + fn test_import_suffix_intent_rejects_wrong_op_id() { + let payload = encode_cbor(&sample_import_suffix_request()).unwrap(); + let packed = pack_intent_v1(77, &payload).unwrap(); + + assert_eq!( + unpack_import_suffix_intent_v1(&packed), + Err(EnvelopeError::Malformed) + ); + } + + #[test] + fn test_import_suffix_intent_rejects_malformed_payload() { + let packed = pack_intent_v1(IMPORT_SUFFIX_INTENT_V1_OP_ID, &[0xff]).unwrap(); + + assert_eq!( + unpack_import_suffix_intent_v1(&packed), + Err(EnvelopeError::Malformed) + ); + } + #[test] fn test_worldline_id_round_trip_uses_cbor_bytes() { use crate::kernel_port::WorldlineId; @@ -651,6 +791,699 @@ mod tests { } } + #[test] + fn test_optic_core_dtos_round_trip() { + use crate::kernel_port::{ + AttachmentDescentPolicy, BraidId, EchoCoordinate, EchoOptic, ObserveOpticRequest, + OpticAperture, OpticApertureShape, OpticCapabilityId, OpticFocus, OpticId, + OpticReadBudget, ProjectionVersion, ReducerVersion, RetainedReadingKey, WorldlineId, + }; + + let optic = EchoOptic { + optic_id: OpticId::from_bytes([1; 32]), + focus: OpticFocus::Braid { + braid_id: BraidId::from_bytes([2; 32]), + }, + coordinate: EchoCoordinate::RetainedReading { + key: RetainedReadingKey::from_bytes([3; 32]), + }, + projection_version: ProjectionVersion(4), + reducer_version: Some(ReducerVersion(5)), + intent_family: crate::kernel_port::IntentFamilyId::from_bytes([6; 32]), + capability: OpticCapabilityId::from_bytes([7; 32]), + }; + + let bytes = encode_cbor(&optic).unwrap(); + let decoded: EchoOptic = decode_cbor(&bytes).unwrap(); + assert_eq!(decoded, optic); + + let aperture = OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id: 42, + vars_digest: vec![9; 32], + }, + budget: OpticReadBudget { + max_bytes: Some(1024), + max_nodes: 
Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }; + let decoded: OpticAperture = decode_cbor(&encode_cbor(&aperture).unwrap()).unwrap(); + assert_eq!(decoded, aperture); + + let observe = ObserveOpticRequest { + optic_id: optic.optic_id, + focus: optic.focus.clone(), + coordinate: optic.coordinate.clone(), + aperture, + projection_version: optic.projection_version, + reducer_version: optic.reducer_version, + capability: optic.capability, + }; + let decoded: ObserveOpticRequest = decode_cbor(&encode_cbor(&observe).unwrap()).unwrap(); + assert_eq!(decoded, observe); + + let focus = OpticFocus::Worldline { + worldline_id: WorldlineId::from_bytes([8; 32]), + }; + let decoded: OpticFocus = decode_cbor(&encode_cbor(&focus).unwrap()).unwrap(); + assert_eq!(decoded, focus); + } + + #[test] + fn test_optic_generated_binding_dtos_serialize_deterministically() { + use crate::kernel_port::{ + AdmissionLawId, AttachmentDescentPolicy, BraidId, CoordinateAt, + DispatchOpticIntentRequest, EchoCoordinate, IntentFamilyId, ObserveOpticRequest, + OpticActorId, OpticAperture, OpticApertureShape, OpticCapability, OpticCapabilityId, + OpticCause, OpticFocus, OpticId, OpticIntentPayload, OpticReadBudget, + ProjectionVersion, ProvenanceRef, ReducerVersion, RetainedReadingKey, StrandId, + WorldlineId, WorldlineTick, + }; + + let worldline_id = WorldlineId::from_bytes([1; 32]); + let strand_id = StrandId::from_bytes([2; 32]); + let braid_id = BraidId::from_bytes([3; 32]); + let retained_key = RetainedReadingKey::from_bytes([4; 32]); + let optic_id = OpticId::from_bytes([5; 32]); + let capability_id = OpticCapabilityId::from_bytes([6; 32]); + let intent_family = IntentFamilyId::from_bytes([7; 32]); + let actor = OpticActorId::from_bytes([8; 32]); + let cause = OpticCause { + actor, + cause_hash: vec![9; 32], + label: Some("generated optic helper".into()), + }; + let focus = OpticFocus::Worldline { worldline_id }; 
+ let coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Provenance { + reference: ProvenanceRef { + worldline_id, + worldline_tick: WorldlineTick(11), + commit_hash: vec![12; 32], + }, + }, + }; + let aperture = OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id: 1002, + vars_digest: vec![13; 32], + }, + budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(16), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }; + let observe = ObserveOpticRequest { + optic_id, + focus: focus.clone(), + coordinate: coordinate.clone(), + aperture, + projection_version: ProjectionVersion(1), + reducer_version: Some(ReducerVersion(2)), + capability: capability_id, + }; + let capability = OpticCapability { + capability_id, + actor, + issuer_ref: None, + policy_hash: vec![14; 32], + allowed_focus: focus.clone(), + projection_version: ProjectionVersion(1), + reducer_version: Some(ReducerVersion(2)), + allowed_intent_family: intent_family, + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(16), + max_attachments: Some(0), + }, + }; + let dispatch = DispatchOpticIntentRequest { + optic_id, + base_coordinate: coordinate, + intent_family, + focus, + cause, + capability, + admission_law: AdmissionLawId::from_bytes([15; 32]), + payload: OpticIntentPayload::EintV1 { + bytes: pack_intent_v1(1001, b"optic-vars").unwrap(), + }, + }; + + let observe_bytes = encode_cbor(&observe).unwrap(); + assert_eq!(observe_bytes, encode_cbor(&observe).unwrap()); + let decoded: ObserveOpticRequest = decode_cbor(&observe_bytes).unwrap(); + assert_eq!(decoded, observe); + + let dispatch_bytes = encode_cbor(&dispatch).unwrap(); + assert_eq!(dispatch_bytes, encode_cbor(&dispatch).unwrap()); + let decoded: DispatchOpticIntentRequest = decode_cbor(&dispatch_bytes).unwrap(); + assert_eq!(decoded, dispatch); + + for dto in [ + OpticFocus::Strand 
{ strand_id }, + OpticFocus::Braid { braid_id }, + OpticFocus::RetainedReading { key: retained_key }, + ] { + assert_eq!( + decode_cbor::(&encode_cbor(&dto).unwrap()).unwrap(), + dto + ); + } + } + + #[test] + fn test_optic_read_identity_round_trip() { + use crate::kernel_port::{ + AuthoredObserverPlan, BuiltinObserverPlan, EchoCoordinate, MissingWitnessBasisReason, + ObservationAt, ObservationBasisPosture, ObservationCoordinate, ObservationFrame, + ObservationPayload, ObservationProjection, ObservationReadBudget, ObservationRequest, + ObservationRights, ObserveOpticResult, ObserverInstanceId, ObserverInstanceRef, + ObserverPlanId, OpticCapabilityId, OpticId, OpticReading, OpticReadingEnvelope, + ProjectionVersion, ReadIdentity, ReadingBudgetPosture, ReadingEnvelope, + ReadingObserverBasis, ReadingObserverPlan, ReadingResidualPosture, + ReadingRightsPosture, ReadingWitnessRef, RetainedReadingCodecId, + RetainedReadingDescriptor, RetainedReadingKey, WitnessBasis, WorldlineId, + WorldlineTick, + }; + use alloc::boxed::Box; + + let reference = crate::kernel_port::ProvenanceRef { + worldline_id: WorldlineId::from_bytes([1; 32]), + worldline_tick: WorldlineTick(7), + commit_hash: vec![2; 32], + }; + let identity = ReadIdentity { + read_identity_hash: vec![3; 32], + optic_id: OpticId::from_bytes([4; 32]), + focus_digest: vec![5; 32], + coordinate: EchoCoordinate::Worldline { + worldline_id: WorldlineId::from_bytes([1; 32]), + at: crate::kernel_port::CoordinateAt::Frontier, + }, + aperture_digest: vec![6; 32], + projection_version: ProjectionVersion(1), + reducer_version: None, + witness_basis: WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }, + rights_posture: ReadingRightsPosture::KernelPublic, + budget_posture: ReadingBudgetPosture::UnboundedOneShot, + residual_posture: ReadingResidualPosture::Obstructed, + }; + let envelope = OpticReadingEnvelope { + reading: ReadingEnvelope { + observer_plan: ReadingObserverPlan::Builtin { + plan: 
BuiltinObserverPlan::CommitBoundaryHead, + }, + observer_instance: None, + observer_basis: ReadingObserverBasis::CommitBoundary, + witness_refs: vec![ReadingWitnessRef::ResolvedCommit { reference }], + parent_basis_posture: ObservationBasisPosture::Worldline, + budget_posture: ReadingBudgetPosture::UnboundedOneShot, + rights_posture: ReadingRightsPosture::KernelPublic, + residual_posture: ReadingResidualPosture::Obstructed, + }, + read_identity: identity, + }; + + let decoded: OpticReadingEnvelope = decode_cbor(&encode_cbor(&envelope).unwrap()).unwrap(); + assert_eq!(decoded, envelope); + + let optic_result = ObserveOpticResult::Reading(Box::new(OpticReading { + envelope: envelope.reading.clone(), + read_identity: envelope.read_identity.clone(), + payload: ObservationPayload::QueryBytes { data: vec![12, 13] }, + retained: Some(RetainedReadingKey::from_bytes([9; 32])), + })); + let decoded: ObserveOpticResult = + decode_cbor(&encode_cbor(&optic_result).unwrap()).unwrap(); + assert_eq!(decoded, optic_result); + + let retained = RetainedReadingDescriptor { + key: RetainedReadingKey::from_bytes([9; 32]), + read_identity: envelope.read_identity, + content_hash: vec![10; 32], + codec_id: RetainedReadingCodecId::from_bytes([11; 32]), + byte_len: 1024, + }; + let decoded: RetainedReadingDescriptor = + decode_cbor(&encode_cbor(&retained).unwrap()).unwrap(); + assert_eq!(decoded, retained); + + let authored = AuthoredObserverPlan { + plan_id: ObserverPlanId::from_bytes([13; 32]), + artifact_hash: vec![14; 32], + schema_hash: vec![15; 32], + state_schema_hash: vec![16; 32], + update_law_hash: vec![17; 32], + emission_law_hash: vec![18; 32], + }; + let instance = ObserverInstanceRef { + instance_id: ObserverInstanceId::from_bytes([19; 32]), + plan_id: authored.plan_id, + state_hash: vec![20; 32], + }; + let authored_request = ObservationRequest { + coordinate: ObservationCoordinate { + worldline_id: WorldlineId::from_bytes([21; 32]), + at: ObservationAt::Frontier, + }, + 
frame: ObservationFrame::QueryView, + projection: ObservationProjection::Query { + query_id: 9, + vars_bytes: vec![1, 2, 3], + }, + observer_plan: ReadingObserverPlan::Authored { + plan: Box::new(authored), + }, + observer_instance: Some(instance), + budget: ObservationReadBudget::Bounded { + max_payload_bytes: 4096, + max_witness_refs: 4, + }, + rights: ObservationRights::CapabilityScoped { + capability: OpticCapabilityId::from_bytes([22; 32]), + }, + }; + let decoded: ObservationRequest = + decode_cbor(&encode_cbor(&authored_request).unwrap()).unwrap(); + assert_eq!(decoded, authored_request); + + let builtin_request = ObservationRequest::builtin_one_shot( + ObservationCoordinate { + worldline_id: WorldlineId::from_bytes([23; 32]), + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ) + .unwrap(); + assert!(matches!( + builtin_request.observer_plan, + ReadingObserverPlan::Builtin { + plan: BuiltinObserverPlan::CommitBoundaryHead + } + )); + } + + #[test] + fn test_optic_intent_dispatch_result_variants_round_trip() { + use crate::kernel_port::{ + AdmittedIntent, CoordinateAt, EchoCoordinate, IntentConflict, IntentConflictReason, + IntentDispatchResult, IntentFamilyId, MissingWitnessBasisReason, OpticFocus, OpticId, + OpticObstruction, OpticObstructionKind, PluralIntent, ReadingResidualPosture, + StagedIntent, StagedIntentReason, StrandId, WitnessBasis, WorldlineId, WorldlineTick, + }; + + fn classify(result: &IntentDispatchResult) -> &'static str { + match result { + IntentDispatchResult::Admitted(_) => "admitted", + IntentDispatchResult::Staged(_) => "staged", + IntentDispatchResult::Plural(_) => "plural", + IntentDispatchResult::Conflict(_) => "conflict", + IntentDispatchResult::Obstructed(_) => "obstructed", + } + } + + let optic_id = OpticId::from_bytes([1; 32]); + let intent_family = IntentFamilyId::from_bytes([2; 32]); + let worldline_id = WorldlineId::from_bytes([3; 32]); + let base_coordinate = 
EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let admitted_ref = crate::kernel_port::ProvenanceRef { + worldline_id, + worldline_tick: WorldlineTick(4), + commit_hash: vec![5; 32], + }; + + let outcomes = vec![ + IntentDispatchResult::Admitted(AdmittedIntent { + optic_id, + base_coordinate: base_coordinate.clone(), + intent_family, + admitted_ref: admitted_ref.clone(), + receipt_hash: vec![6; 32], + }), + IntentDispatchResult::Staged(StagedIntent { + optic_id, + base_coordinate: base_coordinate.clone(), + intent_family, + stage_ref: vec![7; 32], + reason: StagedIntentReason::AwaitingWitness, + }), + IntentDispatchResult::Plural(PluralIntent { + optic_id, + base_coordinate: base_coordinate.clone(), + intent_family, + candidate_refs: vec![admitted_ref.clone()], + residual_posture: ReadingResidualPosture::PluralityPreserved, + }), + IntentDispatchResult::Conflict(IntentConflict { + optic_id, + base_coordinate: base_coordinate.clone(), + intent_family, + reason: IntentConflictReason::ConflictingFrontier, + conflict_ref: Some(admitted_ref), + evidence_digest: vec![8; 32], + message: "frontier conflict".into(), + }), + IntentDispatchResult::Obstructed(OpticObstruction { + kind: OpticObstructionKind::AttachmentDescentRequired, + optic_id: Some(optic_id), + focus: Some(OpticFocus::Strand { + strand_id: StrandId::from_bytes([9; 32]), + }), + coordinate: Some(base_coordinate), + witness_basis: Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + message: "explicit attachment descent required".into(), + }), + ]; + + let decoded_labels = outcomes + .iter() + .map(|outcome| { + let decoded: IntentDispatchResult = + decode_cbor(&encode_cbor(outcome).unwrap()).unwrap(); + assert_eq!(&decoded, outcome); + classify(&decoded) + }) + .collect::>(); + + assert_eq!( + decoded_labels, + vec!["admitted", "staged", "plural", "conflict", "obstructed"] + ); + } + + #[test] + fn 
test_dispatch_optic_intent_request_round_trip_and_requires_base_coordinate() + -> Result<(), String> { + use crate::kernel_port::{ + AdmissionLawId, CoordinateAt, DispatchOpticIntentRequest, EchoCoordinate, + IntentFamilyId, OpticActorId, OpticCapability, OpticCapabilityId, OpticCause, + OpticFocus, OpticId, OpticIntentPayload, OpticReadBudget, ProjectionVersion, + WorldlineId, + }; + use ciborium::value::Value; + + let worldline_id = WorldlineId::from_bytes([3; 32]); + let focus = OpticFocus::Worldline { worldline_id }; + let base_coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let actor = OpticActorId::from_bytes([4; 32]); + let intent_family = IntentFamilyId::from_bytes([5; 32]); + let payload_bytes = pack_intent_v1(77, b"optic-vars").unwrap(); + let request = DispatchOpticIntentRequest { + optic_id: OpticId::from_bytes([1; 32]), + base_coordinate, + intent_family, + focus: focus.clone(), + cause: OpticCause { + actor, + cause_hash: vec![6; 32], + label: Some("optic dispatch".into()), + }, + capability: OpticCapability { + capability_id: OpticCapabilityId::from_bytes([7; 32]), + actor, + issuer_ref: None, + policy_hash: vec![8; 32], + allowed_focus: focus, + projection_version: ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + }, + admission_law: AdmissionLawId::from_bytes([9; 32]), + payload: OpticIntentPayload::EintV1 { + bytes: payload_bytes.clone(), + }, + }; + + let decoded: DispatchOpticIntentRequest = + decode_cbor(&encode_cbor(&request).unwrap()).unwrap(); + assert_eq!(decoded, request); + assert!(matches!( + decoded.payload, + OpticIntentPayload::EintV1 { bytes } if bytes == payload_bytes + )); + + let mut value = decode_value(&encode_cbor(&request).unwrap()).unwrap(); + let Value::Map(fields) = &mut value else { + return 
Err(String::from("encoded request should be a map")); + }; + let position = fields + .iter() + .position(|(key, _)| matches!(key, Value::Text(field) if field == "base_coordinate")) + .unwrap(); + fields.remove(position); + + assert!(decode_cbor::(&encode_value(&value).unwrap()).is_err()); + Ok(()) + } + + #[test] + fn test_kernel_port_dispatch_optic_intent_routes_eint_v1_as_staged_admission() { + use crate::kernel_port::{ + AdmissionLawId, CoordinateAt, DispatchOpticIntentRequest, DispatchResponse, + EchoCoordinate, GlobalTick, IntentDispatchResult, IntentFamilyId, KernelPort, + OpticActorId, OpticCapability, OpticCapabilityId, OpticCause, OpticFocus, OpticId, + OpticIntentPayload, OpticReadBudget, ProjectionVersion, RunCompletion, RunId, + SchedulerMode, SchedulerState, SchedulerStatus, StagedIntentReason, WorkState, + WorldlineId, + }; + + struct RecordingKernel { + dispatched: Vec, + } + + impl KernelPort for RecordingKernel { + fn dispatch_intent( + &mut self, + intent_bytes: &[u8], + ) -> Result { + self.dispatched = intent_bytes.to_vec(); + Ok(DispatchResponse { + accepted: true, + intent_id: vec![10; 32], + scheduler_status: SchedulerStatus { + state: SchedulerState::Inactive, + active_mode: Some(SchedulerMode::UntilIdle { + cycle_limit: Some(1), + }), + work_state: WorkState::Quiescent, + run_id: Some(RunId(1)), + latest_cycle_global_tick: Some(GlobalTick(1)), + latest_commit_global_tick: Some(GlobalTick(1)), + last_quiescent_global_tick: Some(GlobalTick(1)), + last_run_completion: Some(RunCompletion::Quiesced), + }, + }) + } + + fn registry_info(&self) -> kernel_port::RegistryInfo { + kernel_port::RegistryInfo { + codec_id: None, + registry_version: None, + schema_sha256_hex: None, + abi_version: kernel_port::ABI_VERSION, + } + } + + fn scheduler_status(&self) -> Result { + Ok(SchedulerStatus { + state: SchedulerState::Inactive, + active_mode: None, + work_state: WorkState::Quiescent, + run_id: None, + latest_cycle_global_tick: None, + 
latest_commit_global_tick: None, + last_quiescent_global_tick: None, + last_run_completion: None, + }) + } + } + + let worldline_id = WorldlineId::from_bytes([3; 32]); + let focus = OpticFocus::Worldline { worldline_id }; + let base_coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let actor = OpticActorId::from_bytes([4; 32]); + let intent_family = IntentFamilyId::from_bytes([5; 32]); + let payload_bytes = pack_intent_v1(77, b"optic-vars").unwrap(); + let request = DispatchOpticIntentRequest { + optic_id: OpticId::from_bytes([1; 32]), + base_coordinate: base_coordinate.clone(), + intent_family, + focus: focus.clone(), + cause: OpticCause { + actor, + cause_hash: vec![6; 32], + label: Some("optic dispatch".into()), + }, + capability: OpticCapability { + capability_id: OpticCapabilityId::from_bytes([7; 32]), + actor, + issuer_ref: None, + policy_hash: vec![8; 32], + allowed_focus: focus, + projection_version: ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + }, + admission_law: AdmissionLawId::from_bytes([9; 32]), + payload: OpticIntentPayload::EintV1 { + bytes: payload_bytes.clone(), + }, + }; + let mut kernel = RecordingKernel { + dispatched: Vec::new(), + }; + + let result = kernel.dispatch_optic_intent(request).unwrap(); + + assert_eq!(kernel.dispatched, payload_bytes); + assert!(matches!( + result, + IntentDispatchResult::Staged(staged) + if staged.base_coordinate == base_coordinate + && staged.intent_family == intent_family + && staged.stage_ref == vec![10; 32] + && staged.reason == StagedIntentReason::AwaitingExplicitAdmission + )); + } + + #[test] + fn test_optic_open_close_models_round_trip() { + use crate::kernel_port::{ + CapabilityPosture, CloseOpticRequest, CloseOpticResult, CoordinateAt, EchoCoordinate, + EchoOptic, IntentFamilyId, 
OpenOpticRequest, OpenOpticResult, OpticActorId, + OpticCapability, OpticCapabilityId, OpticCause, OpticFocus, OpticId, OpticObstruction, + OpticObstructionKind, OpticOpenError, OpticReadBudget, ProjectionVersion, WorldlineId, + WorldlineTick, + }; + + let worldline_id = WorldlineId::from_bytes([1; 32]); + let focus = OpticFocus::Worldline { worldline_id }; + let coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let actor = OpticActorId::from_bytes([2; 32]); + let capability_id = OpticCapabilityId::from_bytes([3; 32]); + let intent_family = IntentFamilyId::from_bytes([4; 32]); + let issuer_ref = crate::kernel_port::ProvenanceRef { + worldline_id, + worldline_tick: WorldlineTick(5), + commit_hash: vec![6; 32], + }; + let cause = OpticCause { + actor, + cause_hash: vec![7; 32], + label: Some("test open".into()), + }; + let capability = OpticCapability { + capability_id, + actor, + issuer_ref: Some(issuer_ref.clone()), + policy_hash: vec![8; 32], + allowed_focus: focus.clone(), + projection_version: ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + }; + let request = OpenOpticRequest { + focus: focus.clone(), + coordinate: coordinate.clone(), + projection_version: ProjectionVersion(1), + reducer_version: None, + intent_family, + capability, + cause: cause.clone(), + }; + let decoded: OpenOpticRequest = decode_cbor(&encode_cbor(&request).unwrap()).unwrap(); + assert_eq!(decoded, request); + + let result = OpenOpticResult { + optic: EchoOptic { + optic_id: OpticId::from_bytes([9; 32]), + focus, + coordinate: coordinate.clone(), + projection_version: ProjectionVersion(1), + reducer_version: None, + intent_family, + capability: capability_id, + }, + capability_posture: CapabilityPosture::Granted { + capability_id, + actor, + issuer_ref: Some(issuer_ref), + 
policy_hash: vec![8; 32], + }, + }; + let decoded: OpenOpticResult = decode_cbor(&encode_cbor(&result).unwrap()).unwrap(); + assert_eq!(decoded, result); + + let error = OpticOpenError::Obstructed(OpticObstruction { + kind: OpticObstructionKind::CapabilityDenied, + optic_id: None, + focus: None, + coordinate: Some(coordinate), + witness_basis: None, + message: "capability denied".into(), + }); + let decoded: OpticOpenError = decode_cbor(&encode_cbor(&error).unwrap()).unwrap(); + assert_eq!(decoded, error); + + let close_request = CloseOpticRequest { + optic_id: OpticId::from_bytes([9; 32]), + cause, + }; + let decoded: CloseOpticRequest = + decode_cbor(&encode_cbor(&close_request).unwrap()).unwrap(); + assert_eq!(decoded, close_request); + + let close_result = CloseOpticResult { + optic_id: OpticId::from_bytes([9; 32]), + }; + let decoded: CloseOpticResult = decode_cbor(&encode_cbor(&close_result).unwrap()).unwrap(); + assert_eq!(decoded, close_result); + } + #[test] fn test_unpack_control_intent_rejects_wrong_op_id() { use crate::kernel_port::{ControlIntentV1, SchedulerMode}; diff --git a/crates/echo-wasm-abi/src/witnessed_suffix_tests.rs b/crates/echo-wasm-abi/src/witnessed_suffix_tests.rs index f4083348..c6a6b06c 100644 --- a/crates/echo-wasm-abi/src/witnessed_suffix_tests.rs +++ b/crates/echo-wasm-abi/src/witnessed_suffix_tests.rs @@ -8,8 +8,9 @@ use ciborium::value::Value; use crate::{ CanonError, decode_cbor, decode_value, encode_cbor, encode_value, kernel_port::{ - BaseRef, ConflictReason, ProvenanceRef, ReadingResidualPosture, SettlementBasisReport, - SettlementOverlapRevalidation, SettlementParentRevalidation, + BaseRef, CausalSuffixBundle, ConflictReason, ExportSuffixObstruction, ExportSuffixRequest, + ImportSuffixRequest, ImportSuffixResult, ProvenanceRef, ReadingResidualPosture, + SettlementBasisReport, SettlementOverlapRevalidation, SettlementParentRevalidation, WitnessedSuffixAdmissionOutcome, WitnessedSuffixAdmissionRequest, 
WitnessedSuffixAdmissionResponse, WitnessedSuffixShell, WorldlineId, WorldlineTick, }, @@ -86,6 +87,40 @@ fn response(outcome: WitnessedSuffixAdmissionOutcome) -> WitnessedSuffixAdmissio } } +fn export_request() -> ExportSuffixRequest { + ExportSuffixRequest { + source_worldline_id: worldline(3), + base_frontier: provenance_ref(3, 2), + target_frontier: Some(provenance_ref(3, 4)), + basis_report: Some(basis_report()), + } +} + +fn causal_suffix_bundle() -> CausalSuffixBundle { + CausalSuffixBundle { + base_frontier: provenance_ref(3, 2), + target_frontier: provenance_ref(3, 4), + source_suffix: shell_with_entries(vec![provenance_ref(3, 3), provenance_ref(3, 4)]), + bundle_digest: vec![7; 32], + } +} + +fn import_request() -> ImportSuffixRequest { + ImportSuffixRequest { + bundle: causal_suffix_bundle(), + target_worldline_id: worldline(11), + target_basis: provenance_ref(12, 9), + basis_report: Some(basis_report()), + } +} + +fn import_result(outcome: WitnessedSuffixAdmissionOutcome) -> ImportSuffixResult { + ImportSuffixResult { + bundle_digest: vec![7; 32], + admission: response(outcome), + } +} + fn overlap_revalidation() -> SettlementOverlapRevalidation { SettlementOverlapRevalidation::Conflict { overlapping_slot_count: 2, @@ -145,6 +180,74 @@ fn admitted_outcome() -> WitnessedSuffixAdmissionOutcome { } } +#[test] +fn witnessed_suffix_export_request_round_trips() -> Result<(), crate::CanonError> { + let original = export_request(); + let bytes = encode_cbor(&original)?; + let decoded: ExportSuffixRequest = decode_cbor(&bytes)?; + + assert_eq!(decoded, original); + assert_eq!(decoded.base_frontier, provenance_ref(3, 2)); + assert_eq!(decoded.target_frontier, Some(provenance_ref(3, 4))); + Ok(()) +} + +#[test] +fn witnessed_suffix_causal_bundle_round_trips() -> Result<(), crate::CanonError> { + let original = causal_suffix_bundle(); + let bytes = encode_cbor(&original)?; + let decoded: CausalSuffixBundle = decode_cbor(&bytes)?; + + assert_eq!(decoded, original); 
+ assert_eq!(decoded.bundle_digest, vec![7; 32]); + assert_eq!( + decoded.source_suffix.source_entries, + vec![provenance_ref(3, 3), provenance_ref(3, 4)] + ); + Ok(()) +} + +#[test] +fn witnessed_suffix_import_request_round_trips() -> Result<(), crate::CanonError> { + let original = import_request(); + let bytes = encode_cbor(&original)?; + let decoded: ImportSuffixRequest = decode_cbor(&bytes)?; + + assert_eq!(decoded, original); + assert_eq!(decoded.target_basis, provenance_ref(12, 9)); + assert_eq!(decoded.bundle.bundle_digest, vec![7; 32]); + Ok(()) +} + +#[test] +fn witnessed_suffix_import_result_round_trips() -> Result<(), crate::CanonError> { + let original = import_result(admitted_outcome()); + let bytes = encode_cbor(&original)?; + let decoded: ImportSuffixResult = decode_cbor(&bytes)?; + + assert_eq!(decoded, original); + assert_eq!(decoded.bundle_digest, vec![7; 32]); + assert!(matches!( + decoded.admission.outcome, + WitnessedSuffixAdmissionOutcome::Admitted { .. } + )); + Ok(()) +} + +#[test] +fn witnessed_suffix_export_obstruction_round_trips() -> Result<(), crate::CanonError> { + let original = ExportSuffixObstruction { + source_ref: provenance_ref(3, 2), + residual_posture: ReadingResidualPosture::Obstructed, + evidence_digest: vec![8; 32], + }; + let bytes = encode_cbor(&original)?; + let decoded: ExportSuffixObstruction = decode_cbor(&bytes)?; + + assert_eq!(decoded, original); + Ok(()) +} + #[test] fn witnessed_suffix_request_round_trips_with_source_and_target_refs() -> Result<(), crate::CanonError> { diff --git a/crates/echo-wesley-gen/Cargo.toml b/crates/echo-wesley-gen/Cargo.toml index 1a6b91df..18609349 100644 --- a/crates/echo-wesley-gen/Cargo.toml +++ b/crates/echo-wesley-gen/Cargo.toml @@ -14,6 +14,7 @@ categories = ["development-tools", "command-line-utilities", "encoding", "wasm"] [dependencies] anyhow = "1.0" +blake3 = "1.5" clap = { version = "4.4", features = ["derive"] } proc-macro2 = "1.0" quote = "1.0" @@ -21,6 +22,7 @@ serde = { 
version = "1.0", features = ["derive"] } serde_json = "1.0" syn = { version = "2.0", features = ["full", "extra-traits"] } prettyplease = "0.2" +wesley-core = "0.0.2" [lints] diff --git a/crates/echo-wesley-gen/README.md b/crates/echo-wesley-gen/README.md index 7f2b3476..825f8dac 100644 --- a/crates/echo-wesley-gen/README.md +++ b/crates/echo-wesley-gen/README.md @@ -3,13 +3,23 @@ # echo-wesley-gen -CLI tool that reads Wesley IR (JSON) from stdin and emits Rust structs/enums -for Echo. Intended to be driven by the JavaScript generator (packages/wesley-generator-echo) -which now outputs `ir.json` instead of handwritten Rust. +CLI tool that emits Echo Rust structs, operation registries, and optic helper +functions from Wesley contract data. + +The preferred input is GraphQL SDL lowered directly through the published +`wesley-core` crate. The older `echo-ir/v1` JSON stdin path is retained for +fixtures and compatibility while consumers move off the historical JavaScript +generator. ## Usage ```bash +# Generate Rust code directly from GraphQL SDL +cargo run -p echo-wesley-gen -- --schema schema.graphql + +# Write generated Rust from GraphQL SDL to a file +cargo run -p echo-wesley-gen -- --schema schema.graphql --out generated.rs + # Generate Rust code to stdout cat ir.json | cargo run -p echo-wesley-gen -- @@ -20,5 +30,18 @@ cat ir.json | cargo run -p echo-wesley-gen -- --out generated.rs ## Notes - Supports ENUM and OBJECT kinds from Wesley IR. +- Preserves per-operation directive metadata as `OpDef::directives_json`; Echo + admission tooling owns any interpretation of `wes_footprint`. +- Emits footprint certificate constants for operations with `@wes_footprint`; + those certificates include the generated Rust artifact manifest hash and the + operation argument shape, and hosts can verify them through + `echo_registry_api::verify_contract_artifact` before treating the generated + artifact as compile-time-certified. 
+- GraphQL SDL operation ids are derived deterministically and fail closed on + collision. The generator never increments a collided id because operation ids + are persisted ABI. +- Generated query optic helpers use Echo ABI's domain-separated BLAKE3 + `query_vars_digest_v1(...)`; ad hoc variable digests are not accepted for + retained reading identity. - Optional fields become `Option`; lists become `Vec` (wrapped in Option when not required). - Unknown scalar names are emitted as identifiers as-is (so ensure upstream IR types are valid Rust idents). diff --git a/crates/echo-wesley-gen/src/ir.rs b/crates/echo-wesley-gen/src/ir.rs index bd686881..55761ae0 100644 --- a/crates/echo-wesley-gen/src/ir.rs +++ b/crates/echo-wesley-gen/src/ir.rs @@ -2,7 +2,11 @@ // © James Ross Ω FLYING•ROBOTS //! Minimal Wesley IR structs used by echo-wesley-gen. -use serde::Deserialize; +use serde::{Deserialize, Serialize}; + +fn empty_directives() -> serde_json::Value { + serde_json::Value::Object(serde_json::Map::new()) +} /// Root Wesley IR payload consumed by `echo-wesley-gen`. /// @@ -10,7 +14,7 @@ use serde::Deserialize; /// the upstream generator. Unknown fields are ignored by serde; missing fields /// are defaulted where sensible so the CLI can be tolerant of additive schema /// changes. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct WesleyIR { /// IR schema version tag (e.g. `"echo-ir/v1"`). #[serde(default)] @@ -37,7 +41,7 @@ pub struct WesleyIR { } /// Generator provenance metadata embedded in the IR. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[allow(dead_code)] // Part of IR spec, present for deserialization pub struct GeneratedBy { /// Tool name (package/binary) that produced this IR. @@ -48,7 +52,7 @@ pub struct GeneratedBy { } /// Type definition in the IR type catalog. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct TypeDefinition { /// GraphQL type name. 
pub name: String, @@ -63,7 +67,7 @@ pub struct TypeDefinition { } /// Kind tag for IR type definitions. -#[derive(Debug, Deserialize, PartialEq, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum TypeKind { /// GraphQL object type. @@ -81,7 +85,7 @@ pub enum TypeKind { } /// Operation kind (query or mutation). -#[derive(Debug, Deserialize, PartialEq, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] pub enum OpKind { /// Read-only operation. @@ -91,7 +95,7 @@ pub enum OpKind { } /// Operation definition in the IR operation catalog. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct OpDefinition { /// Operation kind. pub kind: OpKind, @@ -104,6 +108,9 @@ pub struct OpDefinition { pub args: Vec, /// GraphQL result type name. pub result_type: String, + /// Generic operation directive metadata preserved for Echo-owned admission tooling. + #[serde(default = "empty_directives")] + pub directives: serde_json::Value, } /// Argument definition (used for both operation args and object fields). @@ -112,7 +119,7 @@ pub struct OpDefinition { /// the same shape (name + base type + required + list). We keep distinct Rust /// wrapper types (`ArgDefinition` and `FieldDefinition`) so call sites can /// remain semantically explicit even if the JSON schema evolves. -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct ArgDefinition { /// Field/argument name. pub name: String, @@ -127,7 +134,7 @@ pub struct ArgDefinition { } /// Object field definition (same schema as [`ArgDefinition`]; kept for semantic clarity). -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] pub struct FieldDefinition { /// Field name. 
pub name: String, diff --git a/crates/echo-wesley-gen/src/main.rs b/crates/echo-wesley-gen/src/main.rs index bb4be975..85b83d95 100644 --- a/crates/echo-wesley-gen/src/main.rs +++ b/crates/echo-wesley-gen/src/main.rs @@ -3,12 +3,13 @@ #![allow(clippy::print_stdout, clippy::print_stderr)] //! CLI that reads Wesley IR JSON from stdin and emits Rust structs/enums for Echo. -use anyhow::Result; +use anyhow::{bail, Result}; use clap::Parser; use proc_macro2::TokenStream; use quote::{format_ident, quote}; use std::collections::BTreeMap; use std::io::{self, Read}; +use std::path::PathBuf; /// Create an identifier safely, falling back to a raw identifier for Rust keywords. fn safe_ident(name: &str) -> proc_macro2::Ident { @@ -19,6 +20,11 @@ fn safe_ident(name: &str) -> proc_macro2::Ident { mod ir; use ir::{OpKind, TypeKind, WesleyIR}; +const ECHO_IR_VERSION: &str = "echo-ir/v1"; +const DEFAULT_CODEC_ID: &str = "cbor-canon-v1"; +const DEFAULT_REGISTRY_VERSION: u32 = 1; +const WESLEY_CORE_VERSION: &str = "0.0.2"; + #[derive(Parser)] #[command( author, @@ -26,9 +32,13 @@ use ir::{OpKind, TypeKind, WesleyIR}; about = "Generates Echo Rust artifacts from Wesley IR" )] struct Args { + /// Read GraphQL SDL directly and lower it with wesley-core. + #[arg(long)] + schema: Option, + /// Optional output path (defaults to stdout) #[arg(short, long)] - out: Option, + out: Option, /// Emit code compatible with no_std environments #[arg(long, default_value_t = false)] @@ -42,11 +52,18 @@ struct Args { fn main() -> Result<()> { let args = Args::parse(); - let mut buffer = String::new(); - io::stdin().read_to_string(&mut buffer)?; + let ir = if let Some(schema_path) = &args.schema { + let schema_sdl = std::fs::read_to_string(schema_path)?; + echo_ir_from_schema_sdl(&schema_sdl)? 
+ } else { + let mut buffer = String::new(); + io::stdin().read_to_string(&mut buffer)?; + + let ir: WesleyIR = serde_json::from_str(&buffer)?; + validate_version(&ir)?; + ir + }; - let ir: WesleyIR = serde_json::from_str(&buffer)?; - validate_version(&ir)?; let code = generate_rust(&ir, &args)?; if let Some(path) = args.out { @@ -58,6 +75,141 @@ fn main() -> Result<()> { Ok(()) } +fn echo_ir_from_schema_sdl(schema_sdl: &str) -> Result { + let l1_ir = wesley_core::lower_schema_sdl(schema_sdl)?; + let schema_sha256 = wesley_core::compute_registry_hash(&l1_ir)?; + let mut operations = wesley_core::list_schema_operations_sdl(schema_sdl)?; + operations.sort_by_key(operation_sort_key); + + let mut used_op_ids = BTreeMap::new(); + let mut ops = Vec::with_capacity(operations.len()); + for operation in operations { + let op_id = stable_op_id(&operation.operation_type, &operation.field_name); + if op_id == 0 { + bail!( + "generated operation id collision sentinel for {:?} `{}`; \ + add explicit operation ids upstream before generating Echo artifacts", + operation.operation_type, + operation.field_name + ); + } + if let Some((existing_type, existing_name)) = used_op_ids.insert( + op_id, + (operation.operation_type, operation.field_name.clone()), + ) { + bail!( + "generated operation id collision: {:?} `{}` and {:?} `{}` both map to {op_id}; \ + add explicit operation ids upstream before generating Echo artifacts", + existing_type, + existing_name, + operation.operation_type, + operation.field_name + ); + } + + ops.push(ir::OpDefinition { + kind: op_kind_from_wesley(operation.operation_type), + name: operation.field_name, + op_id, + args: operation + .arguments + .into_iter() + .map(|argument| ir::ArgDefinition { + name: argument.name, + type_name: argument.r#type.base, + required: !argument.r#type.nullable, + list: argument.r#type.is_list, + }) + .collect(), + result_type: operation.result_type.base, + directives: serde_json::to_value(operation.directives)?, + }); + } + + 
Ok(WesleyIR { + ir_version: Some(ECHO_IR_VERSION.to_string()), + generated_by: Some(ir::GeneratedBy { + tool: "wesley-core".to_string(), + version: Some(WESLEY_CORE_VERSION.to_string()), + }), + schema_sha256: Some(schema_sha256), + types: l1_ir + .types + .into_iter() + .map(type_definition_from_wesley) + .collect(), + ops, + codec_id: Some(DEFAULT_CODEC_ID.to_string()), + registry_version: Some(DEFAULT_REGISTRY_VERSION), + }) +} + +fn operation_sort_key(operation: &wesley_core::SchemaOperation) -> (u8, String) { + ( + operation_type_rank(operation.operation_type), + operation.field_name.clone(), + ) +} + +fn operation_type_rank(operation_type: wesley_core::OperationType) -> u8 { + match operation_type { + wesley_core::OperationType::Query => 0, + wesley_core::OperationType::Mutation => 1, + wesley_core::OperationType::Subscription => 2, + } +} + +fn op_kind_from_wesley(operation_type: wesley_core::OperationType) -> OpKind { + match operation_type { + wesley_core::OperationType::Query | wesley_core::OperationType::Subscription => { + OpKind::Query + } + wesley_core::OperationType::Mutation => OpKind::Mutation, + } +} + +fn type_definition_from_wesley(type_definition: wesley_core::TypeDefinition) -> ir::TypeDefinition { + ir::TypeDefinition { + name: type_definition.name, + kind: type_kind_from_wesley(type_definition.kind), + fields: type_definition + .fields + .into_iter() + .map(|field| ir::FieldDefinition { + name: field.name, + type_name: field.r#type.base, + required: !field.r#type.nullable, + list: field.r#type.is_list, + }) + .collect(), + values: type_definition.enum_values, + } +} + +fn type_kind_from_wesley(type_kind: wesley_core::TypeKind) -> TypeKind { + match type_kind { + wesley_core::TypeKind::Object => TypeKind::Object, + wesley_core::TypeKind::Interface => TypeKind::Interface, + wesley_core::TypeKind::Union => TypeKind::Union, + wesley_core::TypeKind::Enum => TypeKind::Enum, + wesley_core::TypeKind::Scalar => TypeKind::Scalar, + 
wesley_core::TypeKind::InputObject => TypeKind::InputObject, + } +} + +fn stable_op_id(operation_type: &wesley_core::OperationType, field_name: &str) -> u32 { + let mut hash = 2_166_136_261_u32; + hash = fnv1a_step(hash, operation_type_rank(*operation_type)); + for byte in field_name.as_bytes() { + hash = fnv1a_step(hash, *byte); + } + hash +} + +fn fnv1a_step(hash: u32, byte: u8) -> u32 { + hash.wrapping_mul(16_777_619) ^ u32::from(byte) +} + fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { validate_generated_item_names(ir)?; @@ -87,11 +239,13 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { let schema_sha = ir.schema_sha256.as_deref().unwrap_or(""); let codec_id = ir.codec_id.as_deref().unwrap_or("cbor-canon-v1"); let registry_version = ir.registry_version.unwrap_or(1); + let generated_rust_artifact_hash = generated_rust_artifact_hash(ir, args)?; tokens.extend(quote! { pub const SCHEMA_SHA256: &str = #schema_sha; pub const CODEC_ID: &str = #codec_id; pub const REGISTRY_VERSION: u32 = #registry_version; + pub const GENERATED_RUST_ARTIFACT_HASH: &str = #generated_rust_artifact_hash; }); for type_def in &ir.types { @@ -152,10 +306,28 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { } if !ir.ops.is_empty() { - tokens.extend(quote! { - // Registry provider types (Echo runtime loads an app-supplied implementation). - use echo_registry_api::{ArgDef, EnumDef, ObjectDef, OpDef, OpKind, RegistryInfo, RegistryProvider}; - }); + let mut ops_sorted: Vec<_> = ir.ops.iter().collect(); + ops_sorted.sort_unstable_by_key(|op| op.op_id); + let footprint_certificates = ops_sorted + .iter() + .map(|op| { + let certificate = op_footprint_certificate(ir, op, &generated_rust_artifact_hash)?; + Ok((op.op_id, certificate)) + }) + .collect::>>()?; + let has_footprint_certificates = footprint_certificates.values().any(Option::is_some); + + if has_footprint_certificates { + tokens.extend(quote! 
{ + // Registry provider types (Echo runtime loads an app-supplied implementation). + use echo_registry_api::{ArgDef, EnumDef, FootprintCertificate, ObjectDef, OpDef, OpKind, RegistryInfo, RegistryProvider}; + }); + } else { + tokens.extend(quote! { + // Registry provider types (Echo runtime loads an app-supplied implementation). + use echo_registry_api::{ArgDef, EnumDef, ObjectDef, OpDef, OpKind, RegistryInfo, RegistryProvider}; + }); + } let mut enum_defs: Vec<_> = ir .types @@ -221,10 +393,8 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { ]; }); - let mut ops_sorted: Vec<_> = ir.ops.iter().collect(); - ops_sorted.sort_unstable_by_key(|op| op.op_id); - - // Op ID constants + arg descriptors (sorted by op_id for deterministic iteration). + // Op ID constants + arg descriptors + footprint certificates + // (sorted by op_id for deterministic iteration). for op in &ops_sorted { let const_name = op_const_ident(&op.name, op.op_id); let args_name = format_ident!("{}_ARGS", const_name); @@ -242,6 +412,42 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { #(#args),* ]; }); + + if let Some(certificate) = footprint_certificates + .get(&op.op_id) + .and_then(|value| value.as_ref()) + { + let reads_name = format_ident!("{}_FOOTPRINT_READS", const_name); + let writes_name = format_ident!("{}_FOOTPRINT_WRITES", const_name); + let artifact_hash_name = format_ident!("{}_FOOTPRINT_ARTIFACT_HASH", const_name); + let certificate_hash_name = + format_ident!("{}_FOOTPRINT_CERTIFICATE_HASH", const_name); + let certificate_name = format_ident!("{}_FOOTPRINT_CERTIFICATE", const_name); + let op_name = &op.name; + let reads = certificate.reads.iter(); + let writes = certificate.writes.iter(); + let artifact_hash = certificate.artifact_hash_hex.as_str(); + let certificate_hash = certificate.certificate_hash_hex.as_str(); + tokens.extend(quote! 
{ + pub const #reads_name: &[&str] = &[ + #(#reads),* + ]; + pub const #writes_name: &[&str] = &[ + #(#writes),* + ]; + pub const #artifact_hash_name: &str = #artifact_hash; + pub const #certificate_hash_name: &str = #certificate_hash; + pub const #certificate_name: FootprintCertificate = FootprintCertificate { + op_id: #const_name, + op_name: #op_name, + schema_sha256_hex: SCHEMA_SHA256, + artifact_hash_hex: #artifact_hash_name, + certificate_hash_hex: #certificate_hash_name, + reads: #reads_name, + writes: #writes_name, + }; + }); + } } let mut helper_prelude = TokenStream::new(); @@ -255,16 +461,37 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { }); } - if ir.ops.iter().any(|op| op.kind == OpKind::Query) { + let has_query_ops = ir.ops.iter().any(|op| op.kind == OpKind::Query); + let has_mutation_ops = ir.ops.iter().any(|op| op.kind == OpKind::Mutation); + + if has_query_ops { helper_prelude.extend(quote! { use echo_wasm_abi::kernel_port::{ + AttachmentDescentPolicy, EchoCoordinate, ObserveOpticRequest, OpticAperture, + OpticApertureShape, OpticCapabilityId, OpticFocus, OpticId, OpticReadBudget, ObservationAt, ObservationCoordinate, ObservationFrame, ObservationProjection, - ObservationRequest, WorldlineId, + ObservationRequest, ProjectionVersion, ReducerVersion, WorldlineId, }; }); } - if ir.ops.iter().any(|op| op.kind == OpKind::Mutation) { + if has_mutation_ops { + if has_query_ops { + helper_prelude.extend(quote! { + use echo_wasm_abi::kernel_port::{ + AdmissionLawId, DispatchOpticIntentRequest, IntentFamilyId, + OpticCapability, OpticCause, OpticIntentPayload, + }; + }); + } else { + helper_prelude.extend(quote! { + use echo_wasm_abi::kernel_port::{ + AdmissionLawId, DispatchOpticIntentRequest, EchoCoordinate, + IntentFamilyId, OpticCapability, OpticCause, OpticFocus, OpticId, + OpticIntentPayload, + }; + }); + } helper_prelude.extend(quote! 
{ use echo_wasm_abi::pack_intent_v1; @@ -279,9 +506,18 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { }); } + if has_query_ops { + helper_tokens.extend(quote! { + fn generated_vars_digest(vars_bytes: &[u8]) -> Vec { + echo_wasm_abi::query_vars_digest_v1(vars_bytes) + } + }); + } + for op in &ops_sorted { let const_name = op_const_ident(&op.name, op.op_id); - let helper_name = format_ident!("{}", to_snake_case(&op.name)); + let helper_name_string = to_snake_case(&op.name); + let helper_name = format_ident!("{}", helper_name_string); let vars_name = format_ident!("{}Vars", to_pascal_case(&op.name)); let vars_fields = op.args.iter().map(|a| { let field_name = safe_ident(&a.name); @@ -316,8 +552,18 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { OpKind::Mutation => { let fn_name = format_ident!("pack_{}_intent", helper_name); let raw_fn_name = format_ident!("pack_{}_intent_raw_vars", helper_name); + let optic_helper_name = + format_ident!("{}", optic_mutation_helper_stem(&op.name)); + let optic_fn_name = + format_ident!("{}_dispatch_optic_intent_request", optic_helper_name); + let optic_raw_fn_name = format_ident!( + "{}_dispatch_optic_intent_request_raw_vars", + optic_helper_name + ); helper_exports.push(fn_name.clone()); helper_exports.push(raw_fn_name.clone()); + helper_exports.push(optic_fn_name.clone()); + helper_exports.push(optic_raw_fn_name.clone()); helper_tokens.extend(quote! { /// Encode this mutation's vars and pack them into an EINT v1 intent. pub fn #fn_name(vars: &#vars_name) -> Result, GeneratedIntentError> { @@ -329,13 +575,69 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { pub fn #raw_fn_name(vars: &[u8]) -> Result, echo_wasm_abi::EnvelopeError> { pack_intent_v1(super::#const_name, vars) } + + /// Build an optic intent-dispatch request for this mutation. 
+ #[allow(clippy::too_many_arguments)] + pub fn #optic_fn_name( + optic_id: OpticId, + base_coordinate: EchoCoordinate, + intent_family: IntentFamilyId, + focus: OpticFocus, + cause: OpticCause, + capability: OpticCapability, + admission_law: AdmissionLawId, + vars: &#vars_name, + ) -> Result { + let vars_bytes = #encode_fn_name(vars).map_err(GeneratedIntentError::EncodeVars)?; + #optic_raw_fn_name( + optic_id, + base_coordinate, + intent_family, + focus, + cause, + capability, + admission_law, + &vars_bytes, + ) + } + + /// Build an optic intent-dispatch request from already-canonical vars bytes. + #[allow(clippy::too_many_arguments)] + pub fn #optic_raw_fn_name( + optic_id: OpticId, + base_coordinate: EchoCoordinate, + intent_family: IntentFamilyId, + focus: OpticFocus, + cause: OpticCause, + capability: OpticCapability, + admission_law: AdmissionLawId, + vars: &[u8], + ) -> Result { + let bytes = pack_intent_v1(super::#const_name, vars) + .map_err(GeneratedIntentError::PackEnvelope)?; + Ok(DispatchOpticIntentRequest { + optic_id, + base_coordinate, + intent_family, + focus, + cause, + capability, + admission_law, + payload: OpticIntentPayload::EintV1 { bytes }, + }) + } }); } OpKind::Query => { let fn_name = format_ident!("{}_observation_request", helper_name); let raw_fn_name = format_ident!("{}_observation_request_raw_vars", helper_name); + let optic_fn_name = format_ident!("{}_observe_optic_request", helper_name); + let optic_raw_fn_name = + format_ident!("{}_observe_optic_request_raw_vars", helper_name); helper_exports.push(fn_name.clone()); helper_exports.push(raw_fn_name.clone()); + helper_exports.push(optic_fn_name.clone()); + helper_exports.push(optic_raw_fn_name.clone()); helper_tokens.extend(quote! { /// Encode this query's vars and build a frontier query-view observation request. 
pub fn #fn_name(worldline_id: WorldlineId, vars: &#vars_name) -> Result { @@ -345,16 +647,72 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { /// Build a frontier query-view request from already-canonical vars bytes. pub fn #raw_fn_name(worldline_id: WorldlineId, vars: &[u8]) -> ObservationRequest { - ObservationRequest { - coordinate: ObservationCoordinate { + ObservationRequest::builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: ObservationFrame::QueryView, - projection: ObservationProjection::Query { + ObservationFrame::QueryView, + ObservationProjection::Query { query_id: super::#const_name, vars_bytes: Vec::from(vars), }, + ) + .expect("generated query observation request uses a valid frame/projection pair") + } + + /// Encode this query's vars and build a bounded optic read request. + #[allow(clippy::too_many_arguments)] + pub fn #optic_fn_name( + optic_id: OpticId, + focus: OpticFocus, + coordinate: EchoCoordinate, + capability: OpticCapabilityId, + projection_version: ProjectionVersion, + reducer_version: Option, + budget: OpticReadBudget, + vars: &#vars_name, + ) -> Result { + let vars_bytes = #encode_fn_name(vars)?; + Ok(#optic_raw_fn_name( + optic_id, + focus, + coordinate, + capability, + projection_version, + reducer_version, + budget, + &vars_bytes, + )) + } + + /// Build a bounded optic read request from already-canonical vars bytes. 
+ #[allow(clippy::too_many_arguments)] + pub fn #optic_raw_fn_name( + optic_id: OpticId, + focus: OpticFocus, + coordinate: EchoCoordinate, + capability: OpticCapabilityId, + projection_version: ProjectionVersion, + reducer_version: Option, + budget: OpticReadBudget, + vars: &[u8], + ) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id, + focus, + coordinate, + aperture: OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id: super::#const_name, + vars_digest: generated_vars_digest(vars), + }, + budget, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }, + projection_version, + reducer_version, + capability, } } }); @@ -379,17 +737,42 @@ fn generate_rust(ir: &WesleyIR, args: &Args) -> Result { }); // OPS table (sorted by op_id). - let ops_entries = ops_sorted.iter().map(|op| { - let kind = match op.kind { - OpKind::Query => quote! { OpKind::Query }, - OpKind::Mutation => quote! { OpKind::Mutation }, - }; - let name = &op.name; - let op_id = op.op_id; - let args_name = format_ident!("{}_ARGS", op_const_ident(&op.name, op.op_id)); - let result_ty = &op.result_type; - quote! { OpDef { kind: #kind, name: #name, op_id: #op_id, args: #args_name, result_ty: #result_ty } } - }); + let ops_entries = ops_sorted + .iter() + .map(|op| { + let kind = match op.kind { + OpKind::Query => quote! { OpKind::Query }, + OpKind::Mutation => quote! { OpKind::Mutation }, + }; + let name = &op.name; + let op_id = op.op_id; + let args_name = format_ident!("{}_ARGS", op_const_ident(&op.name, op.op_id)); + let result_ty = &op.result_type; + let directives_json = op_directives_json(op)?; + let footprint_certificate = if footprint_certificates + .get(&op.op_id) + .and_then(|value| value.as_ref()) + .is_some() + { + let const_name = op_const_ident(&op.name, op.op_id); + let certificate_name = format_ident!("{}_FOOTPRINT_CERTIFICATE", const_name); + quote! { Some(&#certificate_name) } + } else { + quote! { None } + }; + Ok(quote! 
{ + OpDef { + kind: #kind, + name: #name, + op_id: #op_id, + args: #args_name, + result_ty: #result_ty, + directives_json: #directives_json, + footprint_certificate: #footprint_certificate, + } + }) + }) + .collect::>>()?; tokens.extend(quote! { pub const OPS: &[OpDef] = &[ @@ -447,6 +830,174 @@ fn op_const_ident(name: &str, op_id: u32) -> proc_macro2::Ident { format_ident!("{}", op_const_name(name, op_id)) } +fn op_directives_json(op: &ir::OpDefinition) -> Result { + serde_json::to_string(&op.directives).map_err(Into::into) +} + +fn generated_rust_artifact_hash(ir: &WesleyIR, args: &Args) -> Result { + let mut type_defs = ir.types.iter().collect::>(); + type_defs.sort_unstable_by(|a, b| a.name.cmp(&b.name)); + let mut op_defs = ir.ops.iter().collect::>(); + op_defs.sort_unstable_by_key(|op| op.op_id); + + let type_catalog_json = serde_json::to_string(&type_defs)?; + let op_catalog_json = serde_json::to_string(&op_defs)?; + let schema_sha = ir.schema_sha256.as_deref().unwrap_or(""); + let codec_id = ir.codec_id.as_deref().unwrap_or(DEFAULT_CODEC_ID); + let registry_version = ir.registry_version.unwrap_or(DEFAULT_REGISTRY_VERSION); + let ir_version = ir.ir_version.as_deref().unwrap_or(""); + let generated_by_json = serde_json::to_string(&ir.generated_by)?; + + let preimage = format!( + concat!( + "echo-wesley-rust-artifact/v1\n", + "generator=echo-wesley-gen\n", + "generator_version={generator_version}\n", + "ir_version={ir_version}\n", + "schema_sha256={schema_sha}\n", + "codec_id={codec_id}\n", + "registry_version={registry_version}\n", + "no_std={no_std}\n", + "minicbor={minicbor}\n", + "generated_by={generated_by_json}\n", + "types={type_catalog_json}\n", + "ops={op_catalog_json}\n", + ), + generator_version = env!("CARGO_PKG_VERSION"), + ir_version = ir_version, + schema_sha = schema_sha, + codec_id = codec_id, + registry_version = registry_version, + no_std = args.no_std, + minicbor = args.minicbor, + generated_by_json = generated_by_json, + type_catalog_json 
= type_catalog_json, + op_catalog_json = op_catalog_json, + ); + + Ok(blake3_hex(preimage.as_bytes())) +} + +#[derive(Debug, Clone)] +struct GeneratedFootprintCertificate { + reads: Vec, + writes: Vec, + artifact_hash_hex: String, + certificate_hash_hex: String, +} + +fn op_footprint_certificate( + ir: &WesleyIR, + op: &ir::OpDefinition, + generated_rust_artifact_hash: &str, +) -> Result> { + let Some(footprint) = op.directives.get("wes_footprint") else { + return Ok(None); + }; + + let reads = footprint_string_items(footprint, "reads", &op.name)?; + let writes = footprint_string_items(footprint, "writes", &op.name)?; + let reads_json = serde_json::to_string(&reads)?; + let writes_json = serde_json::to_string(&writes)?; + let args_json = serde_json::to_string(&op.args)?; + let directives_json = op_directives_json(op)?; + let schema_sha = ir.schema_sha256.as_deref().unwrap_or(""); + let codec_id = ir.codec_id.as_deref().unwrap_or(DEFAULT_CODEC_ID); + let registry_version = ir.registry_version.unwrap_or(DEFAULT_REGISTRY_VERSION); + let kind = match op.kind { + OpKind::Query => "QUERY", + OpKind::Mutation => "MUTATION", + }; + + let artifact_preimage = format!( + concat!( + "echo-wesley-footprint-artifact/v1\n", + "schema_sha256={schema_sha}\n", + "codec_id={codec_id}\n", + "registry_version={registry_version}\n", + "op_kind={kind}\n", + "op_id={op_id}\n", + "op_name={op_name}\n", + "result_type={result_type}\n", + "args={args_json}\n", + "generated_rust_artifact_hash={generated_rust_artifact_hash}\n", + "reads={reads_json}\n", + "writes={writes_json}\n", + ), + schema_sha = schema_sha, + codec_id = codec_id, + registry_version = registry_version, + kind = kind, + op_id = op.op_id, + op_name = op.name, + result_type = op.result_type, + args_json = args_json, + generated_rust_artifact_hash = generated_rust_artifact_hash, + reads_json = reads_json, + writes_json = writes_json, + ); + let artifact_hash_hex = blake3_hex(artifact_preimage.as_bytes()); + let 
certificate_preimage = format!( + concat!( + "echo-wesley-footprint-certificate/v1\n", + "generator=echo-wesley-gen\n", + "generator_version={generator_version}\n", + "artifact_hash={artifact_hash_hex}\n", + "directives_json={directives_json}\n", + ), + generator_version = env!("CARGO_PKG_VERSION"), + artifact_hash_hex = artifact_hash_hex, + directives_json = directives_json, + ); + let certificate_hash_hex = blake3_hex(certificate_preimage.as_bytes()); + + Ok(Some(GeneratedFootprintCertificate { + reads, + writes, + artifact_hash_hex, + certificate_hash_hex, + })) +} + +fn footprint_string_items( + footprint: &serde_json::Value, + key: &str, + op_name: &str, +) -> Result> { + let Some(value) = footprint_argument_value(footprint, key) else { + return Ok(Vec::new()); + }; + let serde_json::Value::Array(items) = value else { + bail!("wes_footprint.{key} for operation `{op_name}` must be an array of strings"); + }; + + let mut values = Vec::with_capacity(items.len()); + for item in items { + let Some(item) = item.as_str() else { + bail!("wes_footprint.{key} for operation `{op_name}` must contain only strings"); + }; + values.push(item.to_string()); + } + values.sort(); + values.dedup(); + Ok(values) +} + +fn footprint_argument_value<'a>( + footprint: &'a serde_json::Value, + key: &str, +) -> Option<&'a serde_json::Value> { + footprint.get(key).or_else(|| { + footprint + .get("arguments") + .and_then(|arguments| arguments.get(key)) + }) +} + +fn blake3_hex(input: &[u8]) -> String { + blake3::hash(input).to_hex().to_string() +} + fn op_const_name(name: &str, op_id: u32) -> String { let mut out = String::new(); for (i, c) in name.chars().enumerate() { @@ -517,6 +1068,15 @@ fn to_snake_case(name: &str) -> String { } } +fn optic_mutation_helper_stem(name: &str) -> String { + let stem = to_snake_case(name); + if stem == "set" || stem.starts_with("set_") { + format!("propose_{stem}") + } else { + stem + } +} + fn validate_version(ir: &WesleyIR) -> Result<()> { const 
SUPPORTED: &str = "echo-ir/v1"; match ir.ir_version.as_deref() { @@ -609,11 +1169,19 @@ fn validate_generated_item_names(ir: &WesleyIR) -> Result<()> { "generated intent helper error", )?; } + if ir.ops.iter().any(|op| op.kind == OpKind::Query) { + record_generated_item( + &mut helper_items, + "generated_vars_digest", + "generated optic query vars digest helper", + )?; + } for op in &ir.ops { let kind = op_kind_name(&op.kind); let const_name = op_const_name(&op.name, op.op_id); let helper_name = to_snake_case(&op.name); + let optic_mutation_helper_name = optic_mutation_helper_stem(&op.name); record_generated_item( &mut top_level_items, @@ -663,6 +1231,32 @@ fn validate_generated_item_names(ir: &WesleyIR) -> Result<()> { format!("pack_{helper_name}_intent_raw_vars"), format!("mutation operation `{}` raw EINT helper re-export", op.name), )?; + record_generated_item( + &mut helper_items, + format!("{optic_mutation_helper_name}_dispatch_optic_intent_request"), + format!("mutation operation `{}` optic dispatch helper", op.name), + )?; + record_generated_item( + &mut helper_items, + format!("{optic_mutation_helper_name}_dispatch_optic_intent_request_raw_vars"), + format!("mutation operation `{}` raw optic dispatch helper", op.name), + )?; + record_generated_item( + &mut top_level_items, + format!("{optic_mutation_helper_name}_dispatch_optic_intent_request"), + format!( + "mutation operation `{}` optic dispatch helper re-export", + op.name + ), + )?; + record_generated_item( + &mut top_level_items, + format!("{optic_mutation_helper_name}_dispatch_optic_intent_request_raw_vars"), + format!( + "mutation operation `{}` raw optic dispatch helper re-export", + op.name + ), + )?; } OpKind::Query => { record_generated_item( @@ -688,6 +1282,32 @@ fn validate_generated_item_names(ir: &WesleyIR) -> Result<()> { op.name ), )?; + record_generated_item( + &mut helper_items, + format!("{helper_name}_observe_optic_request"), + format!("query operation `{}` optic observe helper", 
op.name), + )?; + record_generated_item( + &mut helper_items, + format!("{helper_name}_observe_optic_request_raw_vars"), + format!("query operation `{}` raw optic observe helper", op.name), + )?; + record_generated_item( + &mut top_level_items, + format!("{helper_name}_observe_optic_request"), + format!( + "query operation `{}` optic observe helper re-export", + op.name + ), + )?; + record_generated_item( + &mut top_level_items, + format!("{helper_name}_observe_optic_request_raw_vars"), + format!( + "query operation `{}` raw optic observe helper re-export", + op.name + ), + )?; } } } diff --git a/crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json b/crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json index e3a93076..c6690708 100644 --- a/crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json +++ b/crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json @@ -61,7 +61,20 @@ "required": true } ], - "result_type": "CounterValue" + "result_type": "CounterValue", + "directives": { + "wes_op": { + "name": "increment" + }, + "wes_footprint": { + "reads": [ + "CounterValue" + ], + "writes": [ + "CounterValue" + ] + } + } }, { "kind": "QUERY", diff --git a/crates/echo-wesley-gen/tests/generation.rs b/crates/echo-wesley-gen/tests/generation.rs index 88915197..96952f58 100644 --- a/crates/echo-wesley-gen/tests/generation.rs +++ b/crates/echo-wesley-gen/tests/generation.rs @@ -35,6 +35,24 @@ fn run_wesley_gen_with_args(ir: &str, args: &[&str]) -> Output { child.wait_with_output().expect("failed to wait on child") } +fn run_wesley_gen_schema(schema_path: &Path) -> Output { + Command::new("cargo") + .args(["run", "-p", "echo-wesley-gen", "--", "--schema"]) + .arg(schema_path) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .output() + .expect("failed to run cargo run") +} + +fn generated_str_const<'a>(source: &'a str, name: &str) -> &'a str { + let prefix = format!("pub const {name}: &str = \""); + let start = 
source.find(&prefix).expect("missing generated const") + prefix.len(); + let rest = &source[start..]; + let end = rest.find('"').expect("generated const is not closed"); + &rest[..end] +} + fn workspace_root() -> PathBuf { Path::new(env!("CARGO_MANIFEST_DIR")) .ancestors() @@ -43,6 +61,106 @@ fn workspace_root() -> PathBuf { .to_path_buf() } +#[test] +fn test_generate_from_graphql_schema_uses_wesley_core() { + let workspace = workspace_root(); + let fixture_dir = workspace + .join("target") + .join("echo-wesley-gen-schema-fixture") + .join(std::process::id().to_string()); + fs::create_dir_all(&fixture_dir).expect("failed to create schema fixture dir"); + let schema_path = fixture_dir.join("counter.graphql"); + fs::write( + &schema_path, + r#" +directive @wes_op(name: String!) on FIELD_DEFINITION +directive @wes_footprint(reads: [String!], writes: [String!]) on FIELD_DEFINITION + +type CounterValue { + value: Int! +} + +input IncrementInput { + amount: Int! +} + +type Query { + counterValue: CounterValue! @wes_op(name: "counterValue") +} + +type Mutation { + increment(input: IncrementInput!): CounterValue! 
+ @wes_op(name: "increment") + @wes_footprint(reads: ["CounterValue"], writes: ["CounterValue"]) +} +"#, + ) + .expect("failed to write schema fixture"); + + let output = run_wesley_gen_schema(&schema_path); + + assert!( + output.status.success(), + "CLI failed: {}", + String::from_utf8_lossy(&output.stderr) + ); + let stdout = String::from_utf8_lossy(&output.stdout); + + assert!(stdout.contains("pub struct CounterValue")); + assert!(stdout.contains("pub struct IncrementInput")); + assert!(stdout.contains("pub const CODEC_ID: &str = \"cbor-canon-v1\"")); + assert!(stdout.contains("pub const REGISTRY_VERSION: u32 = 1")); + assert!(stdout.contains("pub const OP_COUNTER_VALUE: u32 =")); + assert!(stdout.contains("pub const OP_INCREMENT: u32 =")); + assert!(stdout.contains("pub struct CounterValueVars")); + assert!(stdout.contains("pub struct IncrementVars")); + assert!(stdout.contains("pub fn counter_value_observe_optic_request")); + assert!(stdout.contains("pub fn increment_dispatch_optic_intent_request")); + assert!(stdout.contains("directives_json:")); + assert!(stdout.contains("\\\"wes_footprint\\\"")); + assert!(stdout.contains("OP_INCREMENT_FOOTPRINT_CERTIFICATE")); + assert!(stdout.contains("footprint_certificate: Some(&OP_INCREMENT_FOOTPRINT_CERTIFICATE)")); +} + +#[test] +fn test_schema_operation_id_collision_fails_closed() { + let workspace = workspace_root(); + let fixture_dir = workspace + .join("target") + .join("echo-wesley-gen-schema-fixture") + .join(format!("{}-collision", std::process::id())); + fs::create_dir_all(&fixture_dir).expect("failed to create schema fixture dir"); + let schema_path = fixture_dir.join("counter.graphql"); + fs::write( + &schema_path, + r#" +directive @wes_op(name: String!) on FIELD_DEFINITION + +type CounterValue { + value: Int! +} + +type Query { + qbkxqtpuqmzm1zzt: CounterValue! @wes_op(name: "qbkxqtpuqmzm1zzt") + qgcpfdz1bsy: CounterValue! 
@wes_op(name: "qgcpfdz1bsy") +} +"#, + ) + .expect("failed to write schema fixture"); + + let output = run_wesley_gen_schema(&schema_path); + + assert!( + !output.status.success(), + "schema op id collision should fail closed" + ); + assert!( + String::from_utf8_lossy(&output.stderr).contains("operation id collision"), + "stderr did not explain collision: {}", + String::from_utf8_lossy(&output.stderr) + ); +} + fn write_consumer_smoke_crate(generated: &str) -> PathBuf { let workspace = workspace_root(); let crate_dir = workspace @@ -56,6 +174,7 @@ fn write_consumer_smoke_crate(generated: &str) -> PathBuf { let registry_path = workspace.join("crates/echo-registry-api"); let wasm_abi_path = workspace.join("crates/echo-wasm-abi"); + let warp_wasm_path = workspace.join("crates/warp-wasm"); fs::write( crate_dir.join("Cargo.toml"), format!( @@ -70,10 +189,13 @@ publish = false [dependencies] echo-registry-api = {{ path = "{}" }} echo-wasm-abi = {{ path = "{}" }} +warp-wasm = {{ path = "{}" }} +blake3 = "1" serde = {{ version = "1.0", features = ["derive"] }} -"#, + "#, registry_path.display(), - wasm_abi_path.display() + wasm_abi_path.display(), + warp_wasm_path.display() ), ) .expect("failed to write smoke Cargo.toml"); @@ -89,19 +211,28 @@ mod tests { use super::generated::{ __echo_wesley_generated::{CounterValueVars, IncrementVars}, counter_value_observation_request, counter_value_observation_request_raw_vars, - encode_counter_value_vars, pack_increment_intent, IncrementInput, CODEC_ID, - OP_COUNTER_VALUE, OP_INCREMENT, REGISTRY, REGISTRY_VERSION, SCHEMA_SHA256, + counter_value_observe_optic_request, counter_value_observe_optic_request_raw_vars, + encode_counter_value_vars, increment_dispatch_optic_intent_request, pack_increment_intent, + IncrementInput, CODEC_ID, OP_COUNTER_VALUE, OP_INCREMENT, + OP_INCREMENT_FOOTPRINT_ARTIFACT_HASH, OP_INCREMENT_FOOTPRINT_CERTIFICATE_HASH, REGISTRY, + REGISTRY_VERSION, SCHEMA_SHA256, + }; + use echo_registry_api::{ + 
verify_contract_artifact, ContractArtifactTrustPosture, ContractArtifactVerificationPolicy, + ExpectedFootprintCertificate, OpKind, RegistryProvider, }; - use echo_registry_api::{OpKind, RegistryProvider}; use echo_wasm_abi::kernel_port::{ - AbiError, BuiltinObserverPlan, DispatchResponse, KernelPort, ObservationArtifact, + AbiError, AdmissionLawId, BuiltinObserverPlan, CoordinateAt, DispatchOpticIntentRequest, + DispatchResponse, EchoCoordinate, IntentFamilyId, KernelPort, ObservationArtifact, ObservationAt, ObservationBasisPosture, ObservationFrame, ObservationPayload, - ObservationProjection, ReadingBudgetPosture, ReadingEnvelope, ReadingObserverBasis, - ReadingObserverPlan, ReadingResidualPosture, ReadingRightsPosture, ReadingWitnessRef, - RegistryInfo, ResolvedObservationCoordinate, RunCompletion, SchedulerState, - SchedulerStatus, WorkState, WorldlineId, WorldlineTick, ABI_VERSION, + ObservationProjection, ObserveOpticRequest, OpticActorId, OpticApertureShape, + OpticCapability, OpticCapabilityId, OpticCause, OpticFocus, OpticIntentPayload, + OpticReadBudget, ProjectionVersion, ReadingBudgetPosture, ReadingEnvelope, + ReadingObserverBasis, ReadingObserverPlan, ReadingResidualPosture, ReadingRightsPosture, + ReadingWitnessRef, RegistryInfo, ResolvedObservationCoordinate, RunCompletion, OkEnvelope, + SchedulerState, SchedulerStatus, WorkState, WorldlineId, WorldlineTick, ABI_VERSION, }; - use echo_wasm_abi::{decode_cbor, unpack_intent_v1}; + use echo_wasm_abi::{decode_cbor, encode_cbor, unpack_intent_v1}; #[derive(Default)] struct ToyKernel { @@ -181,6 +312,7 @@ mod tests { observer_plan: ReadingObserverPlan::Builtin { plan: BuiltinObserverPlan::QueryBytes, }, + observer_instance: None, observer_basis: ReadingObserverBasis::QueryView, witness_refs: vec![ReadingWitnessRef::EmptyFrontier { worldline_id, @@ -222,7 +354,61 @@ mod tests { assert_eq!(registry_info.registry_version, REGISTRY_VERSION); assert_eq!(registry_info.schema_sha256_hex, SCHEMA_SHA256); 
assert_eq!(REGISTRY.op_by_id(OP_INCREMENT).unwrap().kind, OpKind::Mutation); + assert!( + REGISTRY + .op_by_id(OP_INCREMENT) + .unwrap() + .directives_json + .contains("\"wes_footprint\"") + ); + let increment_certificate = REGISTRY + .op_by_id(OP_INCREMENT) + .unwrap() + .footprint_certificate + .expect("increment operation must carry a footprint certificate"); + assert_eq!(increment_certificate.op_id, OP_INCREMENT); + assert_eq!(increment_certificate.op_name, "increment"); + assert_eq!(increment_certificate.schema_sha256_hex, SCHEMA_SHA256); + assert_eq!( + increment_certificate.certificate_hash_hex, + OP_INCREMENT_FOOTPRINT_CERTIFICATE_HASH + ); + assert!(REGISTRY + .op_by_id(OP_INCREMENT) + .unwrap() + .footprint_certificate_matches(SCHEMA_SHA256, OP_INCREMENT_FOOTPRINT_CERTIFICATE_HASH)); + assert!(!REGISTRY + .op_by_id(OP_INCREMENT) + .unwrap() + .footprint_certificate_matches(SCHEMA_SHA256, "wrong-hash")); + assert_eq!(increment_certificate.reads, &["CounterValue"]); + assert_eq!(increment_certificate.writes, &["CounterValue"]); + let expected_certificates = [ExpectedFootprintCertificate { + op_id: OP_INCREMENT, + certificate_hash_hex: OP_INCREMENT_FOOTPRINT_CERTIFICATE_HASH, + artifact_hash_hex: Some(OP_INCREMENT_FOOTPRINT_ARTIFACT_HASH), + }]; + let artifact_policy = ContractArtifactVerificationPolicy { + codec_id: CODEC_ID, + registry_version: REGISTRY_VERSION, + schema_sha256_hex: SCHEMA_SHA256, + footprint_certificates: &expected_certificates, + require_mutation_footprint_certificates: true, + }; + let verified_artifact = verify_contract_artifact(®ISTRY, &artifact_policy) + .expect("generated contract artifact should verify"); + assert_eq!( + verified_artifact.posture, + ContractArtifactTrustPosture::CompileTimeCertified + ); assert_eq!(REGISTRY.op_by_id(OP_COUNTER_VALUE).unwrap().kind, OpKind::Query); + assert_eq!( + REGISTRY + .op_by_id(OP_COUNTER_VALUE) + .unwrap() + .footprint_certificate, + None + ); let intent = 
pack_increment_intent(&IncrementVars { input: IncrementInput { amount: 42 }, @@ -261,6 +447,137 @@ mod tests { data: b"counter=42".to_vec() } ); + + let optic_id = echo_wasm_abi::kernel_port::OpticId::from_bytes([10; 32]); + let capability_id = OpticCapabilityId::from_bytes([11; 32]); + let actor = OpticActorId::from_bytes([12; 32]); + let intent_family = IntentFamilyId::from_bytes([13; 32]); + let focus = OpticFocus::Worldline { worldline_id }; + let coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let budget = OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }; + let optic_read = counter_value_observe_optic_request( + optic_id, + focus.clone(), + coordinate.clone(), + capability_id, + ProjectionVersion(1), + None, + budget, + &query_vars, + ) + .unwrap(); + let raw_optic_read = counter_value_observe_optic_request_raw_vars( + optic_id, + focus.clone(), + coordinate.clone(), + capability_id, + ProjectionVersion(1), + None, + budget, + &encoded_query_vars, + ); + assert_eq!(optic_read, raw_optic_read); + let mut expected_vars_hasher = blake3::Hasher::new(); + expected_vars_hasher.update(b"echo-wesley-query-vars/v1\0"); + expected_vars_hasher.update(&encoded_query_vars); + let expected_vars_digest = expected_vars_hasher.finalize().as_bytes().to_vec(); + let decoded_read: ObserveOpticRequest = + decode_cbor(&encode_cbor(&optic_read).unwrap()).unwrap(); + assert_eq!(decoded_read, optic_read); + assert!(matches!( + optic_read.aperture.shape, + OpticApertureShape::QueryBytes { query_id, ref vars_digest } + if query_id == OP_COUNTER_VALUE + && vars_digest == &expected_vars_digest + )); + + let capability = OpticCapability { + capability_id, + actor, + issuer_ref: None, + policy_hash: vec![14; 32], + allowed_focus: focus.clone(), + projection_version: ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: budget, + 
}; + let cause = OpticCause { + actor, + cause_hash: vec![15; 32], + label: Some("generated optic dispatch".into()), + }; + let dispatch = increment_dispatch_optic_intent_request( + optic_id, + coordinate.clone(), + intent_family, + focus, + cause, + capability, + AdmissionLawId::from_bytes([16; 32]), + &IncrementVars { + input: IncrementInput { amount: 42 }, + }, + ) + .unwrap(); + let decoded_dispatch: DispatchOpticIntentRequest = + decode_cbor(&encode_cbor(&dispatch).unwrap()).unwrap(); + assert_eq!(decoded_dispatch, dispatch); + assert_eq!(dispatch.base_coordinate, coordinate); + let OpticIntentPayload::EintV1 { bytes } = &dispatch.payload; + let (op_id, vars_bytes) = unpack_intent_v1(bytes).unwrap(); + assert_eq!(op_id, OP_INCREMENT); + let decoded: IncrementVars = decode_cbor(vars_bytes).unwrap(); + assert_eq!(decoded.input.amount, 42); + } + + #[test] + fn generated_contract_runs_through_installed_warp_wasm_kernel() { + let kernel = ToyKernel::default(); + warp_wasm::install_kernel(Box::new(kernel)); + + let registry_envelope: OkEnvelope = + decode_cbor(&warp_wasm::get_registry_info_cbor()).unwrap(); + assert_eq!(registry_envelope.data.codec_id.as_deref(), Some(CODEC_ID)); + assert_eq!( + registry_envelope.data.registry_version.as_deref(), + Some(REGISTRY_VERSION.to_string().as_str()) + ); + assert_eq!( + registry_envelope.data.schema_sha256_hex.as_deref(), + Some(SCHEMA_SHA256) + ); + + let intent = pack_increment_intent(&IncrementVars { + input: IncrementInput { amount: 42 }, + }) + .unwrap(); + let dispatch_envelope: OkEnvelope = + decode_cbor(&warp_wasm::dispatch_intent_cbor(&intent)).unwrap(); + assert!(dispatch_envelope.data.accepted); + assert_eq!(dispatch_envelope.data.intent_id, vec![7; 32]); + + let worldline_id = WorldlineId::from_bytes([9; 32]); + let request = counter_value_observation_request(worldline_id, &CounterValueVars {}) + .unwrap(); + let request_bytes = encode_cbor(&request).unwrap(); + let observe_envelope: OkEnvelope = + 
decode_cbor(&warp_wasm::observe_cbor(&request_bytes)).unwrap(); + assert_eq!(observe_envelope.data.frame, ObservationFrame::QueryView); + assert_eq!( + observe_envelope.data.payload, + ObservationPayload::QueryBytes { + data: b"counter=42".to_vec() + } + ); } } "#, @@ -345,6 +662,272 @@ mod generated; crate_dir } +fn write_optic_binding_smoke_crate() -> PathBuf { + let workspace = workspace_root(); + let crate_dir = workspace + .join("target") + .join("echo-wesley-gen-optic-binding-smoke") + .join(std::process::id().to_string()); + if crate_dir.exists() { + fs::remove_dir_all(&crate_dir).expect("failed to remove old optic smoke crate"); + } + fs::create_dir_all(crate_dir.join("src")).expect("failed to create optic smoke crate"); + + let wasm_abi_path = workspace.join("crates/echo-wasm-abi"); + fs::write( + crate_dir.join("Cargo.toml"), + format!( + r#"[package] +name = "echo-wesley-gen-optic-binding-smoke" +version = "0.0.0" +edition = "2021" +publish = false + +[workspace] + +[dependencies] +echo-wasm-abi = {{ path = "{}" }} +serde = {{ version = "1.0", features = ["derive"] }} +"#, + wasm_abi_path.display() + ), + ) + .expect("failed to write optic smoke Cargo.toml"); + + fs::write( + crate_dir.join("src/generated.rs"), + r" +use echo_wasm_abi::kernel_port::{ + AdmissionLawId, AttachmentDescentPolicy, DispatchOpticIntentRequest, EchoCoordinate, + IntentFamilyId, ObserveOpticRequest, OpticAperture, OpticApertureShape, OpticCapability, + OpticCause, OpticFocus, OpticId, OpticIntentPayload, OpticReadBudget, ProjectionVersion, + ReducerVersion, +}; +use echo_wasm_abi::pack_intent_v1; + +pub const OP_INCREMENT: u32 = 1001; +pub const OP_COUNTER_VALUE: u32 = 1002; + +#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] +pub struct IncrementVars { + pub amount: i32, +} + +#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] +pub struct CounterValueVars {} + +#[derive(Debug)] +pub enum GeneratedOpticIntentError { + 
EncodeVars(echo_wasm_abi::CanonError), + PackEnvelope(echo_wasm_abi::EnvelopeError), +} + +pub fn encode_increment_vars(vars: &IncrementVars) -> Result, echo_wasm_abi::CanonError> { + echo_wasm_abi::encode_cbor(vars) +} + +pub fn encode_counter_value_vars( + vars: &CounterValueVars, +) -> Result, echo_wasm_abi::CanonError> { + echo_wasm_abi::encode_cbor(vars) +} + +fn generated_vars_digest(vars_bytes: &[u8]) -> Vec { + let mut digest = vec![0u8; 32]; + for (index, byte) in vars_bytes.iter().enumerate() { + digest[index % 32] ^= *byte; + } + digest +} + +pub fn counter_value_observe_optic_request( + optic_id: OpticId, + focus: OpticFocus, + coordinate: EchoCoordinate, + capability: echo_wasm_abi::kernel_port::OpticCapabilityId, + projection_version: ProjectionVersion, + reducer_version: Option, + budget: OpticReadBudget, + vars: &CounterValueVars, +) -> Result { + let vars_bytes = encode_counter_value_vars(vars)?; + let vars_digest = generated_vars_digest(&vars_bytes); + Ok(counter_value_observe_optic_request_raw_vars_digest( + optic_id, + focus, + coordinate, + capability, + projection_version, + reducer_version, + budget, + vars_digest, + )) +} + +#[allow(clippy::too_many_arguments)] +pub fn counter_value_observe_optic_request_raw_vars_digest( + optic_id: OpticId, + focus: OpticFocus, + coordinate: EchoCoordinate, + capability: echo_wasm_abi::kernel_port::OpticCapabilityId, + projection_version: ProjectionVersion, + reducer_version: Option, + budget: OpticReadBudget, + vars_digest: Vec, +) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id, + focus, + coordinate, + aperture: OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id: OP_COUNTER_VALUE, + vars_digest, + }, + budget, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }, + projection_version, + reducer_version, + capability, + } +} + +pub fn increment_dispatch_optic_intent_request( + optic_id: OpticId, + base_coordinate: EchoCoordinate, + intent_family: IntentFamilyId, + 
focus: OpticFocus, + cause: OpticCause, + capability: OpticCapability, + admission_law: AdmissionLawId, + vars: &IncrementVars, +) -> Result { + let vars_bytes = encode_increment_vars(vars).map_err(GeneratedOpticIntentError::EncodeVars)?; + let bytes = + pack_intent_v1(OP_INCREMENT, &vars_bytes).map_err(GeneratedOpticIntentError::PackEnvelope)?; + Ok(DispatchOpticIntentRequest { + optic_id, + base_coordinate, + intent_family, + focus, + cause, + capability, + admission_law, + payload: OpticIntentPayload::EintV1 { bytes }, + }) +} +", + ) + .expect("failed to write optic generated module"); + + fs::write( + crate_dir.join("src/lib.rs"), + r#" +mod generated; + +#[cfg(test)] +mod tests { + use super::generated::{ + counter_value_observe_optic_request, encode_counter_value_vars, + increment_dispatch_optic_intent_request, CounterValueVars, IncrementVars, OP_COUNTER_VALUE, + OP_INCREMENT, + }; + use echo_wasm_abi::kernel_port::{ + AdmissionLawId, CoordinateAt, DispatchOpticIntentRequest, EchoCoordinate, IntentFamilyId, + ObserveOpticRequest, OpticActorId, OpticCapability, OpticCapabilityId, OpticCause, + OpticFocus, OpticId, OpticIntentPayload, OpticReadBudget, OpticApertureShape, + ProjectionVersion, WorldlineId, + }; + use echo_wasm_abi::{decode_cbor, encode_cbor, unpack_intent_v1}; + + #[test] + fn generated_optic_helpers_build_abi_requests() { + let worldline_id = WorldlineId::from_bytes([1; 32]); + let optic_id = OpticId::from_bytes([2; 32]); + let capability_id = OpticCapabilityId::from_bytes([3; 32]); + let actor = OpticActorId::from_bytes([4; 32]); + let intent_family = IntentFamilyId::from_bytes([5; 32]); + let focus = OpticFocus::Worldline { worldline_id }; + let coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }; + let budget = OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }; + + let query_vars = CounterValueVars {}; + let observe = 
counter_value_observe_optic_request( + optic_id, + focus.clone(), + coordinate.clone(), + capability_id, + ProjectionVersion(1), + None, + budget, + &query_vars, + ) + .unwrap(); + let decoded: ObserveOpticRequest = decode_cbor(&encode_cbor(&observe).unwrap()).unwrap(); + assert_eq!(decoded, observe); + assert_eq!(observe.optic_id, optic_id); + assert!(matches!( + observe.aperture.shape, + OpticApertureShape::QueryBytes { query_id, ref vars_digest } + if query_id == OP_COUNTER_VALUE + && vars_digest.len() == 32 + && vars_digest != &encode_counter_value_vars(&query_vars).unwrap() + )); + + let capability = OpticCapability { + capability_id, + actor, + issuer_ref: None, + policy_hash: vec![6; 32], + allowed_focus: focus.clone(), + projection_version: ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: budget, + }; + let cause = OpticCause { + actor, + cause_hash: vec![7; 32], + label: Some("generated optic dispatch".into()), + }; + let dispatch = increment_dispatch_optic_intent_request( + optic_id, + coordinate.clone(), + intent_family, + focus, + cause, + capability, + AdmissionLawId::from_bytes([8; 32]), + &IncrementVars { amount: 42 }, + ) + .unwrap(); + let decoded: DispatchOpticIntentRequest = + decode_cbor(&encode_cbor(&dispatch).unwrap()).unwrap(); + assert_eq!(decoded, dispatch); + assert_eq!(dispatch.base_coordinate, coordinate); + let OpticIntentPayload::EintV1 { bytes } = &dispatch.payload; + let (op_id, vars_bytes) = unpack_intent_v1(bytes).unwrap(); + assert_eq!(op_id, OP_INCREMENT); + let vars: IncrementVars = decode_cbor(vars_bytes).unwrap(); + assert_eq!(vars.amount, 42); + } +} +"#, + ) + .expect("failed to write optic smoke lib.rs"); + + crate_dir +} + fn assert_generated_crate_checks(crate_dir: &Path) { let output = Command::new("cargo") .args(["check", "--manifest-path"]) @@ -360,6 +943,23 @@ fn assert_generated_crate_checks(crate_dir: &Path) { ); } +#[test] +fn 
test_generated_optic_helper_shape_compiles_against_abi() { + let crate_dir = write_optic_binding_smoke_crate(); + let output = Command::new("cargo") + .args(["test", "--manifest-path"]) + .arg(crate_dir.join("Cargo.toml")) + .output() + .expect("failed to run optic generated smoke crate"); + + assert!( + output.status.success(), + "optic generated smoke crate failed\nstdout:\n{}\nstderr:\n{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); +} + #[test] fn test_generate_from_json() { let ir = r#"{ @@ -422,6 +1022,11 @@ fn test_generate_from_json() { assert!(stdout.contains("use echo_registry_api::{")); assert!(stdout.contains("pub const OPS: &[OpDef]")); assert!(stdout.contains("pub static REGISTRY: GeneratedRegistry")); + assert!(stdout.contains("pub fn propose_set_theme_dispatch_optic_intent_request")); + assert!( + !stdout.contains("pub fn set_theme_dispatch_optic_intent_request"), + "setter-like mutation names must be proposal builders, not set_* helpers" + ); } #[test] @@ -462,7 +1067,17 @@ fn test_toy_contract_generates_eint_and_observation_helpers() { assert!(stdout.contains("pub const OP_INCREMENT: u32 = 1001")); assert!(stdout.contains("pub const OP_COUNTER_VALUE: u32 = 1002")); + assert!(stdout.contains("pub const GENERATED_RUST_ARTIFACT_HASH: &str")); assert!(stdout.contains("pub static REGISTRY: GeneratedRegistry")); + assert!(stdout.contains("directives_json:")); + assert!(stdout.contains("\\\"wes_footprint\\\"")); + assert!(stdout.contains("pub const OP_INCREMENT_FOOTPRINT_READS: &[&str]")); + assert!(stdout.contains("pub const OP_INCREMENT_FOOTPRINT_WRITES: &[&str]")); + assert!(stdout.contains("pub const OP_INCREMENT_FOOTPRINT_ARTIFACT_HASH: &str")); + assert!(stdout.contains("pub const OP_INCREMENT_FOOTPRINT_CERTIFICATE_HASH: &str")); + assert!(stdout.contains("pub const OP_INCREMENT_FOOTPRINT_CERTIFICATE: FootprintCertificate")); + assert!(stdout.contains("footprint_certificate: 
Some(&OP_INCREMENT_FOOTPRINT_CERTIFICATE)")); + assert!(stdout.contains("footprint_certificate: None")); for required in [ "use echo_wasm_abi::pack_intent_v1;", @@ -476,12 +1091,105 @@ fn test_toy_contract_generates_eint_and_observation_helpers() { "pack_intent_v1(super::OP_INCREMENT", "pub fn counter_value_observation_request", "pub fn counter_value_observation_request_raw_vars", + "pub fn counter_value_observe_optic_request", + "pub fn counter_value_observe_optic_request_raw_vars", + "pub fn increment_dispatch_optic_intent_request", + "pub fn increment_dispatch_optic_intent_request_raw_vars", + "DispatchOpticIntentRequest", + "ObserveOpticRequest", + "base_coordinate: EchoCoordinate", ] { assert!( stdout.contains(required), "generated toy contract output is missing first-consumer bridge: {required}" ); } + assert!( + !stdout.contains("pub fn set_"), + "generated optic helpers should not expose setter-style function names" + ); +} + +#[test] +fn test_footprint_artifact_hash_changes_when_generated_args_change() { + let without_arg = r#"{ + "ir_version": "echo-ir/v1", + "schema_sha256": "abc123", + "codec_id": "cbor-canon-v1", + "registry_version": 1, + "types": [ + { "name": "CounterValue", "kind": "OBJECT", "fields": [ + { "name": "value", "type": "Int", "required": true } + ] } + ], + "ops": [ + { + "kind": "MUTATION", + "name": "increment", + "op_id": 1001, + "args": [], + "result_type": "CounterValue", + "directives": { + "wes_footprint": { + "reads": ["CounterValue"], + "writes": ["CounterValue"] + } + } + } + ] + }"#; + let with_arg = r#"{ + "ir_version": "echo-ir/v1", + "schema_sha256": "abc123", + "codec_id": "cbor-canon-v1", + "registry_version": 1, + "types": [ + { "name": "CounterValue", "kind": "OBJECT", "fields": [ + { "name": "value", "type": "Int", "required": true } + ] }, + { "name": "IncrementInput", "kind": "INPUT_OBJECT", "fields": [ + { "name": "amount", "type": "Int", "required": true } + ] } + ], + "ops": [ + { + "kind": "MUTATION", + "name": 
"increment", + "op_id": 1001, + "args": [ + { "name": "input", "type": "IncrementInput", "required": true } + ], + "result_type": "CounterValue", + "directives": { + "wes_footprint": { + "reads": ["CounterValue"], + "writes": ["CounterValue"] + } + } + } + ] + }"#; + + let without_output = run_wesley_gen(without_arg); + assert!( + without_output.status.success(), + "CLI failed: {}", + String::from_utf8_lossy(&without_output.stderr) + ); + let with_output = run_wesley_gen(with_arg); + assert!( + with_output.status.success(), + "CLI failed: {}", + String::from_utf8_lossy(&with_output.stderr) + ); + let without_stdout = String::from_utf8_lossy(&without_output.stdout); + let with_stdout = String::from_utf8_lossy(&with_output.stdout); + + assert_ne!( + generated_str_const(&without_stdout, "OP_INCREMENT_FOOTPRINT_ARTIFACT_HASH"), + generated_str_const(&with_stdout, "OP_INCREMENT_FOOTPRINT_ARTIFACT_HASH"), + "footprint artifact identity must change when generated operation args change" + ); } #[test] @@ -506,6 +1214,7 @@ fn test_query_only_contract_does_not_import_intent_packer() { let stdout = String::from_utf8_lossy(&output.stdout); assert!(stdout.contains("pub fn counter_value_observation_request")); + assert!(stdout.contains("pub fn counter_value_observe_optic_request")); assert!( !stdout.contains("pack_intent_v1"), "query-only generated code should not import or use EINT packing" @@ -648,6 +1357,8 @@ fn test_toy_contract_no_std_generated_output_checks_in_consumer_crate() { assert!(generated.contains("extern crate alloc;")); assert!(generated.contains("pub mod __echo_wesley_generated")); assert!(generated.contains("use alloc::vec::Vec;")); + assert!(generated.contains("pub fn counter_value_observe_optic_request")); + assert!(generated.contains("pub fn increment_dispatch_optic_intent_request")); assert_generated_crate_checks(&write_basic_generated_crate( generated.as_ref(), "toy-no-std", diff --git a/crates/method/src/close.rs b/crates/method/src/close.rs new file mode 
100644
index 00000000..76ba0137
--- /dev/null
+++ b/crates/method/src/close.rs
@@ -0,0 +1,154 @@
+// SPDX-License-Identifier: Apache-2.0
+// © James Ross Ω FLYING•ROBOTS
+
+//! METHOD cycle closeout scaffolding.
+
+use std::fs;
+use std::path::PathBuf;
+
+use serde::Serialize;
+
+use crate::status::{ActiveCycle, StatusReport};
+use crate::workspace::MethodWorkspace;
+
+/// Result of creating a cycle retro scaffold.
+#[derive(Debug, Clone, Serialize)]
+pub struct CloseCycleResult {
+    /// Full cycle directory name, e.g. `0018-echo-optics-api-design`.
+    pub cycle: String,
+    /// Retro markdown path.
+    pub retro_path: PathBuf,
+    /// Witness artifact directory path.
+    pub witness_dir: PathBuf,
+}
+
+/// Create a retro template and witness directory for an active cycle.
+///
+/// When `selector` is `None`, the most recent active cycle is selected by
+/// cycle number. A selector may be either the full cycle directory name or just
+/// the numeric prefix.
+pub fn close_cycle(
+    workspace: &MethodWorkspace,
+    selector: Option<&str>,
+) -> Result<CloseCycleResult, String> {
+    if let Some(raw) = selector {
+        if let Some(existing_retro) = existing_retro_for_selector(workspace, raw)? {
+            return Err(format!(
+                "refusing to overwrite existing retro directory: {}",
+                existing_retro.display()
+            ));
+        }
+    }
+
+    let mut cycles = StatusReport::build(workspace)?.active_cycles;
+    cycles.sort_by_key(cycle_name);
+
+    let cycle = match selector {
+        Some(raw) => find_cycle(&cycles, raw)?,
+        None => cycles
+            .last()
+            .cloned()
+            .ok_or_else(|| "no active METHOD cycles found".to_string())?,
+    };
+
+    let cycle_dir_name = cycle_name(&cycle);
+    let retro_dir = workspace.retro_root().join(&cycle_dir_name);
+    let retro_path = retro_dir.join("retro.md");
+    let witness_dir = retro_dir.join("witness");
+
+    if retro_dir.exists() {
+        return Err(format!(
+            "refusing to overwrite existing retro directory: {}",
+            retro_dir.display()
+        ));
+    }
+
+    fs::create_dir_all(&witness_dir)
+        .map_err(|e| format!("failed to create {}: {e}", witness_dir.display()))?;
+    fs::write(&retro_path, retro_template(&cycle))
+        .map_err(|e| format!("failed to write {}: {e}", retro_path.display()))?;
+
+    Ok(CloseCycleResult {
+        cycle: cycle_dir_name,
+        retro_path,
+        witness_dir,
+    })
+}
+
+fn existing_retro_for_selector(
+    workspace: &MethodWorkspace,
+    raw: &str,
+) -> Result<Option<PathBuf>, String> {
+    let retro_root = workspace.retro_root();
+    let entries = match fs::read_dir(&retro_root) {
+        Ok(entries) => entries,
+        Err(err) => return Err(format!("failed to read {}: {err}", retro_root.display())),
+    };
+
+    for entry in entries {
+        let entry = entry.map_err(|e| format!("failed to read retro entry: {e}"))?;
+        if !entry.path().is_dir() {
+            continue;
+        }
+        let name = entry.file_name();
+        let name = name.to_string_lossy();
+        if name == raw
+            || name
+                .strip_prefix(raw)
+                .is_some_and(|suffix| suffix.starts_with('-'))
+        {
+            return Ok(Some(entry.path()));
+        }
+    }
+
+    Ok(None)
+}
+
+fn find_cycle(cycles: &[ActiveCycle], raw: &str) -> Result<ActiveCycle, String> {
+    let matches = cycles
+        .iter()
+        .filter(|cycle| cycle.number == raw || cycle_name(cycle) == raw)
+        .cloned()
+        .collect::<Vec<_>>();
+
+    match matches.as_slice() {
+        [cycle] => Ok(cycle.clone()),
+        [] => Err(format!("no active METHOD cycle matches `{raw}`")),
+        _ => Err(format!("METHOD cycle selector `{raw}` is ambiguous")),
+    }
+}
+
+fn cycle_name(cycle: &ActiveCycle) -> String {
+    format!("{}-{}", cycle.number, cycle.slug)
+}
+
+fn retro_template(cycle: &ActiveCycle) -> String {
+    let cycle_name = cycle_name(cycle);
+    format!(
+        "\n\
+        \n\
+        \n\
+        # Retro: {cycle_name}\n\
+        \n\
+        Cycle: `{cycle_name}`\n\
+        Design: [`docs/design/{cycle_name}/`](../../../design/{cycle_name}/)\n\
+        Witness: [`witness/`](./witness/)\n\
+        \n\
+        ## Outcome\n\
+        \n\
+        - Status: TODO\n\
+        - Summary: TODO\n\
+        \n\
+        ## Evidence\n\
+        \n\
+        - TODO\n\
+        \n\
+        ## Drift Check\n\
+        \n\
+        - TODO\n\
+        \n\
+        ## Follow-Up\n\
+        \n\
+        - TODO\n"
+    )
+}
diff --git a/crates/method/src/drift.rs b/crates/method/src/drift.rs
new file mode 100644
index 00000000..c56cfeac
--- /dev/null
+++ b/crates/method/src/drift.rs
@@ -0,0 +1,333 @@
+// SPDX-License-Identifier: Apache-2.0
+// © James Ross Ω FLYING•ROBOTS
+
+//! METHOD playback-question drift checks.
+
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use serde::Serialize;
+
+use crate::status::{ActiveCycle, StatusReport};
+use crate::workspace::MethodWorkspace;
+
+/// Drift coverage report for one cycle.
+#[derive(Debug, Clone, Serialize)]
+pub struct DriftReport {
+    /// Cycle directory name.
+    pub cycle: String,
+    /// Design markdown files inspected.
+    pub design_paths: Vec<PathBuf>,
+    /// Playback-question coverage rows.
+    pub questions: Vec<PlaybackQuestionCoverage>,
+}
+
+impl DriftReport {
+    /// Number of playback questions with no matching test coverage.
+    pub fn missing_count(&self) -> usize {
+        self.questions
+            .iter()
+            .filter(|question| question.matches.is_empty())
+            .count()
+    }
+
+    /// Whether all discovered playback questions have visible test coverage.
+    pub fn covered(&self) -> bool {
+        self.missing_count() == 0
+    }
+}
+
+/// One playback question and the tests that appear to cover it.
+#[derive(Debug, Clone, Serialize)]
+pub struct PlaybackQuestionCoverage {
+    /// Question text extracted from the design doc.
+    pub question: String,
+    /// Relative test files with matching names/descriptions.
+    pub matches: Vec<PathBuf>,
+}
+
+/// Check playback questions for an active cycle against committed tests.
+///
+/// `selector` may be a full cycle directory name or just the numeric prefix.
+/// When omitted, the most recent active cycle is checked.
+pub fn drift_report(
+    workspace: &MethodWorkspace,
+    selector: Option<&str>,
+) -> Result<DriftReport, String> {
+    let cycle = select_cycle(workspace, selector)?;
+    let cycle_name = cycle_name(&cycle);
+    let cycle_dir = workspace.design_root().join(&cycle_name);
+    let design_paths = collect_markdown_files(&cycle_dir)?;
+    let repo_root = workspace
+        .backlog_root()
+        .parent()
+        .and_then(Path::parent)
+        .and_then(Path::parent)
+        .map(Path::to_path_buf)
+        .ok_or_else(|| "failed to resolve METHOD repo root".to_string())?;
+    let test_files = collect_test_files(&repo_root)?;
+
+    let mut questions = Vec::new();
+    for path in &design_paths {
+        let text = fs::read_to_string(path)
+            .map_err(|e| format!("failed to read {}: {e}", path.display()))?;
+        for question in extract_playback_questions(&text) {
+            let matches = matching_test_files(&repo_root, &test_files, &question)?;
+            questions.push(PlaybackQuestionCoverage { question, matches });
+        }
+    }
+
+    Ok(DriftReport {
+        cycle: cycle_name,
+        design_paths,
+        questions,
+    })
+}
+
+fn select_cycle(
+    workspace: &MethodWorkspace,
+    selector: Option<&str>,
+) -> Result<ActiveCycle, String> {
+    let mut cycles = StatusReport::build(workspace)?.active_cycles;
+    cycles.sort_by_key(cycle_name);
+
+    match selector {
+        Some(raw) => {
+            let matches = cycles
+                .into_iter()
+                .filter(|cycle| cycle.number == raw || cycle_name(cycle) == raw)
+                .collect::<Vec<_>>();
+            match matches.as_slice() {
+                [cycle] => Ok(cycle.clone()),
+                [] => Err(format!("no active METHOD cycle matches `{raw}`")),
+                _ => Err(format!("METHOD cycle selector `{raw}` is ambiguous")),
+            }
+        }
+        None => cycles
+            .last()
+            .cloned()
+            .ok_or_else(|| "no active METHOD cycles found".to_string()),
+    }
+}
+
+fn cycle_name(cycle: &ActiveCycle) -> String {
+    format!("{}-{}", cycle.number, cycle.slug)
+}
+
+fn collect_markdown_files(root: &Path) -> Result<Vec<PathBuf>, String> {
+    let mut out = Vec::new();
+    collect_files(root, &mut out, |path| {
+        path.extension().is_some_and(|ext| ext == "md")
+    })?;
+    out.sort();
+    Ok(out)
+}
+
+fn collect_test_files(root: &Path) -> Result<Vec<PathBuf>, String> {
+    let mut out = Vec::new();
+    for dir in ["crates", "xtask", "scripts", "apps", "packages"] {
+        let path = root.join(dir);
+        if path.is_dir() {
+            collect_files(&path, &mut out, is_test_file)?;
+        }
+    }
+    out.sort();
+    Ok(out)
+}
+
+fn collect_files(
+    root: &Path,
+    out: &mut Vec<PathBuf>,
+    include: fn(&Path) -> bool,
+) -> Result<(), String> {
+    let entries =
+        fs::read_dir(root).map_err(|e| format!("failed to read {}: {e}", root.display()))?;
+    for entry in entries {
+        let entry = entry.map_err(|e| format!("failed to read directory entry: {e}"))?;
+        let path = entry.path();
+        if path.is_dir() {
+            let name = path
+                .file_name()
+                .and_then(|name| name.to_str())
+                .unwrap_or("");
+            if matches!(name, "target" | "node_modules" | "dist" | ".git") {
+                continue;
+            }
+            collect_files(&path, out, include)?;
+        } else if include(&path) {
+            out.push(path);
+        }
+    }
+    Ok(())
+}
+
+fn is_test_file(path: &Path) -> bool {
+    let Some(ext) = path.extension().and_then(|ext| ext.to_str()) else {
+        return false;
+    };
+    let Some(path_text) = path.to_str() else {
+        return false;
+    };
+    matches!(
+        ext,
+        "rs" | "ts" | "tsx" | "js" | "mjs" | "sh" | "bats" | "md"
+    ) && (path_text.contains("/test")
+        || path_text.contains("/tests")
+        || path_text.contains("_test.")
+        || path_text.contains(".test.")
+        || path_text.contains(".spec."))
+}
+
+fn extract_playback_questions(text: &str) -> Vec<String> {
+    let mut questions = Vec::new();
+    let mut in_playback = false;
+
+    for line in text.lines() {
+        let trimmed = line.trim();
+        if trimmed.starts_with('#') {
+            let lower = trimmed.to_ascii_lowercase();
+            in_playback = lower.contains("playback") && !lower.contains("not playback");
+            continue;
+        }
+        if !in_playback {
+            continue;
+        }
+        if let Some(question) = table_question(trimmed).or_else(|| list_question(trimmed)) {
+            questions.push(question);
+        }
+    }
+
+    questions
+}
+
+fn table_question(line: &str) -> Option<String> {
+    if !line.starts_with('|') || line.contains("---") {
+        return None;
+    }
+    line.split('|')
+        .map(str::trim)
+        .find(|cell| cell.contains('?'))
+        .map(clean_question)
+        .filter(|question| !question.is_empty())
+}
+
+fn list_question(line: &str) -> Option<String> {
+    if !line.contains('?') {
+        return None;
+    }
+    let without_checkbox = line
+        .trim_start_matches(|ch: char| {
+            ch == '-' || ch == '*' || ch == '+' || ch.is_ascii_digit() || ch == '.' || ch == ' '
+        })
+        .trim_start_matches("[ ]")
+        .trim_start_matches("[x]")
+        .trim_start_matches("[X]")
+        .trim();
+    Some(clean_question(without_checkbox)).filter(|question| !question.is_empty())
+}
+
+fn clean_question(text: &str) -> String {
+    text.trim()
+        .trim_matches('|')
+        .trim()
+        .trim_matches('"')
+        .trim()
+        .to_string()
+}
+
+fn matching_test_files(
+    repo_root: &Path,
+    test_files: &[PathBuf],
+    question: &str,
+) -> Result<Vec<PathBuf>, String> {
+    let question_norm = normalize(question);
+    let terms = significant_terms(&question_norm);
+    let mut matches = Vec::new();
+
+    for path in test_files {
+        let text = fs::read_to_string(path)
+            .map_err(|e| format!("failed to read {}: {e}", path.display()))?;
+        let haystack = normalize(&text);
+        let exact = !question_norm.is_empty() && haystack.contains(&question_norm);
+        let term_match = terms.len() >= 3 && terms.iter().all(|term| haystack.contains(term));
+        if exact || term_match {
+            matches.push(path.strip_prefix(repo_root).unwrap_or(path).to_path_buf());
+        }
+    }
+
+    Ok(matches)
+}
+
+fn normalize(text: &str) -> String {
+    let mut out = String::with_capacity(text.len());
+    for ch in text.chars() {
+        if ch.is_ascii_alphanumeric() {
+            out.push(ch.to_ascii_lowercase());
+        } else {
+            out.push(' ');
+        }
+    }
+    out.split_whitespace().collect::<Vec<_>>().join(" ")
+}
+
+fn significant_terms(normalized: &str) -> Vec<String> {
+    normalized
+        .split_whitespace()
+        .filter(|term| term.len() >= 4)
+        .filter(|term| {
+            !matches!(
+                *term,
+                "does"
+                    | "with"
+                    | "from"
+                    | "that"
+                    | "this"
+                    | "have"
+                    | "what"
+                    | "when"
+                    | "where"
+                    | "which"
+                    | "should"
+                    | "would"
+                    | "could"
+                    | "agent"
+                    | "human"
+                    | "test"
+                    | "tests"
+            )
+        })
+        .map(str::to_string)
+        .collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::extract_playback_questions;
+
+    #[test]
+    fn extracts_numbered_and_table_playback_questions() {
+        let questions = extract_playback_questions(
+            r"# Design
+
+## Human playback
+
+1. Does the command exit 0?
+
+## Agent playback
+
+| Question | Expected |
+| -------- | -------- |
+| Can JSON be parsed? | Yes |
+
+## Not playback
+
+1. Does this get ignored?
+",
+        );
+
+        assert_eq!(
+            questions,
+            vec!["Does the command exit 0?", "Can JSON be parsed?"]
+        );
+    }
+}
diff --git a/crates/method/src/graph.rs b/crates/method/src/graph.rs
new file mode 100644
index 00000000..1b215379
--- /dev/null
+++ b/crates/method/src/graph.rs
@@ -0,0 +1,1278 @@
+// SPDX-License-Identifier: Apache-2.0
+// © James Ross Ω FLYING•ROBOTS
+
+//! Backlog task graph parsing, matrix rendering, and scheduling queries.
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::{Component, Path, PathBuf};
+
+use serde::Serialize;
+
+use crate::workspace::MethodWorkspace;
+
+const GRAPH_LANES: &[&str] = &["asap", "up-next", "inbox", "cool-ideas", "bad-code"];
+
+const TASK_SECTION_PREFIX: &str = "## T-";
+
+/// A parsed METHOD backlog graph.
+#[derive(Debug, Clone, Serialize)]
+pub struct TaskGraph {
+    /// Parsed task rows, in deterministic matrix order.
+    pub tasks: Vec<TaskNode>,
+    /// Direct dependency edges from prerequisite to dependent.
+    pub edges: Vec<TaskEdge>,
+    /// Dependency-shaped references that did not resolve to backlog task rows.
+    pub external_refs: Vec<ExternalDependencyRef>,
+}
+
+/// One schedulable task row.
+#[derive(Debug, Clone, Serialize)]
+pub struct TaskNode {
+    /// Matrix/task id, e.g. `M001`.
+    pub id: String,
+    /// Backlog lane.
+    pub lane: String,
+    /// Native legacy task id when the source is an internal `## T-*` section.
+    pub native_id: Option<String>,
+    /// Human title.
+    pub title: String,
+    /// Source markdown path, relative to repo root.
+    pub source_path: String,
+    /// Optional markdown anchor for internal `## T-*` section tasks.
+    pub anchor: Option<String>,
+    /// Whether this task is already complete but still present as a backlog
+    /// coordination/index card.
+    pub completed: bool,
+}
+
+/// A directed dependency edge.
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize)]
+pub struct TaskEdge {
+    /// Prerequisite task id.
+    pub prerequisite: String,
+    /// Dependent task id.
+    pub dependent: String,
+}
+
+/// A dependency-shaped reference that points outside the backlog graph.
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize)]
+pub struct ExternalDependencyRef {
+    /// Task containing the reference.
+    pub task_id: String,
+    /// Field where it was found.
+    pub field: String,
+    /// Raw reference text.
+    pub reference: String,
+}
+
+/// A task with scheduling metrics.
+#[derive(Debug, Clone, Serialize)]
+pub struct FrontierTask {
+    /// Task node.
+    pub task: TaskNode,
+    /// Number of tasks transitively unblocked by this task.
+    pub downstream_count: usize,
+    /// Longest downstream chain length from this task, including itself.
+    pub downstream_depth: usize,
+}
+
+/// Summary of graph health and scheduling state.
+#[derive(Debug, Clone, Serialize)]
+pub struct GraphSummary {
+    /// Total task rows.
+    pub tasks: usize,
+    /// Direct in-graph dependency edges.
+    pub edges: usize,
+    /// Open tasks with no in-graph blockers.
+ pub open_tasks: usize, + /// Completed tasks still present in the backlog graph. + pub completed_tasks: usize, + /// External/unresolved dependency references. + pub external_refs: usize, + /// Task counts by backlog lane. + pub lanes: BTreeMap, +} + +impl TaskGraph { + /// Build the task graph from `docs/method/backlog/**`. + pub fn build(workspace: &MethodWorkspace) -> Result { + let root = workspace + .backlog_root() + .parent() + .and_then(Path::parent) + .and_then(Path::parent) + .map_or_else(|| PathBuf::from("."), Path::to_path_buf); + let backlog_root = workspace.backlog_root(); + let mut files = collect_markdown_files(&backlog_root)?; + files.sort_by(|a, b| { + let a_lane = lane_for(&backlog_root, a); + let b_lane = lane_for(&backlog_root, b); + lane_rank(&a_lane) + .cmp(&lane_rank(&b_lane)) + .then_with(|| path_string(a).cmp(&path_string(b))) + }); + + let mut tasks = Vec::new(); + let mut file_to_tasks: BTreeMap> = BTreeMap::new(); + let mut file_text: BTreeMap = BTreeMap::new(); + + for path in &files { + let text = fs::read_to_string(path) + .map_err(|e| format!("failed to read {}: {e}", path.display()))?; + let rel = relative_path(&root, path)?; + let lane = lane_for(&backlog_root, path); + let sections = task_sections(&text); + let mut indexes = Vec::new(); + + if sections.is_empty() { + indexes.push(tasks.len()); + tasks.push(TaskNode { + id: String::new(), + lane, + native_id: None, + title: h1_title(&text).unwrap_or_else(|| fallback_title(path)), + source_path: rel.clone(), + anchor: None, + completed: status_is_complete(&text), + }); + } else { + let sections_with_body = task_sections_with_body(&text); + for (idx, section) in sections.into_iter().enumerate() { + indexes.push(tasks.len()); + let heading = format!("{} {}", section.native_id, section.title); + tasks.push(TaskNode { + id: String::new(), + lane: lane.clone(), + native_id: Some(section.native_id), + title: section.title, + source_path: rel.clone(), + anchor: 
Some(slugify_heading(&heading)), + completed: sections_with_body + .get(idx) + .is_some_and(|section| status_is_complete(§ion.body)), + }); + } + } + + file_to_tasks.insert(rel.clone(), indexes); + file_text.insert(rel, text); + } + + for (idx, task) in tasks.iter_mut().enumerate() { + task.id = format!("M{:03}", idx + 1); + } + + let mut id_by_native: BTreeMap> = BTreeMap::new(); + for (idx, task) in tasks.iter().enumerate() { + if let Some(native_id) = &task.native_id { + id_by_native.entry(native_id.clone()).or_default().push(idx); + } + } + + let mut slug_aliases: BTreeMap> = BTreeMap::new(); + for (rel, indexes) in &file_to_tasks { + let stem = Path::new(rel) + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or_default() + .to_ascii_lowercase(); + if !stem.is_empty() { + slug_aliases + .entry(stem.clone()) + .or_default() + .extend(indexes.iter().copied()); + } + if let Some((_, suffix)) = stem.split_once('_') { + slug_aliases + .entry(suffix.to_string()) + .or_default() + .extend(indexes.iter().copied()); + } + } + + let mut edge_set: BTreeSet = BTreeSet::new(); + let mut external_set: BTreeSet = BTreeSet::new(); + + for (rel, text) in &file_text { + let Some(source_indexes) = file_to_tasks.get(rel) else { + continue; + }; + for link in file_dependency_links(text) { + let Some(dep_rel) = resolve_backlog_link(rel, &link) else { + for source_idx in source_indexes { + external_set.insert(ExternalDependencyRef { + task_id: tasks[*source_idx].id.clone(), + field: "Depends on".to_string(), + reference: link.clone(), + }); + } + continue; + }; + if let Some(dep_indexes) = file_to_tasks.get(&dep_rel) { + for source_idx in source_indexes { + for dep_idx in dep_indexes { + if source_idx != dep_idx { + edge_set.insert(TaskEdge { + prerequisite: tasks[*dep_idx].id.clone(), + dependent: tasks[*source_idx].id.clone(), + }); + } + } + } + } else { + for source_idx in source_indexes { + external_set.insert(ExternalDependencyRef { + task_id: tasks[*source_idx].id.clone(), 
+ field: "Depends on".to_string(), + reference: link.clone(), + }); + } + } + } + } + + for (rel, text) in &file_text { + let Some(source_indexes) = file_to_tasks.get(rel) else { + continue; + }; + if source_indexes.len() == 1 && tasks[source_indexes[0]].native_id.is_none() { + continue; + } + let sections = task_sections_with_body(text); + for (section_idx, section) in sections.iter().enumerate() { + let Some(source_idx) = source_indexes.get(section_idx) else { + continue; + }; + for raw in blocked_by_values(§ion.body) { + add_blocker_edges( + raw, + *source_idx, + &tasks, + &id_by_native, + &slug_aliases, + &mut edge_set, + ); + } + for raw in blocking_values(§ion.body) { + for token in task_tokens(raw) { + if let Some(target_indexes) = id_by_native.get(&token) { + for target_idx in target_indexes { + if *target_idx != *source_idx { + edge_set.insert(TaskEdge { + prerequisite: tasks[*source_idx].id.clone(), + dependent: tasks[*target_idx].id.clone(), + }); + } + } + } + } + } + } + } + + let edges = edge_set.into_iter().collect(); + let external_refs = external_set.into_iter().collect(); + let graph = Self { + tasks, + edges, + external_refs, + }; + graph.ensure_acyclic()?; + Ok(graph) + } + + /// Return graph summary metrics. + pub fn summary(&self) -> GraphSummary { + let open_tasks = self.frontier().len(); + let mut lanes = BTreeMap::new(); + for task in &self.tasks { + *lanes.entry(task.lane.clone()).or_insert(0) += 1; + } + GraphSummary { + tasks: self.tasks.len(), + edges: self.edges.len(), + open_tasks, + completed_tasks: self.tasks.iter().filter(|task| task.completed).count(), + external_refs: self.external_refs.len(), + lanes, + } + } + + /// Return open frontier tasks, ranked by lane and downstream impact. 
+ pub fn frontier(&self) -> Vec { + let incoming = self.active_incoming_counts(); + let downstream_count = self.downstream_counts(); + let downstream_depth = self.downstream_depths(); + let mut frontier = self + .tasks + .iter() + .filter(|task| !task.completed) + .filter(|task| incoming.get(&task.id).copied().unwrap_or(0) == 0) + .map(|task| FrontierTask { + task: task.clone(), + downstream_count: downstream_count.get(&task.id).copied().unwrap_or(0), + downstream_depth: downstream_depth.get(&task.id).copied().unwrap_or(1), + }) + .collect::>(); + + frontier.sort_by(|a, b| { + lane_rank(&a.task.lane) + .cmp(&lane_rank(&b.task.lane)) + .then_with(|| b.downstream_count.cmp(&a.downstream_count)) + .then_with(|| b.downstream_depth.cmp(&a.downstream_depth)) + .then_with(|| a.task.id.cmp(&b.task.id)) + }); + frontier + } + + /// Return the unweighted longest dependency path. + pub fn critical_path(&self) -> Vec { + let outgoing = self.active_outgoing_map(); + let order = self.topological_order().unwrap_or_default(); + let mut best_len: BTreeMap = BTreeMap::new(); + let mut next: BTreeMap = BTreeMap::new(); + + for task_id in order.iter().rev() { + let mut best = 1usize; + let mut best_next = None; + if let Some(children) = outgoing.get(task_id) { + for child in children { + let candidate = 1 + best_len.get(child).copied().unwrap_or(1); + if candidate > best { + best = candidate; + best_next = Some(child.clone()); + } + } + } + best_len.insert(task_id.clone(), best); + if let Some(child) = best_next { + next.insert(task_id.clone(), child); + } + } + + let Some(start) = self + .tasks + .iter() + .filter(|task| !task.completed) + .map(|task| task.id.clone()) + .max_by_key(|id| best_len.get(id).copied().unwrap_or(1)) + else { + return Vec::new(); + }; + + let by_id = self.task_by_id(); + let mut path = Vec::new(); + let mut current = start; + while let Some(task) = by_id.get(¤t) { + path.push((*task).clone()); + let Some(next_id) = next.get(¤t) else { + break; + }; + current 
= next_id.clone(); + } + path + } + + /// Render `docs/method/task-matrix.md`. + pub fn render_matrix_markdown(&self) -> String { + let summary = self.summary(); + let mut lines = Vec::new(); + lines.push( + "".to_string(), + ); + lines.push( + "".to_string(), + ); + lines.push(String::new()); + lines.push("# METHOD Task Matrix".to_string()); + lines.push(String::new()); + lines.push( + "Rows are dependent tasks. Columns are prerequisite tasks. A cell contains".to_string(), + ); + lines.push( + "`depends on` when the row task directly depends on the column task.".to_string(), + ); + lines.push(String::new()); + lines.push( + "This matrix is generated from `docs/method/backlog/**`. If a backlog file".to_string(), + ); + lines.push( + "contains `## T-...` task sections, each section is a task row. Otherwise,".to_string(), + ); + lines.push( + "the backlog file itself is one task row. File-level `Depends on:` links are" + .to_string(), + ); + lines.push("included when they resolve to another backlog task. Section-level".to_string()); + lines.push( + "`Blocked By:` / `Blocking:` task IDs are included when they resolve to a".to_string(), + ); + lines.push("task row.".to_string()); + lines.push(String::new()); + lines.push( + "Blank cells mean no direct dependency was found. 
Transitive dependencies are" + .to_string(), + ); + lines.push("not expanded.".to_string()); + lines.push(String::new()); + lines.push("## Summary".to_string()); + lines.push(String::new()); + lines.push(format!("- Matrix rows/columns: {}", summary.tasks)); + lines.push(format!( + "- Direct in-matrix dependency edges: {}", + summary.edges + )); + lines.push(format!( + "- Completed backlog tasks: {}", + summary.completed_tasks + )); + for lane in GRAPH_LANES { + if let Some(count) = summary.lanes.get(*lane) { + lines.push(format!("- `{lane}` tasks: {count}")); + } + } + lines.push(String::new()); + lines.push("## Task IDs".to_string()); + lines.push(String::new()); + for task in &self.tasks { + let native = task + .native_id + .as_ref() + .map(|id| format!(" `{id}`")) + .unwrap_or_default(); + let task_link = task_markdown_link(task); + lines.push(format!( + "- `{}` `{}`{}: {} (source: [`{}`]({}))", + task.id, task.lane, native, task_link, task.source_path, task.source_path + )); + } + lines.push(String::new()); + lines.push("## Matrix".to_string()); + lines.push(String::new()); + lines.push("```csv".to_string()); + lines.push(self.render_matrix_csv().trim_end().to_string()); + lines.push("```".to_string()); + lines.push(String::new()); + + if !self.external_refs.is_empty() { + lines.push("## External Or Unresolved Dependency References".to_string()); + lines.push(String::new()); + lines.push( + "These references were found in dependency-shaped fields but do not resolve to" + .to_string(), + ); + lines.push("a task row in `docs/method/backlog/**`.".to_string()); + lines.push(String::new()); + for reference in &self.external_refs { + lines.push(format!( + "- `{}` {}: `{}`", + reference.task_id, reference.field, reference.reference + )); + } + lines.push(String::new()); + } + + lines.join("\n") + } + + /// Render standalone CSV matrix. 
+ pub fn render_matrix_csv(&self) -> String { + let mut lines = Vec::new(); + let mut header = Vec::with_capacity(self.tasks.len() + 1); + header.push("task".to_string()); + header.extend(self.tasks.iter().map(|task| task.id.clone())); + lines.push(header.join(",")); + + let edge_set = self + .edges + .iter() + .map(|edge| (edge.dependent.as_str(), edge.prerequisite.as_str())) + .collect::>(); + + for row_task in &self.tasks { + let mut row = Vec::with_capacity(self.tasks.len() + 1); + row.push(row_task.id.clone()); + for col_task in &self.tasks { + if edge_set.contains(&(row_task.id.as_str(), col_task.id.as_str())) { + row.push("depends on".to_string()); + } else { + row.push(String::new()); + } + } + lines.push(row.join(",")); + } + + format!("{}\n", lines.join("\n")) + } + + /// Render Graphviz DOT. + pub fn render_dot(&self) -> String { + let summary = self.summary(); + let incoming = self.active_incoming_counts(); + let open = self + .frontier() + .into_iter() + .map(|task| task.task.id) + .collect::>(); + + let mut lines = Vec::new(); + lines.push("digraph method_task_dag {".to_string()); + lines.push(" graph [".to_string()); + lines.push(format!( + " label=\"METHOD Backlog Task Dependency DAG\\nopen tasks: {} / {}; dependency edges: {}\",", + summary.open_tasks, summary.tasks, summary.edges + )); + lines.push(" labelloc=t,".to_string()); + lines.push(" fontsize=24,".to_string()); + lines.push(" fontname=\"Inter, Helvetica, Arial\",".to_string()); + lines.push(" rankdir=LR,".to_string()); + lines.push(" bgcolor=\"white\",".to_string()); + lines.push(" splines=ortho,".to_string()); + lines.push(" overlap=false,".to_string()); + lines.push(" nodesep=0.35,".to_string()); + lines.push(" ranksep=0.75".to_string()); + lines.push(" ];".to_string()); + lines.push(" node [".to_string()); + lines.push(" shape=box,".to_string()); + lines.push(" style=\"rounded,filled\",".to_string()); + lines.push(" fontsize=10,".to_string()); + lines.push(" fontname=\"Inter, Helvetica, 
Arial\",".to_string()); + lines.push(" margin=\"0.08,0.06\",".to_string()); + lines.push(" penwidth=1.2".to_string()); + lines.push(" ];".to_string()); + lines.push(" edge [".to_string()); + lines.push(" color=\"#dc2626\",".to_string()); + lines.push(" arrowsize=0.8,".to_string()); + lines.push(" penwidth=2.6".to_string()); + lines.push(" ];".to_string()); + lines.push(String::new()); + + for lane in GRAPH_LANES { + let lane_tasks = self + .tasks + .iter() + .filter(|task| task.lane == *lane) + .collect::>(); + if lane_tasks.is_empty() { + continue; + } + let (fill, border) = lane_colors(lane); + lines.push(format!( + " subgraph \"cluster_{}\" {{", + lane.replace('-', "_") + )); + lines.push(format!(" label=\"{lane}\";")); + lines.push(" color=\"#cbd5e1\";".to_string()); + lines.push(" fontname=\"Inter, Helvetica, Arial\";".to_string()); + lines.push(" fontsize=16;".to_string()); + lines.push(" style=\"rounded\";".to_string()); + for task in lane_tasks { + let is_open = open.contains(&task.id); + let is_completed = task.completed; + let mut label_parts = vec![task.id.clone()]; + if is_completed { + label_parts.push("DONE".to_string()); + } else if is_open { + label_parts.push("OPEN".to_string()); + } + if let Some(native_id) = &task.native_id { + label_parts.push(native_id.clone()); + } + label_parts.extend(wrap_title(&task.title, 26, 3)); + let label = dot_label(&label_parts); + let status = if is_completed { + "done" + } else if is_open { + "open" + } else { + "blocked" + }; + let blockers = incoming.get(&task.id).copied().unwrap_or(0); + let native = task.native_id.clone().unwrap_or_default(); + let tooltip = dot_escape(&format!( + "{} [{}] {}; blockers={}; {} {}", + task.id, task.lane, status, blockers, native, task.title + )); + let (node_fill, node_border, penwidth) = if is_completed { + ("#f1f5f9", "#94a3b8", "1.4") + } else if is_open { + ("#bbf7d0", "#15803d", "2.8") + } else { + (fill, border, "1.2") + }; + lines.push(format!( + " \"{}\" [label=\"{}\", 
tooltip=\"{}\", fillcolor=\"{}\", color=\"{}\", penwidth={}];", + task.id, label, tooltip, node_fill, node_border, penwidth + )); + } + lines.push(" }".to_string()); + lines.push(String::new()); + } + + for edge in &self.edges { + let blocking = self + .task(&edge.prerequisite) + .zip(self.task(&edge.dependent)) + .is_some_and(|(from, to)| !from.completed && !to.completed); + if blocking { + lines.push(format!( + " \"{}\" -> \"{}\";", + edge.prerequisite, edge.dependent + )); + } else { + lines.push(format!( + " \"{}\" -> \"{}\" [color=\"#94a3b8\", penwidth=1.0, arrowsize=0.45, style=dashed];", + edge.prerequisite, edge.dependent + )); + } + } + lines.push("}".to_string()); + lines.push(String::new()); + lines.join("\n") + } + + /// Ensure the dependency graph has no cycles. + pub fn ensure_acyclic(&self) -> Result<(), String> { + self.topological_order().map(|_| ()) + } + + fn incoming_counts(&self) -> BTreeMap { + let mut counts = self + .tasks + .iter() + .map(|task| (task.id.clone(), 0usize)) + .collect::>(); + for edge in &self.edges { + *counts.entry(edge.dependent.clone()).or_insert(0) += 1; + } + counts + } + + fn active_incoming_counts(&self) -> BTreeMap { + let mut counts = self + .tasks + .iter() + .map(|task| (task.id.clone(), 0usize)) + .collect::>(); + for edge in &self.edges { + let Some((from, to)) = self + .task(&edge.prerequisite) + .zip(self.task(&edge.dependent)) + else { + continue; + }; + if !from.completed && !to.completed { + *counts.entry(edge.dependent.clone()).or_insert(0) += 1; + } + } + counts + } + + fn outgoing_map(&self) -> BTreeMap> { + let mut map = self + .tasks + .iter() + .map(|task| (task.id.clone(), Vec::new())) + .collect::>(); + for edge in &self.edges { + map.entry(edge.prerequisite.clone()) + .or_default() + .push(edge.dependent.clone()); + } + for values in map.values_mut() { + values.sort(); + } + map + } + + fn active_outgoing_map(&self) -> BTreeMap> { + let mut map = self + .tasks + .iter() + .map(|task| 
(task.id.clone(), Vec::new())) + .collect::>(); + for edge in &self.edges { + let Some((from, to)) = self + .task(&edge.prerequisite) + .zip(self.task(&edge.dependent)) + else { + continue; + }; + if !from.completed && !to.completed { + map.entry(edge.prerequisite.clone()) + .or_default() + .push(edge.dependent.clone()); + } + } + for values in map.values_mut() { + values.sort(); + } + map + } + + fn task(&self, id: &str) -> Option<&TaskNode> { + self.tasks.iter().find(|task| task.id == id) + } + + fn task_by_id(&self) -> BTreeMap { + self.tasks + .iter() + .map(|task| (task.id.clone(), task)) + .collect() + } + + fn topological_order(&self) -> Result, String> { + let outgoing = self.outgoing_map(); + let mut incoming = self.incoming_counts(); + let mut ready = self + .tasks + .iter() + .filter(|task| incoming.get(&task.id).copied().unwrap_or(0) == 0) + .map(|task| task.id.clone()) + .collect::>(); + let mut order = Vec::new(); + + while let Some(next) = ready.pop_first() { + order.push(next.clone()); + if let Some(children) = outgoing.get(&next) { + for child in children { + if let Some(count) = incoming.get_mut(child) { + *count = count.saturating_sub(1); + if *count == 0 { + ready.insert(child.clone()); + } + } + } + } + } + + if order.len() == self.tasks.len() { + Ok(order) + } else { + let blocked = incoming + .into_iter() + .filter_map(|(id, count)| if count > 0 { Some(id) } else { None }) + .collect::>(); + Err(format!( + "task dependency graph has at least one cycle; unresolved nodes: {}", + blocked.join(", ") + )) + } + } + + fn downstream_counts(&self) -> BTreeMap { + let outgoing = self.active_outgoing_map(); + let mut counts = BTreeMap::new(); + for task in &self.tasks { + let mut seen = BTreeSet::new(); + let mut stack = outgoing.get(&task.id).cloned().unwrap_or_default(); + while let Some(next) = stack.pop() { + if !seen.insert(next.clone()) { + continue; + } + if let Some(children) = outgoing.get(&next) { + stack.extend(children.iter().cloned()); + } 
+ } + counts.insert(task.id.clone(), seen.len()); + } + counts + } + + fn downstream_depths(&self) -> BTreeMap { + let outgoing = self.active_outgoing_map(); + let order = self.topological_order().unwrap_or_default(); + let mut depth = BTreeMap::new(); + for task_id in order.into_iter().rev() { + let best_child = outgoing + .get(&task_id) + .into_iter() + .flatten() + .filter_map(|child| depth.get(child).copied()) + .max() + .unwrap_or(0); + depth.insert(task_id, best_child + 1); + } + depth + } +} + +/// Paths for generated graph artifacts. +#[derive(Debug, Clone)] +pub struct GraphArtifactPaths { + /// Markdown matrix path. + pub matrix_md: PathBuf, + /// Standalone CSV matrix path. + pub matrix_csv: PathBuf, + /// Graphviz DOT path. + pub dot: PathBuf, + /// Rendered SVG path. + pub svg: PathBuf, +} + +impl GraphArtifactPaths { + /// Build default artifact paths under `docs/method`. + pub fn defaults(workspace: &MethodWorkspace) -> Self { + let method_root = workspace.method_root(); + Self { + matrix_md: method_root.join("task-matrix.md"), + matrix_csv: method_root.join("task-matrix.csv"), + dot: method_root.join("task-dag.dot"), + svg: method_root.join("task-dag.svg"), + } + } +} + +/// Generated text artifacts except SVG, which xtask renders through Graphviz. +#[derive(Debug, Clone)] +pub struct GraphArtifacts { + /// Markdown matrix. + pub matrix_md: String, + /// CSV matrix. + pub matrix_csv: String, + /// Graphviz DOT. + pub dot: String, +} + +impl GraphArtifacts { + /// Render artifacts from a graph. 
+ pub fn render(graph: &TaskGraph) -> Self { + Self { + matrix_md: graph.render_matrix_markdown(), + matrix_csv: graph.render_matrix_csv(), + dot: graph.render_dot(), + } + } +} + +#[derive(Debug, Clone)] +struct TaskSection { + native_id: String, + title: String, +} + +#[derive(Debug, Clone)] +struct TaskSectionBody { + body: String, +} + +fn collect_markdown_files(root: &Path) -> Result, String> { + let mut files = Vec::new(); + collect_markdown_files_inner(root, &mut files)?; + Ok(files) +} + +fn collect_markdown_files_inner(dir: &Path, files: &mut Vec) -> Result<(), String> { + let entries = + fs::read_dir(dir).map_err(|e| format!("failed to read dir {}: {e}", dir.display()))?; + for entry in entries { + let entry = entry.map_err(|e| format!("failed to read dir entry: {e}"))?; + let path = entry.path(); + if path.is_dir() { + collect_markdown_files_inner(&path, files)?; + } else if path + .extension() + .is_some_and(|ext| ext.eq_ignore_ascii_case("md")) + { + files.push(path); + } + } + Ok(()) +} + +fn lane_for(backlog_root: &Path, path: &Path) -> String { + path.strip_prefix(backlog_root) + .ok() + .and_then(|p| p.components().next()) + .and_then(|c| match c { + Component::Normal(os) => os.to_str(), + _ => None, + }) + .unwrap_or("unknown") + .to_string() +} + +fn lane_rank(lane: &str) -> usize { + GRAPH_LANES + .iter() + .position(|known| *known == lane) + .unwrap_or(usize::MAX) +} + +fn path_string(path: &Path) -> String { + path.to_string_lossy().replace('\\', "/") +} + +fn relative_path(root: &Path, path: &Path) -> Result { + path.strip_prefix(root) + .map(path_string) + .map_err(|e| format!("failed to make {} relative: {e}", path.display())) +} + +fn h1_title(text: &str) -> Option { + text.lines() + .find_map(|line| line.strip_prefix("# ").map(str::trim)) + .map(str::to_string) +} + +fn status_is_complete(text: &str) -> bool { + text.lines() + .filter_map(|line| line.trim().strip_prefix("Status:")) + .map(|status| status.trim().to_ascii_lowercase()) + 
.any(|status| { + (status.contains("complete") || status.contains("completed") || status.contains("done")) + && !status.contains("incomplete") + }) +} + +fn fallback_title(path: &Path) -> String { + path.file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("task") + .replace(['_', '-'], " ") +} + +fn task_sections(text: &str) -> Vec { + text.lines() + .filter_map(parse_task_heading) + .map(|(native_id, title)| TaskSection { native_id, title }) + .collect() +} + +fn task_sections_with_body(text: &str) -> Vec { + let lines = text.lines().collect::>(); + let headings = lines + .iter() + .enumerate() + .filter_map(|(idx, line)| parse_task_heading(line).map(|_| idx)) + .collect::>(); + let mut sections = Vec::new(); + for (idx, heading_line) in headings.iter().enumerate() { + let body_start = heading_line + 1; + let body_end = headings.get(idx + 1).copied().unwrap_or(lines.len()); + sections.push(TaskSectionBody { + body: lines[body_start..body_end].join("\n"), + }); + } + sections +} + +fn parse_task_heading(line: &str) -> Option<(String, String)> { + let rest = line.strip_prefix(TASK_SECTION_PREFIX)?; + let (id_tail, title) = rest.split_once(':')?; + let native_id = format!("T-{}", id_tail.trim()); + if native_id.is_empty() || title.trim().is_empty() { + return None; + } + Some((native_id, title.trim().to_string())) +} + +fn slugify_heading(text: &str) -> String { + let mut out = String::new(); + let mut last_dash = false; + for ch in text.chars().flat_map(char::to_lowercase) { + if ch.is_ascii_alphanumeric() { + out.push(ch); + last_dash = false; + } else if (ch.is_whitespace() || ch == '_' || ch == '-') && !last_dash && !out.is_empty() { + out.push('-'); + last_dash = true; + } + } + if out.ends_with('-') { + out.pop(); + } + out +} + +fn file_dependency_links(text: &str) -> Vec { + let mut links = Vec::new(); + let mut collecting = false; + let mut block = Vec::new(); + + for line in text.lines() { + let trimmed = line.trim(); + if !collecting && trimmed == 
/// Extract the URL targets of inline Markdown links (`[label](target)`)
/// in document order.
///
/// Labels that are not immediately followed by `(...)` are skipped;
/// reference-style links are ignored. Scanning stops at the first
/// unterminated `[` or `(`.
fn markdown_links(text: &str) -> Vec<String> {
    let mut found = Vec::new();
    let mut rest = text;
    loop {
        // Locate the next candidate label opener.
        let Some(lbracket) = rest.find('[') else { break };
        let after_label = &rest[lbracket..];
        let Some(rbracket) = after_label.find(']') else { break };
        let tail = &after_label[rbracket + 1..];
        if let Some(inside) = tail.strip_prefix('(') {
            // `](...)` — capture the target and continue past it.
            let Some(rparen) = inside.find(')') else { break };
            found.push(inside[..rparen].to_string());
            rest = &inside[rparen + 1..];
        } else {
            // A bare `[label]` with no target; resume after the `]`.
            rest = tail;
        }
    }
    found
}
/// Collapse `.` and `..` segments in a relative path and render it as a
/// `/`-joined string.
///
/// `..` pops the previously kept segment (or is ignored at the start);
/// `.`/root/prefix components are dropped entirely, so the result is
/// always relative and uses forward slashes on every platform.
fn normalize_relative_path(path: PathBuf) -> String {
    let mut segments: Vec<String> = Vec::new();
    for part in path.components() {
        match part {
            Component::Normal(seg) => segments.push(seg.to_string_lossy().into_owned()),
            Component::ParentDir => {
                segments.pop();
            }
            Component::CurDir | Component::RootDir | Component::Prefix(_) => {}
        }
    }
    segments.join("/")
}

/// Values of `**Blocked By:**` lines within a task section.
fn blocked_by_values(section: &str) -> Vec<&str> {
    field_values(section, "**Blocked By:**")
}

/// Values of `**Blocking:**` lines within a task section.
fn blocking_values(section: &str) -> Vec<&str> {
    field_values(section, "**Blocking:**")
}

/// Collect the trimmed remainder of every line whose trimmed form starts
/// with `prefix`.
fn field_values<'a>(section: &'a str, prefix: &str) -> Vec<&'a str> {
    let mut values = Vec::new();
    for line in section.lines() {
        if let Some(rest) = line.trim().strip_prefix(prefix) {
            values.push(rest.trim());
        }
    }
    values
}
/// Escape backslashes and double quotes for embedding in a Graphviz DOT
/// double-quoted string.
fn dot_escape(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}

/// Join the non-empty label parts with DOT line breaks (literal `\n`),
/// escaping each part.
fn dot_label(parts: &[String]) -> String {
    let mut rendered = Vec::with_capacity(parts.len());
    for part in parts {
        if !part.is_empty() {
            rendered.push(dot_escape(part));
        }
    }
    rendered.join("\\n")
}

/// Greedily wrap `title` into at most `max_lines` lines of at most
/// `limit` characters; a single word longer than `limit` occupies its own
/// (overlong) line. When wrapping drops any part of the title, the last
/// line is suffixed with `...` (after trimming trailing dots).
fn wrap_title(title: &str, limit: usize, max_lines: usize) -> Vec<String> {
    let mut wrapped: Vec<String> = Vec::new();
    let mut line = String::new();
    for word in title.split_whitespace() {
        let candidate = if line.is_empty() {
            word.to_string()
        } else {
            format!("{line} {word}")
        };
        if candidate.len() <= limit {
            // The word still fits on the current line.
            line = candidate;
        } else {
            // Flush the current line and start a new one with this word.
            if !line.is_empty() {
                wrapped.push(line);
            }
            line = word.to_string();
        }
        if wrapped.len() >= max_lines {
            break;
        }
    }
    if !line.is_empty() && wrapped.len() < max_lines {
        wrapped.push(line);
    }
    // Mark truncation: a shorter joined result means words were dropped.
    if wrapped.join(" ").len() < title.len() {
        if let Some(last) = wrapped.last_mut() {
            *last = format!("{}...", last.trim_end_matches('.'));
        }
    }
    wrapped
}
{ + let text = r" +Depends on: + +- [A](./a.md) + +Design source: +[B](./b.md) +"; + assert_eq!(file_dependency_links(text), vec!["./a.md"]); + } + + #[test] + fn token_parser_finds_legacy_task_ids() { + assert_eq!( + task_tokens("T-1-2-3, T-10-6-1a"), + vec!["T-1-2-3", "T-10-6-1a"] + ); + } + + #[test] + fn markdown_anchor_matches_expected_task_heading() { + assert_eq!( + slugify_heading("T-6-4-2 Inspect -- attachment payload pretty-printing"), + "t-6-4-2-inspect-attachment-payload-pretty-printing" + ); + } +} diff --git a/crates/method/src/lib.rs b/crates/method/src/lib.rs index 42cbea04..2a94b04e 100644 --- a/crates/method/src/lib.rs +++ b/crates/method/src/lib.rs @@ -9,6 +9,10 @@ //! It has no dependency on Echo or any other project. It could live in //! its own repo. +pub mod close; +pub mod drift; +pub mod graph; pub mod inbox; +pub mod pull; pub mod status; pub mod workspace; diff --git a/crates/method/src/pull.rs b/crates/method/src/pull.rs new file mode 100644 index 00000000..43bd8f86 --- /dev/null +++ b/crates/method/src/pull.rs @@ -0,0 +1,188 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +//! METHOD backlog-to-design promotion. + +use std::collections::BTreeSet; +use std::fs; +use std::path::{Path, PathBuf}; + +use serde::Serialize; + +use crate::graph::TaskGraph; +use crate::workspace::MethodWorkspace; + +/// Result of promoting a backlog item into a design cycle. +#[derive(Debug, Clone, Serialize)] +pub struct PullResult { + /// New cycle number, e.g. `0019`. + pub cycle_number: String, + /// New cycle directory name, e.g. `0019-xtask-method-pull`. + pub cycle: String, + /// Path to the moved design document. + pub design_path: PathBuf, +} + +/// Promote one backlog file into the next numbered design cycle. +/// +/// `selector` may be a relative/absolute markdown path, a backlog file stem, a +/// generated METHOD task id such as `M043`, or a native task id such as +/// `T-6-5-1`. 
If a selector resolves to more than one backlog file, this fails +/// closed and asks for a more specific selector. +pub fn pull_backlog_item( + workspace: &MethodWorkspace, + selector: &str, +) -> Result { + let source = resolve_source(workspace, selector)?; + let source_stem = source + .file_stem() + .and_then(|stem| stem.to_str()) + .ok_or_else(|| format!("invalid backlog filename: {}", source.display()))?; + let design_slug = strip_legend_prefix(source_stem); + let cycle_number = next_cycle_number(workspace)?; + let cycle = format!("{cycle_number}-{design_slug}"); + let cycle_dir = workspace.design_root().join(&cycle); + let design_path = cycle_dir.join(format!("{design_slug}.md")); + + if cycle_dir.exists() { + return Err(format!( + "refusing to overwrite existing design cycle: {}", + cycle_dir.display() + )); + } + + fs::create_dir_all(&cycle_dir) + .map_err(|e| format!("failed to create {}: {e}", cycle_dir.display()))?; + fs::rename(&source, &design_path).map_err(|e| { + format!( + "failed to move {} to {}: {e}", + source.display(), + design_path.display() + ) + })?; + + Ok(PullResult { + cycle_number, + cycle, + design_path, + }) +} + +fn resolve_source(workspace: &MethodWorkspace, selector: &str) -> Result { + let backlog_root = workspace.backlog_root(); + let root = backlog_root + .parent() + .and_then(Path::parent) + .and_then(Path::parent) + .ok_or_else(|| "failed to resolve METHOD repo root".to_string())?; + + let selector_path = Path::new(selector); + if selector_path.extension().is_some_and(|ext| ext == "md") { + let candidate = if selector_path.is_absolute() { + selector_path.to_path_buf() + } else { + root.join(selector_path) + }; + ensure_backlog_file(workspace, &candidate)?; + return Ok(candidate); + } + + let graph = TaskGraph::build(workspace)?; + let matches = graph + .tasks + .iter() + .filter(|task| { + task.id == selector + || task.native_id.as_deref() == Some(selector) + || Path::new(&task.source_path) + .file_stem() + .and_then(|stem| 
/// Strip a leading ALL-CAPS legend prefix (`PLATFORM_`, `KERNEL_`, ...)
/// from a backlog file stem, then slugify the remainder: underscores
/// become dashes and the result is lowercased.
///
/// A prefix only counts as a legend when it is non-empty and every
/// character is an ASCII uppercase letter; otherwise the whole stem is
/// slugified unchanged.
fn strip_legend_prefix(stem: &str) -> String {
    let body = match stem.split_once('_') {
        Some((legend, rest))
            if !legend.is_empty() && legend.chars().all(|ch| ch.is_ascii_uppercase()) =>
        {
            rest
        }
        _ => stem,
    };
    body.replace('_', "-").to_ascii_lowercase()
}
#[test] + fn strips_uppercase_legend_prefix() { + assert_eq!( + strip_legend_prefix("PLATFORM_xtask-method-pull"), + "xtask-method-pull" + ); + } + + #[test] + fn leaves_unprefixed_stem_as_slug() { + assert_eq!(strip_legend_prefix("docs-cleanup"), "docs-cleanup"); + } +} diff --git a/crates/method/src/status.rs b/crates/method/src/status.rs index f5c277d5..207d41be 100644 --- a/crates/method/src/status.rs +++ b/crates/method/src/status.rs @@ -24,7 +24,7 @@ pub struct StatusReport { } /// An active (not yet retro'd) cycle. -#[derive(Debug, Serialize)] +#[derive(Clone, Debug, Serialize)] pub struct ActiveCycle { /// Cycle number (e.g., "0002"). pub number: String, diff --git a/crates/method/src/workspace.rs b/crates/method/src/workspace.rs index 83a597ea..6c9f8db8 100644 --- a/crates/method/src/workspace.rs +++ b/crates/method/src/workspace.rs @@ -35,6 +35,11 @@ impl MethodWorkspace { self.root.join("docs/method/backlog") } + /// Return the path to the METHOD docs root. + pub fn method_root(&self) -> PathBuf { + self.root.join("docs/method") + } + /// Return the path to the design docs root. pub fn design_root(&self) -> PathBuf { self.root.join("docs/design") diff --git a/crates/method/tests/close_tests.rs b/crates/method/tests/close_tests.rs new file mode 100644 index 00000000..d976877d --- /dev/null +++ b/crates/method/tests/close_tests.rs @@ -0,0 +1,64 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +//! Tests for METHOD cycle closeout scaffolding. 
+#![allow(clippy::expect_used)] + +use std::fs; + +use method::close::close_cycle; +use method::workspace::MethodWorkspace; + +fn scaffold(root: &std::path::Path) { + fs::create_dir_all(root.join("docs/method/backlog/inbox")).expect("create inbox"); + fs::create_dir_all(root.join("docs/design/0001-first-cycle")).expect("create first design"); + fs::create_dir_all(root.join("docs/design/0002-second-cycle")).expect("create second design"); + fs::create_dir_all(root.join("docs/method/retro/0001-first-cycle")) + .expect("create closed first retro"); +} + +#[test] +fn close_defaults_to_most_recent_active_cycle() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let result = close_cycle(&workspace, None).expect("close cycle"); + + assert_eq!(result.cycle, "0002-second-cycle"); + assert!(result.retro_path.ends_with("retro.md")); + assert!(result.witness_dir.ends_with("witness")); + assert!(result.witness_dir.is_dir()); + + let retro = fs::read_to_string(result.retro_path).expect("read retro"); + assert!(retro.contains("# Retro: 0002-second-cycle")); + assert!(retro.contains("Witness: [`witness/`](./witness/)")); +} + +#[test] +fn close_accepts_numeric_cycle_selector() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + fs::create_dir_all(tmp.path().join("docs/design/0003-third-cycle")) + .expect("create third design"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let result = close_cycle(&workspace, Some("0002")).expect("close selected cycle"); + + assert_eq!(result.cycle, "0002-second-cycle"); +} + +#[test] +fn close_refuses_to_overwrite_existing_retro() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let _ = close_cycle(&workspace, Some("0002")).expect("close cycle once"); + let err = 
close_cycle(&workspace, Some("0002")).expect_err("overwrite should fail"); + + assert!( + err.contains("refusing to overwrite existing retro directory"), + "unexpected error: {err}" + ); +} diff --git a/crates/method/tests/drift_tests.rs b/crates/method/tests/drift_tests.rs new file mode 100644 index 00000000..d7765a2f --- /dev/null +++ b/crates/method/tests/drift_tests.rs @@ -0,0 +1,65 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +//! Tests for METHOD playback drift coverage. +#![allow(clippy::expect_used)] + +use std::fs; + +use method::drift::drift_report; +use method::workspace::MethodWorkspace; + +fn scaffold(root: &std::path::Path) { + fs::create_dir_all(root.join("docs/method/backlog/inbox")).expect("create inbox"); + fs::create_dir_all(root.join("docs/design/0001-playback-check")).expect("create design"); + fs::create_dir_all(root.join("docs/method/retro")).expect("create retro"); + fs::create_dir_all(root.join("crates/demo/tests")).expect("create test dir"); +} + +#[test] +fn drift_marks_playback_question_covered_by_test_description() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + fs::write( + tmp.path().join("docs/design/0001-playback-check/design.md"), + "# Playback Check\n\n## Human playback\n\n1. 
Does the status command emit JSON?\n", + ) + .expect("write design"); + fs::write( + tmp.path().join("crates/demo/tests/status_tests.rs"), + "// Does the status command emit JSON?\n#[test]\nfn status_json() {}\n", + ) + .expect("write test"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let report = drift_report(&workspace, Some("0001")).expect("drift report"); + + assert!(report.covered()); + assert_eq!(report.missing_count(), 0); + assert_eq!(report.questions.len(), 1); + assert_eq!( + report.questions[0].matches, + vec![std::path::PathBuf::from( + "crates/demo/tests/status_tests.rs" + )] + ); +} + +#[test] +fn drift_reports_missing_playback_question() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + fs::write( + tmp.path() + .join("docs/design/0001-playback-check/design.md"), + "# Playback Check\n\n## Agent playback\n\n| Question | Expected |\n| --- | --- |\n| Can the agent parse the frontier? | Yes |\n", + ) + .expect("write design"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let report = drift_report(&workspace, None).expect("drift report"); + + assert!(!report.covered()); + assert_eq!(report.missing_count(), 1); + assert!(report.questions[0].matches.is_empty()); +} diff --git a/crates/method/tests/graph_tests.rs b/crates/method/tests/graph_tests.rs new file mode 100644 index 00000000..b44933cd --- /dev/null +++ b/crates/method/tests/graph_tests.rs @@ -0,0 +1,138 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +//! Tests for METHOD task graph parsing and scheduling queries. 
+#![allow(clippy::expect_used)] + +use std::fs; + +use method::graph::TaskGraph; +use method::workspace::MethodWorkspace; + +fn scaffold(root: &std::path::Path) { + for lane in &["inbox", "asap", "up-next", "cool-ideas", "bad-code"] { + fs::create_dir_all(root.join(format!("docs/method/backlog/{lane}"))).expect("create lane"); + } + fs::create_dir_all(root.join("docs/design")).expect("create design"); + fs::create_dir_all(root.join("docs/method/retro")).expect("create retro"); +} + +#[test] +fn graph_builds_file_level_depends_on_edges() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + + fs::write( + tmp.path().join("docs/method/backlog/asap/A_first.md"), + "# First\n", + ) + .expect("write first"); + fs::write( + tmp.path().join("docs/method/backlog/asap/A_second.md"), + "# Second\n\nDepends on:\n\n- [First](./A_first.md)\n\n## Goal\n\nDo it.\n", + ) + .expect("write second"); + + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + let graph = TaskGraph::build(&workspace).expect("graph"); + + assert_eq!(graph.tasks.len(), 2); + assert_eq!(graph.edges.len(), 1); + assert_eq!(graph.edges[0].prerequisite, "M001"); + assert_eq!(graph.edges[0].dependent, "M002"); + + let frontier = graph.frontier(); + assert_eq!(frontier.len(), 1); + assert_eq!(frontier[0].task.title, "First"); +} + +#[test] +fn graph_splits_legacy_t_sections_and_blocks_by_native_id() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + + fs::write( + tmp.path() + .join("docs/method/backlog/asap/PLATFORM_legacy.md"), + r"# Legacy + +## T-1-1-1: First task + +**Blocked By:** none +**Blocking:** T-1-1-2 + +## T-1-1-2: Second task + +**Blocked By:** T-1-1-1 +**Blocking:** none +", + ) + .expect("write legacy"); + + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + let graph = TaskGraph::build(&workspace).expect("graph"); + + assert_eq!(graph.tasks.len(), 2); + assert_eq!(graph.edges.len(), 1); + 
assert_eq!(graph.edges[0].prerequisite, "M001"); + assert_eq!(graph.edges[0].dependent, "M002"); + assert_eq!( + graph.frontier()[0].task.native_id.as_deref(), + Some("T-1-1-1") + ); +} + +#[test] +fn matrix_csv_has_square_shape() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + + fs::write( + tmp.path().join("docs/method/backlog/asap/A_first.md"), + "# First\n", + ) + .expect("write first"); + fs::write( + tmp.path().join("docs/method/backlog/asap/A_second.md"), + "# Second\n\nDepends on:\n\n- [First](./A_first.md)\n", + ) + .expect("write second"); + + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + let graph = TaskGraph::build(&workspace).expect("graph"); + let csv = graph.render_matrix_csv(); + let rows = csv + .trim_end() + .lines() + .map(|line| line.split(',').count()) + .collect::>(); + + assert_eq!(rows, vec![3, 3, 3]); + assert!(csv.contains("depends on")); +} + +#[test] +fn completed_backlog_cards_satisfy_blockers_without_becoming_open() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + + fs::write( + tmp.path().join("docs/method/backlog/asap/A_design.md"), + "# Design\n\nStatus: design packet complete.\n", + ) + .expect("write design"); + fs::write( + tmp.path().join("docs/method/backlog/asap/A_impl.md"), + "# Impl\n\nDepends on:\n\n- [Design](./A_design.md)\n", + ) + .expect("write impl"); + + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + let graph = TaskGraph::build(&workspace).expect("graph"); + let frontier = graph.frontier(); + + assert_eq!(frontier.len(), 1); + assert_eq!(frontier[0].task.title, "Impl"); + assert!(graph.render_dot().contains("DONE")); + assert!(graph.render_dot().contains("OPEN")); +} diff --git a/crates/method/tests/pull_tests.rs b/crates/method/tests/pull_tests.rs new file mode 100644 index 00000000..df1599e2 --- /dev/null +++ b/crates/method/tests/pull_tests.rs @@ -0,0 +1,87 @@ +// SPDX-License-Identifier: 
Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +//! Tests for METHOD backlog pull. +#![allow(clippy::expect_used)] + +use std::fs; + +use method::pull::pull_backlog_item; +use method::workspace::MethodWorkspace; + +fn scaffold(root: &std::path::Path) { + for lane in &["inbox", "asap", "up-next", "cool-ideas", "bad-code"] { + fs::create_dir_all(root.join(format!("docs/method/backlog/{lane}"))).expect("create lane"); + } + fs::create_dir_all(root.join("docs/design/0001-existing-cycle")).expect("create design"); + fs::create_dir_all(root.join("docs/method/retro")).expect("create retro"); +} + +#[test] +fn pull_by_task_id_moves_backlog_file_to_next_design_cycle() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + let source = tmp + .path() + .join("docs/method/backlog/asap/PLATFORM_build-spaceship.md"); + fs::write(&source, "# Build Spaceship\n").expect("write backlog"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let result = pull_backlog_item(&workspace, "M001").expect("pull item"); + + assert_eq!(result.cycle_number, "0002"); + assert_eq!(result.cycle, "0002-build-spaceship"); + assert_eq!( + result + .design_path + .strip_prefix(tmp.path()) + .expect("relative design path"), + std::path::Path::new("docs/design/0002-build-spaceship/build-spaceship.md") + ); + assert!(!source.exists()); + assert_eq!( + fs::read_to_string(result.design_path).expect("read moved design"), + "# Build Spaceship\n" + ); +} + +#[test] +fn pull_by_unprefixed_stem_strips_legend_prefix() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + fs::write( + tmp.path() + .join("docs/method/backlog/asap/KERNEL_determinism-torture.md"), + "# Determinism Torture\n", + ) + .expect("write backlog"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let result = pull_backlog_item(&workspace, "determinism-torture").expect("pull item"); + + assert_eq!(result.cycle, 
"0002-determinism-torture"); + assert!(result + .design_path + .ends_with("docs/design/0002-determinism-torture/determinism-torture.md")); +} + +#[test] +fn pull_by_native_section_id_moves_containing_file() { + let tmp = tempfile::tempdir().expect("tempdir"); + scaffold(tmp.path()); + let source = tmp + .path() + .join("docs/method/backlog/asap/PLATFORM_two-tasks.md"); + fs::write( + &source, + "# Two Tasks\n\n## T-1-1-1: First\n\n## T-1-1-2: Second\n", + ) + .expect("write backlog"); + let workspace = MethodWorkspace::discover(tmp.path()).expect("discover"); + + let result = pull_backlog_item(&workspace, "T-1-1-2").expect("pull containing file"); + + assert_eq!(result.cycle, "0002-two-tasks"); + assert!(!source.exists()); + assert!(result.design_path.is_file()); +} diff --git a/crates/ttd-manifest/.gitkeep b/crates/ttd-manifest/.gitkeep deleted file mode 100644 index 50276101..00000000 --- a/crates/ttd-manifest/.gitkeep +++ /dev/null @@ -1,3 +0,0 @@ -# Vendored Wesley TTD manifest outputs -# Generated by: cargo xtask wesley:sync -# Do not edit manually - regenerate from Wesley schema diff --git a/crates/ttd-protocol-rs/Cargo.toml b/crates/ttd-protocol-rs/Cargo.toml index ae35cae4..fc044b1a 100644 --- a/crates/ttd-protocol-rs/Cargo.toml +++ b/crates/ttd-protocol-rs/Cargo.toml @@ -10,9 +10,10 @@ license = "Apache-2.0" description = "Generated TTD protocol types from the canonical warp-ttd protocol" publish = false -# NOTE: This crate's lib.rs is generated by `cargo xtask wesley sync`. -# Do not edit lib.rs manually - changes will be overwritten. +# NOTE: This crate's lib.rs is a checked-in Wesley-generated consumer artifact. +# Do not edit lib.rs manually - changes will be overwritten by regeneration. # Source of truth: canonical warp-ttd protocol schema, consumed through Wesley-generated artifacts. 
+# Local provenance check: cargo xtask wesley sync [lib] path = "lib.rs" diff --git a/crates/ttd-protocol-rs/README.md b/crates/ttd-protocol-rs/README.md new file mode 100644 index 00000000..a035e228 --- /dev/null +++ b/crates/ttd-protocol-rs/README.md @@ -0,0 +1,27 @@ + + + +# ttd-protocol-rs + +Generated Rust consumer types for the host-neutral TTD protocol. + +Echo is not the source of truth for this protocol. The canonical authored +schema lives with `warp-ttd` at: + +```text +warp-ttd/schemas/warp-ttd-protocol.graphql +``` + +This crate is a checked-in downstream consumer artifact produced through the +Wesley TTD generator path. Do not edit `lib.rs` by hand. + +Local provenance check: + +```sh +cargo xtask wesley sync +``` + +That check verifies that Echo no longer carries a backup +`schemas/ttd-protocol.graphql`, that the Rust and TypeScript generated +consumers agree on the canonical schema hash, and that Echo runtime compliance +code remains separate from host-neutral debugger protocol nouns. diff --git a/crates/warp-cli/README.md b/crates/warp-cli/README.md index fb78c1c0..e5e91006 100644 --- a/crates/warp-cli/README.md +++ b/crates/warp-cli/README.md @@ -18,6 +18,8 @@ The binary is named `echo-cli`. ### `echo-cli verify ` Validate WSC snapshot integrity. Loads the file, validates structure, reconstructs the graph, and computes state root hashes. +Text output stays plain when stdout is not a TTY and decorates pass/fail statuses +with color when stdout is a terminal. ```sh # Verify a snapshot @@ -30,7 +32,7 @@ echo-cli verify state.wsc --expected abcd1234... echo-cli --format json verify state.wsc ``` -### `echo-cli bench [--filter ]` +### `echo-cli bench [--filter ] [--baseline ]` Run Criterion benchmarks, parse JSON results, and format as an ASCII table. 
@@ -41,13 +43,18 @@ echo-cli bench # Filter by name echo-cli bench --filter hotpath +# Compare current medians against perf-baseline.json +echo-cli bench --baseline main + # JSON output for CI echo-cli --format json bench ``` -### `echo-cli inspect [--tree]` +### `echo-cli inspect [--tree] [--raw]` -Display WSC snapshot metadata and graph statistics. +Display WSC snapshot metadata, graph statistics, and attachment payloads. Known +motion payloads are decoded by default; unknown or invalid payloads fall back to +hex. ```sh # Show metadata and stats @@ -56,6 +63,9 @@ echo-cli inspect state.wsc # Include ASCII tree of graph structure echo-cli inspect state.wsc --tree +# Force all attachment payloads to hex +echo-cli inspect state.wsc --raw + # JSON output echo-cli --format json inspect state.wsc ``` @@ -75,6 +85,10 @@ cargo xtask man-pages # Output: docs/man/echo-cli.1, echo-cli-verify.1, etc. ``` +CLI help goldens are committed as plain text fixtures and must not contain +trailing whitespace. The integration suite enforces this so snapshot updates do +not smuggle editor or generator padding into review diffs. + ## Documentation See the root `README.md` and `docs/spec/` for architecture context. diff --git a/crates/warp-cli/src/bench.rs b/crates/warp-cli/src/bench.rs index 2d29b119..554e158b 100644 --- a/crates/warp-cli/src/bench.rs +++ b/crates/warp-cli/src/bench.rs @@ -6,7 +6,8 @@ //! JSON from `target/criterion/**/new/estimates.json`, and renders an ASCII //! table or JSON array. 
-use std::path::Path; +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; use std::process::Command; use anyhow::{bail, Context, Result}; @@ -23,6 +24,20 @@ pub(crate) struct BenchResult { pub(crate) mean_ns: f64, pub(crate) median_ns: f64, pub(crate) stddev_ns: f64, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) baseline_ns: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) delta_pct: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) baseline_status: Option, +} + +/// Baseline metadata included in JSON output when `--baseline` is supplied. +#[derive(Debug, Clone, Serialize)] +pub(crate) struct BaselineInfo { + pub(crate) name: String, + pub(crate) path: String, + pub(crate) found: bool, } /// Raw Criterion estimates JSON structure. @@ -73,7 +88,11 @@ pub(crate) fn build_bench_command(filter: Option<&str>) -> Command { } /// Runs the bench subcommand. -pub(crate) fn run(filter: Option<&str>, format: &OutputFormat) -> Result<()> { +pub(crate) fn run( + filter: Option<&str>, + baseline: Option<&str>, + format: &OutputFormat, +) -> Result<()> { // 1. Shell out to cargo bench. let mut cmd = build_bench_command(filter); @@ -90,20 +109,25 @@ pub(crate) fn run(filter: Option<&str>, format: &OutputFormat) -> Result<()> { } // 2. Parse Criterion JSON results. - let results = collect_criterion_results(Path::new("target/criterion"), filter)?; + let mut results = collect_criterion_results(Path::new("target/criterion"), filter)?; + let baseline_info = if let Some(name) = baseline { + Some(apply_named_baseline(name, &mut results)?) 
+ } else { + None + }; if results.is_empty() { let text = "No benchmark results found.\n"; - let json = serde_json::json!({ "benchmarks": [], "message": "no results found" }); + let json = serde_json::json!({ "benchmarks": [], "baseline": baseline_info, "message": "no results found" }); eprintln!("warning: no benchmark results found in target/criterion/"); emit(format, text, &json)?; return Ok(()); } // 3. Format output. - let text = format_table(&results); + let text = format_table(&results, baseline_info.as_ref()); let json = serde_json::to_value(&results).context("failed to serialize bench results")?; - let json = serde_json::json!({ "benchmarks": json }); + let json = serde_json::json!({ "benchmarks": json, "baseline": baseline_info }); emit(format, &text, &json)?; Ok(()) @@ -204,25 +228,123 @@ pub(crate) fn parse_estimates(name: &str, path: &Path) -> Result { mean_ns: estimates.mean.point_estimate, median_ns: estimates.median.point_estimate, stddev_ns: estimates.std_dev.point_estimate, + baseline_ns: None, + delta_pct: None, + baseline_status: None, }) } +fn baseline_path(name: &str) -> PathBuf { + if name == "main" { + PathBuf::from("perf-baseline.json") + } else { + PathBuf::from(format!("perf-baseline.{name}.json")) + } +} + +fn apply_named_baseline(name: &str, results: &mut [BenchResult]) -> Result { + let path = baseline_path(name); + let info = BaselineInfo { + name: name.to_string(), + path: path.display().to_string(), + found: path.is_file(), + }; + + if !info.found { + return Ok(info); + } + + let baseline = load_baseline(&path)?; + apply_baseline(results, &baseline); + Ok(info) +} + +fn load_baseline(path: &Path) -> Result> { + let content = std::fs::read_to_string(path) + .with_context(|| format!("failed to read baseline {}", path.display()))?; + serde_json::from_str(&content) + .with_context(|| format!("failed to parse baseline {}", path.display())) +} + +fn apply_baseline(results: &mut [BenchResult], baseline: &BTreeMap) { + for result in results 
{ + let Some(base) = baseline.get(&result.name).copied() else { + result.baseline_status = Some("NEW".to_string()); + continue; + }; + + result.baseline_ns = Some(base); + if base > 0.0 && base.is_finite() { + result.delta_pct = Some(((result.median_ns - base) / base) * 100.0); + result.baseline_status = Some("OK".to_string()); + } else { + result.baseline_status = Some("INVALID_BASELINE".to_string()); + } + } +} + /// Formats benchmark results as an ASCII table. -pub(crate) fn format_table(results: &[BenchResult]) -> String { +pub(crate) fn format_table(results: &[BenchResult], baseline: Option<&BaselineInfo>) -> String { + use std::fmt::Write as _; + let mut table = Table::new(); table.set_content_arrangement(ContentArrangement::Dynamic); - table.set_header(vec!["Benchmark", "Mean", "Median", "Std Dev"]); + let show_baseline = baseline.is_some_and(|info| info.found); + if show_baseline { + table.set_header(vec![ + "Benchmark", + "Mean", + "Median", + "Std Dev", + "Baseline", + "Delta", + "Status", + ]); + } else { + table.set_header(vec!["Benchmark", "Mean", "Median", "Std Dev"]); + } for r in results { - table.add_row(vec![ + let mut row = vec![ r.name.clone(), format_duration(r.mean_ns), format_duration(r.median_ns), format_duration(r.stddev_ns), - ]); + ]; + if show_baseline { + row.push( + r.baseline_ns + .map_or_else(|| "\u{2014}".to_string(), format_duration), + ); + row.push( + r.delta_pct + .map_or_else(|| "\u{2014}".to_string(), |delta| format!("{delta:+.1}%")), + ); + row.push( + r.baseline_status + .as_deref() + .unwrap_or("\u{2014}") + .to_string(), + ); + } + table.add_row(row); } - format!("{table}\n") + let mut out = String::new(); + if let Some(info) = baseline { + if info.found { + let _ = writeln!(out, "Baseline: {} ({})", info.name, info.path); + } else { + let _ = writeln!( + out, + "No baseline found at {}; showing absolute values only.", + info.path + ); + } + let _ = writeln!(out); + } + let _ = writeln!(out, "{table}"); + out } /// Formats 
nanosecond durations in human-readable form. @@ -286,16 +408,22 @@ mod tests { mean_ns: 1_230_000.0, median_ns: 1_210_000.0, stddev_ns: 120_000.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, }, BenchResult { name: "materialize".to_string(), mean_ns: 456_700.0, median_ns: 450_200.0, stddev_ns: 32_100.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, }, ]; - let table = format_table(&results); + let table = format_table(&results, None); assert!( table.contains("tick_pipeline"), "table should contain bench name" @@ -314,6 +442,9 @@ mod tests { mean_ns: 100.0, median_ns: 95.0, stddev_ns: 5.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, }]; let json = serde_json::to_value(&results).unwrap(); @@ -479,4 +610,100 @@ mod tests { "command without filter should not contain '--'" ); } + + #[test] + fn baseline_path_main_uses_repo_baseline() { + assert_eq!(baseline_path("main"), PathBuf::from("perf-baseline.json")); + assert_eq!( + baseline_path("feature"), + PathBuf::from("perf-baseline.feature.json") + ); + } + + #[test] + fn apply_baseline_adds_delta_status() { + let mut results = vec![BenchResult { + name: "tick_pipeline".to_string(), + mean_ns: 120.0, + median_ns: 110.0, + stddev_ns: 3.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, + }]; + let baseline = BTreeMap::from([("tick_pipeline".to_string(), 100.0)]); + + apply_baseline(&mut results, &baseline); + + assert_eq!(results[0].baseline_ns, Some(100.0)); + assert_eq!(results[0].delta_pct, Some(10.0)); + assert_eq!(results[0].baseline_status.as_deref(), Some("OK")); + } + + #[test] + fn apply_baseline_marks_new_benchmark() { + let mut results = vec![BenchResult { + name: "new_bench".to_string(), + mean_ns: 120.0, + median_ns: 110.0, + stddev_ns: 3.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, + }]; + apply_baseline(&mut results, &BTreeMap::new()); + + assert_eq!(results[0].baseline_ns, None); + 
assert_eq!(results[0].delta_pct, None); + assert_eq!(results[0].baseline_status.as_deref(), Some("NEW")); + } + + #[test] + fn table_with_baseline_shows_delta_columns() { + let results = vec![BenchResult { + name: "tick_pipeline".to_string(), + mean_ns: 120.0, + median_ns: 110.0, + stddev_ns: 3.0, + baseline_ns: Some(100.0), + delta_pct: Some(10.0), + baseline_status: Some("OK".to_string()), + }]; + let info = BaselineInfo { + name: "main".to_string(), + path: "perf-baseline.json".to_string(), + found: true, + }; + + let table = format_table(&results, Some(&info)); + + assert!(table.contains("Baseline: main (perf-baseline.json)")); + assert!(table.contains("Delta")); + assert!(table.contains("+10.0%")); + assert!(table.contains("OK")); + } + + #[test] + fn missing_baseline_keeps_absolute_table() { + let results = vec![BenchResult { + name: "tick_pipeline".to_string(), + mean_ns: 120.0, + median_ns: 110.0, + stddev_ns: 3.0, + baseline_ns: None, + delta_pct: None, + baseline_status: None, + }]; + let info = BaselineInfo { + name: "main".to_string(), + path: "perf-baseline.json".to_string(), + found: false, + }; + + let table = format_table(&results, Some(&info)); + + assert!(table.contains("No baseline found at perf-baseline.json")); + assert!(!table.contains("Delta")); + assert!(table.contains("tick_pipeline")); + } } diff --git a/crates/warp-cli/src/cli.rs b/crates/warp-cli/src/cli.rs index 7a338502..364c77cc 100644 --- a/crates/warp-cli/src/cli.rs +++ b/crates/warp-cli/src/cli.rs @@ -19,7 +19,13 @@ use clap::{Parser, Subcommand, ValueEnum}; )] pub struct Cli { /// Output format (text or json). - #[arg(long, global = true, default_value = "text", value_enum)] + #[arg( + long, + global = true, + default_value = "text", + value_enum, + hide_possible_values = true + )] pub format: OutputFormat, /// Subcommand to execute. @@ -46,6 +52,10 @@ pub enum Commands { /// Filter benchmarks by pattern. 
#[arg(long)] filter: Option, + + /// Compare current medians against a saved baseline. + #[arg(long)] + baseline: Option, }, /// Inspect a WSC snapshot. @@ -56,6 +66,10 @@ pub enum Commands { /// Show ASCII tree of graph structure. #[arg(long)] tree: bool, + + /// Show attachment payload bytes as hex instead of decoding known payloads. + #[arg(long)] + raw: bool, }, } @@ -118,11 +132,23 @@ mod tests { assert_eq!(cli.format, OutputFormat::Json); } + #[test] + fn invalid_format_is_error() { + let result = Cli::try_parse_from(["echo-cli", "--format", "yaml", "bench"]); + assert!(result.is_err()); + } + #[test] fn parse_bench_no_filter() { let cli = Cli::try_parse_from(["echo-cli", "bench"]).unwrap(); match cli.command { - Commands::Bench { ref filter } => assert!(filter.is_none()), + Commands::Bench { + ref filter, + ref baseline, + } => { + assert!(filter.is_none()); + assert!(baseline.is_none()); + } _ => panic!("expected Bench command"), } } @@ -131,20 +157,36 @@ mod tests { fn parse_bench_with_filter() { let cli = Cli::try_parse_from(["echo-cli", "bench", "--filter", "hotpath"]).unwrap(); match cli.command { - Commands::Bench { ref filter } => { + Commands::Bench { ref filter, .. } => { assert_eq!(filter.as_deref(), Some("hotpath")); } _ => panic!("expected Bench command"), } } + #[test] + fn parse_bench_with_baseline() { + let cli = Cli::try_parse_from(["echo-cli", "bench", "--baseline", "main"]).unwrap(); + match cli.command { + Commands::Bench { ref baseline, .. 
} => { + assert_eq!(baseline.as_deref(), Some("main")); + } + _ => panic!("expected Bench command"), + } + } + #[test] fn parse_inspect_basic() { let cli = Cli::try_parse_from(["echo-cli", "inspect", "state.wsc"]).unwrap(); match cli.command { - Commands::Inspect { ref snapshot, tree } => { + Commands::Inspect { + ref snapshot, + tree, + raw, + } => { assert_eq!(snapshot, &PathBuf::from("state.wsc")); assert!(!tree); + assert!(!raw); } _ => panic!("expected Inspect command"), } @@ -159,6 +201,15 @@ mod tests { } } + #[test] + fn parse_inspect_with_raw() { + let cli = Cli::try_parse_from(["echo-cli", "inspect", "state.wsc", "--raw"]).unwrap(); + match cli.command { + Commands::Inspect { raw, .. } => assert!(raw), + _ => panic!("expected Inspect command"), + } + } + #[test] fn unknown_subcommand_is_error() { let result = Cli::try_parse_from(["echo-cli", "bogus"]); diff --git a/crates/warp-cli/src/inspect.rs b/crates/warp-cli/src/inspect.rs index ba1200aa..7727439f 100644 --- a/crates/warp-cli/src/inspect.rs +++ b/crates/warp-cli/src/inspect.rs @@ -10,10 +10,13 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}; use std::path::Path; use anyhow::{Context, Result}; +use bytes::Bytes; use serde::Serialize; +use warp_core::wsc::types::AttRow; use warp_core::wsc::view::WarpView; use warp_core::wsc::{validate_wsc, WscFile}; +use warp_core::{decode_motion_atom_payload, AtomPayload, TypeId}; use crate::cli::OutputFormat; use crate::output::{emit, hex_hash, short_hex}; @@ -39,6 +42,21 @@ pub(crate) struct WarpStats { pub(crate) node_types: BTreeMap, pub(crate) edge_types: BTreeMap, pub(crate) connected_components: usize, + pub(crate) attachments: Vec, +} + +/// Attachment payload display row. 
+#[derive(Debug, Serialize)] +pub(crate) struct AttachmentSummary { + pub(crate) owner: String, + pub(crate) owner_id: String, + pub(crate) plane: String, + pub(crate) kind: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) type_id: Option, + pub(crate) payload: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) warning: Option, } /// Full inspect report. @@ -64,7 +82,12 @@ pub(crate) struct TreeNode { const TREE_MAX_DEPTH: usize = 5; /// Runs the inspect subcommand. -pub(crate) fn run(snapshot: &Path, show_tree: bool, format: &OutputFormat) -> Result<()> { +pub(crate) fn run( + snapshot: &Path, + show_tree: bool, + raw_payloads: bool, + format: &OutputFormat, +) -> Result<()> { let file = WscFile::open(snapshot) .with_context(|| format!("failed to open WSC file: {}", snapshot.display()))?; @@ -89,7 +112,7 @@ pub(crate) fn run(snapshot: &Path, show_tree: bool, format: &OutputFormat) -> Re let store = graph_store_from_warp_view(&view); let state_root = store.canonical_state_hash(); - let stats = compute_stats(&view, &state_root); + let stats = compute_stats(&view, &state_root, raw_payloads); warp_stats.push(stats); if let Some(ref mut tree_list) = trees { @@ -111,7 +134,7 @@ pub(crate) fn run(snapshot: &Path, show_tree: bool, format: &OutputFormat) -> Re Ok(()) } -fn compute_stats(view: &WarpView<'_>, state_root: &[u8; 32]) -> WarpStats { +fn compute_stats(view: &WarpView<'_>, state_root: &[u8; 32], raw_payloads: bool) -> WarpStats { let nodes = view.nodes(); let edges = view.edges(); @@ -128,6 +151,7 @@ fn compute_stats(view: &WarpView<'_>, state_root: &[u8; 32]) -> WarpStats { // Connected components via BFS. 
let connected_components = count_connected_components(view); + let attachments = collect_attachments(view, raw_payloads); WarpStats { warp_id: hex_hash(view.warp_id()), @@ -138,7 +162,115 @@ fn compute_stats(view: &WarpView<'_>, state_root: &[u8; 32]) -> WarpStats { node_types, edge_types, connected_components, + attachments, + } +} + +fn collect_attachments(view: &WarpView<'_>, raw_payloads: bool) -> Vec { + let mut attachments = Vec::new(); + for (node_ix, node) in view.nodes().iter().enumerate() { + for att in view.node_attachments(node_ix) { + attachments.push(format_attachment( + "node", + &node.node_id, + "alpha", + att, + view, + raw_payloads, + )); + } + } + for (edge_ix, edge) in view.edges().iter().enumerate() { + for att in view.edge_attachments(edge_ix) { + attachments.push(format_attachment( + "edge", + &edge.edge_id, + "beta", + att, + view, + raw_payloads, + )); + } } + attachments +} + +fn format_attachment( + owner: &str, + owner_id: &[u8; 32], + plane: &str, + att: &AttRow, + view: &WarpView<'_>, + raw_payloads: bool, +) -> AttachmentSummary { + if att.is_descend() { + return AttachmentSummary { + owner: owner.to_string(), + owner_id: short_hex(owner_id), + plane: plane.to_string(), + kind: "descend".to_string(), + type_id: None, + payload: format!("warp:{}", hex_hash(&att.type_or_warp)), + warning: None, + }; + } + + let type_id = TypeId(att.type_or_warp); + let type_id_hex = hex_hash(&att.type_or_warp); + let (bytes, missing_blob) = match view.blob_for_attachment(att) { + Some(bytes) => (bytes, false), + None => (&[][..], true), + }; + let atom = AtomPayload::new(type_id, Bytes::copy_from_slice(bytes)); + let mut warning = missing_blob.then(|| "warning: missing attachment blob".to_string()); + let payload = if raw_payloads { + hex_payload(&type_id_hex, bytes) + } else if let Some((position, velocity)) = decode_motion_atom_payload(&atom) { + format!( + "position: ({}, {}, {}), velocity: ({}, {}, {})", + decimal(position[0]), + 
decimal(position[1]), + decimal(position[2]), + decimal(velocity[0]), + decimal(velocity[1]), + decimal(velocity[2]) + ) + } else { + if is_motion_type(type_id) && warning.is_none() { + warning = Some("warning: truncated or invalid motion payload".to_string()); + } + hex_payload(&type_id_hex, bytes) + }; + + AttachmentSummary { + owner: owner.to_string(), + owner_id: short_hex(owner_id), + plane: plane.to_string(), + kind: "atom".to_string(), + type_id: Some(type_id_hex), + payload, + warning, + } +} + +fn is_motion_type(type_id: TypeId) -> bool { + type_id == warp_core::motion_payload_type_id() + || type_id == warp_core::motion_payload_type_id_v0() +} + +fn hex_payload(type_id: &str, bytes: &[u8]) -> String { + format!("[type_id: {type_id}] 0x{}", hex::encode(bytes)) +} + +fn decimal(value: f32) -> String { + let mut text = format!("{value:.6}"); + while text.contains('.') && text.ends_with('0') { + text.pop(); + } + if text.ends_with('.') { + text.push('0'); + } + text } /// Counts connected components using BFS on the undirected graph. 
@@ -299,6 +431,30 @@ fn format_text_report(report: &InspectReport) -> String { let _ = writeln!(out, " {ty}: {count}"); } } + + if !w.attachments.is_empty() { + let _ = writeln!(out, " Attachments:"); + for attachment in &w.attachments { + let type_suffix = attachment + .type_id + .as_ref() + .map(|type_id| format!(" type_id={type_id}")) + .unwrap_or_default(); + let _ = writeln!( + out, + " {} {} {} {}{}: {}", + attachment.owner, + attachment.owner_id, + attachment.plane, + attachment.kind, + type_suffix, + attachment.payload + ); + if let Some(warning) = &attachment.warning { + let _ = writeln!(out, " {warning}"); + } + } + } let _ = writeln!(out); } @@ -361,7 +517,8 @@ mod tests { use warp_core::wsc::build::build_one_warp_input; use warp_core::wsc::write::write_wsc_one_warp; use warp_core::{ - make_edge_id, make_node_id, make_type_id, make_warp_id, EdgeRecord, GraphStore, NodeRecord, + encode_motion_atom_payload, make_edge_id, make_node_id, make_type_id, make_warp_id, + motion_payload_type_id, AttachmentValue, EdgeRecord, GraphStore, NodeRecord, }; fn make_test_graph() -> (GraphStore, warp_core::NodeId) { @@ -405,6 +562,19 @@ mod tests { write_wsc_one_warp(&input, [0u8; 32], 42).expect("WSC write") } + fn make_motion_attachment_wsc(raw_payload: Option) -> Vec { + let warp = make_warp_id("test"); + let node_ty = make_type_id("Actor"); + let root = make_node_id("root"); + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + let payload = raw_payload + .unwrap_or_else(|| encode_motion_atom_payload([1.0, 2.5, -3.0], [0.25, -0.5, 4.0])); + store.set_node_attachment(root, Some(AttachmentValue::Atom(payload))); + let input = build_one_warp_input(&store, root); + write_wsc_one_warp(&input, [0u8; 32], 42).expect("WSC write") + } + fn write_temp_wsc(data: &[u8]) -> NamedTempFile { let mut f = NamedTempFile::new().expect("tempfile"); f.write_all(data).expect("write"); @@ -416,7 +586,7 @@ mod tests { fn metadata_fields_present() { 
let wsc = make_test_wsc(); let f = write_temp_wsc(&wsc); - let result = run(f.path(), false, &OutputFormat::Text); + let result = run(f.path(), false, false, &OutputFormat::Text); assert!(result.is_ok()); } @@ -428,7 +598,7 @@ mod tests { let store = graph_store_from_warp_view(&view); let state_root = store.canonical_state_hash(); - let stats = compute_stats(&view, &state_root); + let stats = compute_stats(&view, &state_root, false); let node_type_sum: usize = stats.node_types.values().sum(); assert_eq!(node_type_sum, stats.total_nodes); @@ -469,10 +639,77 @@ mod tests { let wsc = make_test_wsc(); let f = write_temp_wsc(&wsc); // Verify JSON mode doesn't panic. - let result = run(f.path(), false, &OutputFormat::Json); + let result = run(f.path(), false, false, &OutputFormat::Json); assert!(result.is_ok()); } + #[test] + fn motion_attachment_displays_decoded_fields() { + let wsc = make_motion_attachment_wsc(None); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root, false); + + assert_eq!(stats.attachments.len(), 1); + assert_eq!(stats.attachments[0].kind, "atom"); + assert!(stats.attachments[0] + .payload + .contains("position: (1.0, 2.5, -3.0), velocity: (0.25, -0.5, 4.0)")); + assert!(stats.attachments[0].warning.is_none()); + } + + #[test] + fn raw_attachment_displays_hex_for_known_payload() { + let wsc = make_motion_attachment_wsc(None); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root, true); + + assert!(stats.attachments[0].payload.starts_with("[type_id: ")); + assert!(stats.attachments[0].payload.contains("] 0x")); + assert!(!stats.attachments[0].payload.contains("position:")); + } + + #[test] 
+ fn unknown_attachment_type_displays_type_id_and_hex() { + let payload = AtomPayload::new(make_type_id("OtherPayload"), Bytes::from_static(b"Hello")); + let wsc = make_motion_attachment_wsc(Some(payload)); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root, false); + + assert!(stats.attachments[0].payload.starts_with("[type_id: ")); + assert!(stats.attachments[0].payload.ends_with("0x48656c6c6f")); + assert!(stats.attachments[0].warning.is_none()); + } + + #[test] + fn truncated_motion_attachment_warns_and_falls_back_to_hex() { + let payload = AtomPayload::new(motion_payload_type_id(), Bytes::from_static(&[1, 2, 3])); + let wsc = make_motion_attachment_wsc(Some(payload)); + let file = WscFile::from_bytes(wsc).unwrap(); + let view = file.warp_view(0).unwrap(); + let store = graph_store_from_warp_view(&view); + let state_root = store.canonical_state_hash(); + + let stats = compute_stats(&view, &state_root, false); + + assert_eq!( + stats.attachments[0].warning.as_deref(), + Some("warning: truncated or invalid motion payload") + ); + assert!(stats.attachments[0].payload.ends_with("0x010203")); + } + #[test] fn connected_components_single_graph() { let wsc = make_test_wsc(); diff --git a/crates/warp-cli/src/main.rs b/crates/warp-cli/src/main.rs index 7bc18c0a..0cda8c4d 100644 --- a/crates/warp-cli/src/main.rs +++ b/crates/warp-cli/src/main.rs @@ -8,7 +8,7 @@ //! //! - `echo-cli verify ` — validate WSC snapshot integrity //! - `echo-cli bench [--filter ]` — run and format benchmarks -//! - `echo-cli inspect [--tree]` — display snapshot metadata +//! - `echo-cli inspect [--tree] [--raw]` — display snapshot metadata //! //! # Usage //! 
```text @@ -35,7 +35,14 @@ fn main() -> Result<()> { ref snapshot, ref expected, } => verify::run(snapshot, expected.as_deref(), &cli.format), - Commands::Bench { ref filter } => bench::run(filter.as_deref(), &cli.format), - Commands::Inspect { ref snapshot, tree } => inspect::run(snapshot, tree, &cli.format), + Commands::Bench { + ref filter, + ref baseline, + } => bench::run(filter.as_deref(), baseline.as_deref(), &cli.format), + Commands::Inspect { + ref snapshot, + tree, + raw, + } => inspect::run(snapshot, tree, raw, &cli.format), } } diff --git a/crates/warp-cli/src/verify.rs b/crates/warp-cli/src/verify.rs index 0661f694..3a23e591 100644 --- a/crates/warp-cli/src/verify.rs +++ b/crates/warp-cli/src/verify.rs @@ -6,6 +6,7 @@ //! each warp, and computes state root hashes. Optionally compares against //! an expected hash. +use std::io::IsTerminal as _; use std::path::Path; use anyhow::{bail, Context, Result}; @@ -17,6 +18,10 @@ use crate::cli::OutputFormat; use crate::output::{emit, hex_hash}; use crate::wsc_loader::graph_store_from_warp_view; +const ANSI_GREEN: &str = "\x1b[32m"; +const ANSI_RED: &str = "\x1b[31m"; +const ANSI_RESET: &str = "\x1b[0m"; + /// Result of verifying a single warp instance within a WSC file. #[derive(Debug, Serialize)] pub(crate) struct WarpVerifyResult { @@ -113,7 +118,8 @@ pub(crate) fn run(snapshot: &Path, expected: Option<&str>, format: &OutputFormat }; // 4. Output. 
- let text = format_text_report(&report); + let use_color = matches!(format, OutputFormat::Text) && std::io::stdout().is_terminal(); + let text = format_text_report(&report, use_color); let json = serde_json::to_value(&report).context("failed to serialize verify report")?; emit(format, &text, &json)?; @@ -124,7 +130,7 @@ pub(crate) fn run(snapshot: &Path, expected: Option<&str>, format: &OutputFormat Ok(()) } -fn format_text_report(report: &VerifyReport) -> String { +fn format_text_report(report: &VerifyReport, use_color: bool) -> String { use std::fmt::Write; let mut out = String::new(); @@ -142,14 +148,36 @@ fn format_text_report(report: &VerifyReport) -> String { let _ = writeln!(out, " Nodes: {}", w.nodes); let _ = writeln!(out, " Edges: {}", w.edges); let _ = writeln!(out, " State root: {}", w.state_root); - let _ = writeln!(out, " Status: {}", w.status); + let _ = writeln!( + out, + " Status: {}", + format_status(&w.status, use_color) + ); let _ = writeln!(out); } - let _ = writeln!(out, " Result: {}", report.result); + let _ = writeln!( + out, + " Result: {}", + format_status(&report.result, use_color) + ); out } +fn format_status(status: &str, use_color: bool) -> String { + if !use_color { + return status.to_string(); + } + + if status == "pass" { + format!("{ANSI_GREEN}\u{2713} {status}{ANSI_RESET}") + } else if status == "fail" || status.starts_with("MISMATCH") { + format!("{ANSI_RED}\u{2717} {status}{ANSI_RESET}") + } else { + status.to_string() + } +} + #[cfg(test)] #[allow(clippy::expect_used, clippy::unwrap_used)] mod tests { @@ -294,7 +322,7 @@ mod tests { result: "pass".to_string(), }; - let text = format_text_report(&report); + let text = format_text_report(&report, false); assert!( text.contains("unchecked"), "multi-warp report should show 'unchecked' for warps 1+: {text}" @@ -306,6 +334,67 @@ mod tests { ); } + #[test] + fn plain_text_report_has_no_ansi_or_glyphs() { + let report = VerifyReport { + file: "test.wsc".to_string(), + tick: 1, + 
schema_hash: "abcd".to_string(), + warp_count: 1, + warps: vec![WarpVerifyResult { + warp_id: "0000".to_string(), + root_node_id: "1111".to_string(), + nodes: 1, + edges: 0, + state_root: "aaaa".to_string(), + status: "pass".to_string(), + }], + result: "pass".to_string(), + }; + + let text = format_text_report(&report, false); + + assert!(text.contains("Status: pass")); + assert!(text.contains("Result: pass")); + assert!(!text.contains("\x1b[")); + assert!(!text.contains('\u{2713}')); + } + + #[test] + fn tty_text_report_colors_pass_and_fail() { + let report = VerifyReport { + file: "test.wsc".to_string(), + tick: 1, + schema_hash: "abcd".to_string(), + warp_count: 2, + warps: vec![ + WarpVerifyResult { + warp_id: "0000".to_string(), + root_node_id: "1111".to_string(), + nodes: 1, + edges: 0, + state_root: "aaaa".to_string(), + status: "pass".to_string(), + }, + WarpVerifyResult { + warp_id: "2222".to_string(), + root_node_id: "3333".to_string(), + nodes: 1, + edges: 0, + state_root: "bbbb".to_string(), + status: "MISMATCH (expected cccc)".to_string(), + }, + ], + result: "fail".to_string(), + }; + + let text = format_text_report(&report, true); + + assert!(text.contains("\x1b[32m\u{2713} pass\x1b[0m")); + assert!(text.contains("\x1b[31m\u{2717} MISMATCH (expected cccc)\x1b[0m")); + assert!(text.contains("\x1b[31m\u{2717} fail\x1b[0m")); + } + #[test] fn empty_graph_passes() { let warp = make_warp_id("test"); diff --git a/crates/warp-cli/tests/cli_integration.rs b/crates/warp-cli/tests/cli_integration.rs index 34ecca52..c732f5c4 100644 --- a/crates/warp-cli/tests/cli_integration.rs +++ b/crates/warp-cli/tests/cli_integration.rs @@ -7,13 +7,65 @@ #![allow(deprecated)] // assert_cmd::cargo::cargo_bin deprecation — no stable replacement in v2.x +use std::error::Error; +use std::fs; + use assert_cmd::cargo::cargo_bin; use predicates::prelude::*; +use tempfile::TempDir; +use warp_core::wsc::{build_one_warp_input, write_wsc_one_warp}; +use warp_core::{ + make_edge_id, 
make_node_id, make_type_id, make_warp_id, EdgeRecord, GraphStore, NodeRecord, +}; + +type TestResult = Result>; fn echo_cli() -> assert_cmd::Command { assert_cmd::Command::new(cargo_bin("echo-cli")) } +fn make_demo_wsc() -> TestResult> { + let warp = make_warp_id("test"); + let node_ty = make_type_id("Actor"); + let child_ty = make_type_id("Item"); + let edge_ty = make_type_id("HasItem"); + let root = make_node_id("root"); + let child1 = make_node_id("child1"); + let child2 = make_node_id("child2"); + + let mut store = GraphStore::new(warp); + store.insert_node(root, NodeRecord { ty: node_ty }); + store.insert_node(child1, NodeRecord { ty: child_ty }); + store.insert_node(child2, NodeRecord { ty: child_ty }); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child1"), + from: root, + to: child1, + ty: edge_ty, + }, + ); + store.insert_edge( + root, + EdgeRecord { + id: make_edge_id("root->child2"), + from: root, + to: child2, + ty: edge_ty, + }, + ); + + let input = build_one_warp_input(&store, root); + Ok(write_wsc_one_warp(&input, [0u8; 32], 42)?) 
+} + +fn write_demo_snapshot() -> TestResult { + let temp = TempDir::new()?; + fs::write(temp.path().join("state.wsc"), make_demo_wsc()?)?; + Ok(temp) +} + #[test] fn help_shows_all_subcommands() { echo_cli() @@ -26,6 +78,48 @@ fn help_shows_all_subcommands() { .stdout(predicate::str::contains("inspect")); } +#[test] +fn help_output_has_no_trailing_whitespace() { + let assert = echo_cli().arg("--help").assert().success(); + let stdout = String::from_utf8_lossy(&assert.get_output().stdout); + let offenders = stdout + .lines() + .enumerate() + .filter(|(_, line)| line.ends_with(' ')) + .map(|(index, _)| format!("line {}", index + 1)) + .collect::>(); + + assert!( + offenders.is_empty(), + "help output contains trailing whitespace on {}", + offenders.join(", ") + ); +} + +#[test] +fn help_matches_golden() { + let assert = echo_cli().arg("--help").assert().success(); + let stdout = String::from_utf8_lossy(&assert.get_output().stdout); + assert_eq!(stdout, include_str!("golden/echo-cli-help.txt")); +} + +#[test] +fn help_golden_has_no_trailing_whitespace() { + let golden = include_str!("golden/echo-cli-help.txt"); + let offenders = golden + .lines() + .enumerate() + .filter(|(_, line)| line.ends_with(' ')) + .map(|(index, _)| format!("line {}", index + 1)) + .collect::>(); + + assert!( + offenders.is_empty(), + "help golden contains trailing whitespace on {}", + offenders.join(", ") + ); +} + #[test] fn verify_help_lists_snapshot_arg() { echo_cli() @@ -41,7 +135,8 @@ fn bench_help_lists_filter() { .args(["bench", "--help"]) .assert() .success() - .stdout(predicate::str::contains("filter")); + .stdout(predicate::str::contains("filter")) + .stdout(predicate::str::contains("baseline")); } #[test] @@ -50,7 +145,102 @@ fn inspect_help_lists_tree_flag() { .args(["inspect", "--help"]) .assert() .success() - .stdout(predicate::str::contains("tree")); + .stdout(predicate::str::contains("tree")) + .stdout(predicate::str::contains("raw")); +} + +#[test] +fn 
inspect_text_reports_metadata_stats_and_tree() -> TestResult { + let temp = write_demo_snapshot()?; + let assert = echo_cli() + .current_dir(temp.path()) + .args(["inspect", "state.wsc", "--tree"]) + .assert() + .success(); + let stdout = String::from_utf8_lossy(&assert.get_output().stdout); + + assert!(stdout.contains("echo-cli inspect")); + assert!(stdout.contains("File: state.wsc")); + assert!(stdout.contains("Tick: 42")); + assert!( + stdout.contains("Schema: 0000000000000000000000000000000000000000000000000000000000000000") + ); + assert!(stdout.contains("Warps: 1")); + assert!(stdout + .contains("ID: 6939dc0fbdb5004cb5d9d1aca2d096042456f4257b88ee8c7fdbfca163f10f11")); + assert!(stdout + .contains("Root node: 401e1d8fcbc26350901be9100a153e8eaf644560386edf68f876ffc1335cccf0")); + assert!(stdout + .contains("State root: 5934ceb0b331755a406e85fbe1a6dda3d6ce5278f7c1802713a50c4e754c84a6")); + assert!(stdout.contains("Nodes: 3")); + assert!(stdout.contains("Edges: 2")); + assert!(stdout.contains("Components: 1")); + assert!(stdout.contains("Node types:")); + assert!(stdout.contains("1e27b4d0: 1")); + assert!(stdout.contains("d9f7db5f: 2")); + assert!(stdout.contains("Edge types:")); + assert!(stdout.contains("8e4ee065: 2")); + assert!(stdout.contains("Tree:")); + assert!(stdout.contains("[401e1d8f] type=1e27b4d0")); + assert!(stdout + .lines() + .any(|line| line.starts_with(" ") && line.contains("type=d9f7db5f"))); + + Ok(()) +} + +#[test] +fn inspect_json_reports_structured_metadata_and_stats() -> TestResult { + let temp = write_demo_snapshot()?; + let assert = echo_cli() + .current_dir(temp.path()) + .args(["--format", "json", "inspect", "state.wsc"]) + .assert() + .success(); + let json: serde_json::Value = serde_json::from_slice(&assert.get_output().stdout)?; + + assert_eq!(json["metadata"]["file"], "state.wsc"); + assert_eq!(json["metadata"]["tick"], 42); + assert_eq!( + json["metadata"]["schema_hash"], + 
"0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!(json["metadata"]["warp_count"], 1); + assert_eq!(json["warps"][0]["total_nodes"], 3); + assert_eq!(json["warps"][0]["total_edges"], 2); + assert_eq!(json["warps"][0]["connected_components"], 1); + assert_eq!(json["warps"][0]["node_types"]["1e27b4d0"], 1); + assert_eq!(json["warps"][0]["node_types"]["d9f7db5f"], 2); + assert_eq!(json["warps"][0]["edge_types"]["8e4ee065"], 2); + assert!(json.get("tree").is_none()); + + let node_type_sum = json["warps"][0]["node_types"] + .as_object() + .into_iter() + .flat_map(serde_json::Map::values) + .filter_map(serde_json::Value::as_u64) + .sum::(); + assert_eq!(node_type_sum, 3); + + Ok(()) +} + +#[test] +fn inspect_corrupt_snapshot_exits_nonzero_without_panic() -> TestResult { + let temp = TempDir::new()?; + fs::write(temp.path().join("bad.wsc"), b"not a wsc")?; + + echo_cli() + .current_dir(temp.path()) + .args(["inspect", "bad.wsc"]) + .assert() + .failure() + .stderr( + predicate::str::contains("failed to open WSC file") + .or(predicate::str::contains("WSC validation failed")), + ); + + Ok(()) } #[test] diff --git a/crates/warp-cli/tests/golden/echo-cli-help.txt b/crates/warp-cli/tests/golden/echo-cli-help.txt new file mode 100644 index 00000000..bf4261e4 --- /dev/null +++ b/crates/warp-cli/tests/golden/echo-cli-help.txt @@ -0,0 +1,13 @@ +Echo developer CLI + +Usage: echo-cli [OPTIONS] + +Commands: + verify Verify hash integrity of a WSC snapshot + bench Run benchmarks and format results + inspect Inspect a WSC snapshot + +Options: + --format Output format (text or json) [default: text] + -h, --help Print help + -V, --version Print version diff --git a/crates/warp-core/src/cmd.rs b/crates/warp-core/src/cmd.rs index 2906a298..bcec54b5 100644 --- a/crates/warp-core/src/cmd.rs +++ b/crates/warp-core/src/cmd.rs @@ -5,3 +5,191 @@ //! Generic engine-level commands (e.g. system management or GC triggers) //! belong in this module. 
Application-specific commands should be defined //! in application crates and registered with the engine at runtime. + +use blake3::Hasher; +use bytes::Bytes; +use echo_wasm_abi::kernel_port as abi; +use echo_wasm_abi::{encode_cbor, unpack_import_suffix_intent_v1}; + +use crate::attachment::{AtomPayload, AttachmentKey, AttachmentValue}; +use crate::footprint::{AttachmentSet, EdgeSet, Footprint, NodeSet, PortSet}; +use crate::ident::{make_type_id, EdgeId, NodeId, NodeKey}; +use crate::inbox::INTENT_ATTACHMENT_TYPE; +use crate::record::{EdgeRecord, NodeRecord}; +use crate::rule::{ConflictPolicy, PatternGraph, RewriteRule}; +use crate::tick_patch::WarpOp; +use crate::TickDelta; + +/// Human-readable command rule for witnessed suffix import proposals. +pub const IMPORT_SUFFIX_INTENT_RULE_NAME: &str = "cmd/import_suffix_intent"; + +/// Type identifier label for result nodes created by [`import_suffix_intent_rule`]. +pub const IMPORT_SUFFIX_RESULT_NODE_TYPE: &str = "echo/import-suffix-result"; + +/// Type identifier label for result edges from ingress event to import result. +pub const IMPORT_SUFFIX_RESULT_EDGE_TYPE: &str = "echo/import-suffix-result-edge"; + +/// Type identifier label for canonical CBOR [`abi::ImportSuffixResult`] atoms. +pub const IMPORT_SUFFIX_RESULT_ATTACHMENT_TYPE: &str = "echo/import-suffix-result/cbor-v1"; + +/// Constructs the core command rule for Echo-owned witnessed suffix import intents. +/// +/// This handler is intentionally conservative. It does not directly mutate a +/// target worldline with remote history. It records a typed `Staged` admission +/// result as causal graph evidence during the admitted tick; later slices can +/// replace the staging evaluator with full basis-aware admission. 
+#[must_use] +pub fn import_suffix_intent_rule() -> RewriteRule { + RewriteRule { + id: make_type_id("rule:cmd/import_suffix_intent").0, + name: IMPORT_SUFFIX_INTENT_RULE_NAME, + left: PatternGraph { nodes: vec![] }, + matcher: import_suffix_intent_matches, + executor: import_suffix_intent_executor, + compute_footprint: import_suffix_intent_footprint, + factor_mask: 0, + conflict_policy: ConflictPolicy::Abort, + join_fn: None, + } +} + +/// Stable result node id for one import-suffix ingress event. +#[must_use] +pub fn import_suffix_result_node_id(event_id: &NodeId) -> NodeId { + let mut hasher = Hasher::new(); + hasher.update(b"echo.import_suffix.result.node.v1:"); + hasher.update(&event_id.0); + NodeId(hasher.finalize().into()) +} + +/// Stable result edge id for one import-suffix ingress event. +#[must_use] +pub fn import_suffix_result_edge_id(event_id: &NodeId, result_id: &NodeId) -> EdgeId { + let mut hasher = Hasher::new(); + hasher.update(b"echo.import_suffix.result.edge.v1:"); + hasher.update(&event_id.0); + hasher.update(&result_id.0); + EdgeId(hasher.finalize().into()) +} + +fn import_suffix_intent_matches(view: crate::GraphView<'_>, scope: &NodeId) -> bool { + import_suffix_request_from_scope(view, scope).is_some() +} + +fn import_suffix_intent_executor( + view: crate::GraphView<'_>, + scope: &NodeId, + delta: &mut TickDelta, +) { + let Some(request) = import_suffix_request_from_scope(view, scope) else { + return; + }; + let result = staged_import_suffix_result(&request); + let Ok(result_bytes) = encode_cbor(&result) else { + return; + }; + + let warp_id = view.warp_id(); + let result_id = import_suffix_result_node_id(scope); + let result_edge_id = import_suffix_result_edge_id(scope, &result_id); + let result_key = AttachmentKey::node_alpha(NodeKey { + warp_id, + local_id: result_id, + }); + + delta.push(WarpOp::UpsertNode { + node: NodeKey { + warp_id, + local_id: result_id, + }, + record: NodeRecord { + ty: make_type_id(IMPORT_SUFFIX_RESULT_NODE_TYPE), 
+ }, + }); + delta.push(WarpOp::UpsertEdge { + warp_id, + record: EdgeRecord { + id: result_edge_id, + from: *scope, + to: result_id, + ty: make_type_id(IMPORT_SUFFIX_RESULT_EDGE_TYPE), + }, + }); + delta.push(WarpOp::SetAttachment { + key: result_key, + value: Some(AttachmentValue::Atom(AtomPayload::new( + make_type_id(IMPORT_SUFFIX_RESULT_ATTACHMENT_TYPE), + Bytes::from(result_bytes), + ))), + }); +} + +fn import_suffix_intent_footprint(view: crate::GraphView<'_>, scope: &NodeId) -> Footprint { + let warp_id = view.warp_id(); + let result_id = import_suffix_result_node_id(scope); + let result_edge_id = import_suffix_result_edge_id(scope, &result_id); + + let mut n_read = NodeSet::default(); + let mut n_write = NodeSet::default(); + let mut e_write = EdgeSet::default(); + let mut a_read = AttachmentSet::default(); + let mut a_write = AttachmentSet::default(); + + n_read.insert_with_warp(warp_id, *scope); + n_write.insert_with_warp(warp_id, *scope); + n_write.insert_with_warp(warp_id, result_id); + e_write.insert_with_warp(warp_id, result_edge_id); + a_read.insert(AttachmentKey::node_alpha(NodeKey { + warp_id, + local_id: *scope, + })); + a_write.insert(AttachmentKey::node_alpha(NodeKey { + warp_id, + local_id: result_id, + })); + + Footprint { + n_read, + n_write, + e_read: EdgeSet::default(), + e_write, + a_read, + a_write, + b_in: PortSet::default(), + b_out: PortSet::default(), + factor_mask: 0, + } +} + +fn import_suffix_request_from_scope( + view: crate::GraphView<'_>, + scope: &NodeId, +) -> Option { + let Some(AttachmentValue::Atom(atom)) = view.node_attachment(scope) else { + return None; + }; + if atom.type_id != make_type_id(INTENT_ATTACHMENT_TYPE) { + return None; + } + unpack_import_suffix_intent_v1(atom.bytes.as_ref()).ok() +} + +fn staged_import_suffix_result(request: &abi::ImportSuffixRequest) -> abi::ImportSuffixResult { + let staged_refs = if request.bundle.source_suffix.source_entries.is_empty() { + vec![request.target_basis.clone()] + } else { + 
request.bundle.source_suffix.source_entries.clone() + }; + + abi::ImportSuffixResult { + bundle_digest: request.bundle.bundle_digest.clone(), + admission: abi::WitnessedSuffixAdmissionResponse { + source_shell_digest: request.bundle.source_suffix.witness_digest.clone(), + target_basis: request.target_basis.clone(), + outcome: abi::WitnessedSuffixAdmissionOutcome::Staged { + staged_refs, + basis_report: request.basis_report.clone(), + }, + }, + } +} diff --git a/crates/warp-core/src/lib.rs b/crates/warp-core/src/lib.rs index 29cef2fc..611e2507 100644 --- a/crates/warp-core/src/lib.rs +++ b/crates/warp-core/src/lib.rs @@ -98,6 +98,7 @@ pub mod inbox; pub mod materialization; mod neighborhood; mod observation; +mod optic; /// Parallel execution module. /// /// Provides both serial and parallel execution strategies for rewrite rules, @@ -158,6 +159,11 @@ pub use attachment::{ CodecRegistry, DecodeError, ErasedCodec, RegistryError, }; pub use clock::{GlobalTick, RunId, WorldlineTick}; +pub use cmd::{ + import_suffix_intent_rule, import_suffix_result_edge_id, import_suffix_result_node_id, + IMPORT_SUFFIX_INTENT_RULE_NAME, IMPORT_SUFFIX_RESULT_ATTACHMENT_TYPE, + IMPORT_SUFFIX_RESULT_EDGE_TYPE, IMPORT_SUFFIX_RESULT_NODE_TYPE, +}; pub use constants::{blake3_empty, digest_len0_u64, POLICY_ID_NO_POLICY_V0}; pub use engine_impl::{ scope_hash, ApplyResult, CommitOutcome, DispatchDisposition, Engine, EngineBuilder, @@ -206,12 +212,26 @@ pub use neighborhood::{ ParticipantRole, SiteParticipant, SitePlurality, }; pub use observation::{ - BuiltinObserverPlan, HeadObservation, ObservationArtifact, ObservationAt, + AuthoredObserverPlan, BuiltinObserverPlan, HeadObservation, ObservationArtifact, ObservationAt, ObservationBasisPosture, ObservationCoordinate, ObservationError, ObservationFrame, - ObservationPayload, ObservationProjection, ObservationProjectionKind, ObservationRequest, - ObservationService, ReadingBudgetPosture, ReadingEnvelope, ReadingObserverBasis, - 
ReadingObserverPlan, ReadingResidualPosture, ReadingRightsPosture, ReadingWitnessRef, - ResolvedObservationCoordinate, WorldlineSnapshot, + ObservationPayload, ObservationProjection, ObservationProjectionKind, ObservationReadBudget, + ObservationRequest, ObservationRights, ObservationService, ObserverInstanceId, + ObserverInstanceRef, ObserverPlanId, ReadingBudgetPosture, ReadingEnvelope, + ReadingObserverBasis, ReadingObserverPlan, ReadingResidualPosture, ReadingRightsPosture, + ReadingWitnessRef, ResolvedObservationCoordinate, WorldlineSnapshot, +}; +pub use optic::{ + AdmissionLawId, AdmittedIntent, AttachmentDescentPolicy, BraidId, CapabilityPosture, + CloseOpticRequest, CloseOpticResult, CoordinateAt, DispatchOpticIntentRequest, EchoCoordinate, + EchoOptic, IntentConflict, IntentConflictReason, IntentDispatchResult, IntentFamilyId, + MissingWitnessBasisReason, ObserveOpticRequest, ObserveOpticResult, OpenOpticRequest, + OpenOpticResult, OpticActorId, OpticAperture, OpticApertureShape, OpticCapability, + OpticCapabilityId, OpticCause, OpticCloseError, OpticFocus, OpticId, OpticIntentPayload, + OpticObstruction, OpticObstructionKind, OpticOpenError, OpticReadBudget, OpticReading, + OpticReadingEnvelope, PluralIntent, ProjectionVersion, ReadIdentity, ReducerVersion, + RetainReadingRequest, RetainReadingResult, RetainedReadingCache, RetainedReadingCodecId, + RetainedReadingDescriptor, RetainedReadingKey, RevealReadingRequest, RevealReadingResult, + StagedIntent, StagedIntentReason, WitnessBasis, WorldlineHeadOptic, }; pub use playback::{CursorReceipt, TruthFrame, TruthSink}; pub use provenance_store::{ @@ -250,10 +270,13 @@ pub use tick_patch::{ pub use tx::TxId; pub use warp_state::{WarpInstance, WarpState}; pub use witnessed_suffix::{ - evaluate_witnessed_suffix_admission, WitnessedSuffixAdmissionContext, + derive_witnessed_suffix_shell_digest, evaluate_witnessed_suffix_admission, export_suffix, + import_suffix, CausalSuffixBundle, ExportSuffixObstruction, 
ExportSuffixRequest, + ImportSuffixRequest, ImportSuffixResult, WitnessedSuffixAdmissionContext, WitnessedSuffixAdmissionOutcome, WitnessedSuffixAdmissionRequest, - WitnessedSuffixAdmissionResponse, WitnessedSuffixLocalAdmissionPosture, - WitnessedSuffixLocalAdmissionPostureError, WitnessedSuffixShell, + WitnessedSuffixAdmissionResponse, WitnessedSuffixExportContext, + WitnessedSuffixLocalAdmissionPosture, WitnessedSuffixLocalAdmissionPostureError, + WitnessedSuffixShell, }; pub use worldline::{ ApplyError, AtomWrite, AtomWriteSet, HashTriplet, OutputFrameSet, WorldlineId, diff --git a/crates/warp-core/src/observation.rs b/crates/warp-core/src/observation.rs index b247b4bb..087e3a4b 100644 --- a/crates/warp-core/src/observation.rs +++ b/crates/warp-core/src/observation.rs @@ -23,6 +23,11 @@ use crate::coordinator::WorldlineRuntime; use crate::engine_impl::Engine; use crate::ident::Hash; use crate::materialization::ChannelId; +use crate::optic::{ + AttachmentDescentPolicy, CoordinateAt, EchoCoordinate, MissingWitnessBasisReason, + ObserveOpticRequest, ObserveOpticResult, OpticApertureShape, OpticCapabilityId, OpticFocus, + OpticObstruction, OpticObstructionKind, OpticReading, ReadIdentity, WitnessBasis, +}; use crate::provenance_store::{ProvenanceRef, ProvenanceService, ProvenanceStore}; use crate::snapshot::Snapshot; use crate::strand::{StrandId, StrandRevalidationState}; @@ -31,6 +36,43 @@ use crate::worldline::WorldlineId; const OBSERVATION_VERSION: u32 = 2; const OBSERVATION_ARTIFACT_DOMAIN: &[u8] = b"echo:observation-artifact:v2\0"; +const OPTIC_OBSERVATION_WITNESS_SET_DOMAIN: &[u8] = b"echo:optic-observation-witness-set:v1\0"; +const OPTIC_LIVE_TAIL_WITNESS_SET_DOMAIN: &[u8] = b"echo:optic-live-tail-witness-set:v1\0"; +const OPTIC_METADATA_APERTURE_MIN_BYTES: u64 = 128; + +macro_rules! 
opaque_id { + ($(#[$meta:meta])* $name:ident) => { + $(#[$meta])* + #[repr(transparent)] + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] + #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + pub struct $name([u8; 32]); + + impl $name { + /// Reconstructs the id from canonical bytes. + #[must_use] + pub const fn from_bytes(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Returns the canonical byte representation. + #[must_use] + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } + } + }; +} + +opaque_id!( + /// Stable identity for an authored or kernel observer plan. + ObserverPlanId +); + +opaque_id!( + /// Stable identity for a hosted observer instance. + ObserverInstanceId +); /// Coordinate selector for an observation request. #[derive(Clone, Debug, PartialEq, Eq)] @@ -41,6 +83,15 @@ pub struct ObservationCoordinate { pub at: ObservationAt, } +impl ObservationCoordinate { + fn to_abi(&self) -> abi::ObservationCoordinate { + abi::ObservationCoordinate { + worldline_id: abi::WorldlineId::from_bytes(*self.worldline_id.as_bytes()), + at: self.at.to_abi(), + } + } +} + /// Requested position within a worldline. #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ObservationAt { @@ -186,6 +237,51 @@ pub struct ObservationRequest { pub frame: ObservationFrame, /// Requested projection within that frame. pub projection: ObservationProjection, + /// Observer plan the caller is explicitly invoking. + pub observer_plan: ReadingObserverPlan, + /// Hosted observer instance state, when this is not a one-shot read. + pub observer_instance: Option, + /// Declared read budget. + pub budget: ObservationReadBudget, + /// Declared rights posture for the read. + pub rights: ObservationRights, +} + +impl ObservationRequest { + /// Builds a one-shot built-in observation request for the frame/projection pair. 
+ pub fn builtin_one_shot( + coordinate: ObservationCoordinate, + frame: ObservationFrame, + projection: ObservationProjection, + ) -> Result { + let observer_plan = builtin_observer_plan_for(frame, projection.kind())?; + Ok(Self { + coordinate, + frame, + projection, + observer_plan, + observer_instance: None, + budget: ObservationReadBudget::UnboundedOneShot, + rights: ObservationRights::KernelPublic, + }) + } + + /// Converts the request to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::ObservationRequest { + abi::ObservationRequest { + coordinate: self.coordinate.to_abi(), + frame: self.frame.to_abi(), + projection: self.projection.to_abi(), + observer_plan: self.observer_plan.to_abi(), + observer_instance: self + .observer_instance + .as_ref() + .map(ObserverInstanceRef::to_abi), + budget: self.budget.to_abi(), + rights: self.rights.to_abi(), + } + } } /// Fully resolved coordinate returned with every observation. @@ -326,6 +422,36 @@ impl BuiltinObserverPlan { } } +/// Authored observer plan identity. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct AuthoredObserverPlan { + /// Stable plan identity. + pub plan_id: ObserverPlanId, + /// Hash of the generated or installed observer artifact. + pub artifact_hash: Hash, + /// Hash of the authored schema or contract family. + pub schema_hash: Hash, + /// Hash of the observer state schema. + pub state_schema_hash: Hash, + /// Hash of the observer update law. + pub update_law_hash: Hash, + /// Hash of the observer emission law. 
+ pub emission_law_hash: Hash, +} + +impl AuthoredObserverPlan { + fn to_abi(&self) -> abi::AuthoredObserverPlan { + abi::AuthoredObserverPlan { + plan_id: observer_plan_id_to_abi(self.plan_id), + artifact_hash: self.artifact_hash.to_vec(), + schema_hash: self.schema_hash.to_vec(), + state_schema_hash: self.state_schema_hash.to_vec(), + update_law_hash: self.update_law_hash.to_vec(), + emission_law_hash: self.emission_law_hash.to_vec(), + } + } +} + /// Observer plan identity for a reading artifact. #[derive(Clone, Debug, PartialEq, Eq)] pub enum ReadingObserverPlan { @@ -334,6 +460,11 @@ pub enum ReadingObserverPlan { /// Built-in plan selected by the observation frame/projection pair. plan: BuiltinObserverPlan, }, + /// Authored/generated observer plan. + Authored { + /// Authored plan identity and law hashes. + plan: Box, + }, } impl ReadingObserverPlan { @@ -342,6 +473,39 @@ impl ReadingObserverPlan { Self::Builtin { plan } => abi::ReadingObserverPlan::Builtin { plan: plan.to_abi(), }, + Self::Authored { plan } => abi::ReadingObserverPlan::Authored { + plan: Box::new(plan.to_abi()), + }, + } + } +} + +fn builtin_observer_plan_for( + frame: ObservationFrame, + projection: ObservationProjectionKind, +) -> Result { + Ok(ReadingObserverPlan::Builtin { + plan: ObservationService::builtin_observer_plan(frame, projection)?, + }) +} + +/// Hosted observer instance identity. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ObserverInstanceRef { + /// Runtime instance identity. + pub instance_id: ObserverInstanceId, + /// Plan that owns this instance. + pub plan_id: ObserverPlanId, + /// Hash of the accumulated observer state. 
+ pub state_hash: Hash, +} + +impl ObserverInstanceRef { + fn to_abi(&self) -> abi::ObserverInstanceRef { + abi::ObserverInstanceRef { + instance_id: observer_instance_id_to_abi(self.instance_id), + plan_id: observer_plan_id_to_abi(self.plan_id), + state_hash: self.state_hash.to_vec(), } } } @@ -367,6 +531,59 @@ impl ReadingObserverBasis { } } +/// Read budget requested by an observation caller. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ObservationReadBudget { + /// One-shot built-in observer with no caller-specified slice budget. + UnboundedOneShot, + /// Caller-bounded read budget. + Bounded { + /// Maximum encoded payload bytes the caller is willing to receive. + max_payload_bytes: u64, + /// Maximum witness references the caller is willing to accept. + max_witness_refs: u64, + }, +} + +impl ObservationReadBudget { + fn to_abi(self) -> abi::ObservationReadBudget { + match self { + Self::UnboundedOneShot => abi::ObservationReadBudget::UnboundedOneShot, + Self::Bounded { + max_payload_bytes, + max_witness_refs, + } => abi::ObservationReadBudget::Bounded { + max_payload_bytes, + max_witness_refs, + }, + } + } +} + +/// Rights posture requested by an observation caller. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ObservationRights { + /// Kernel-public read. + KernelPublic, + /// Capability-scoped read. Echo carries this now but does not execute it + /// until a capability checker is installed for the observer family. + CapabilityScoped { + /// Capability basis named by the caller. + capability: OpticCapabilityId, + }, +} + +impl ObservationRights { + fn to_abi(self) -> abi::ObservationRights { + match self { + Self::KernelPublic => abi::ObservationRights::KernelPublic, + Self::CapabilityScoped { capability } => abi::ObservationRights::CapabilityScoped { + capability: abi::OpticCapabilityId::from_bytes(*capability.as_bytes()), + }, + } + } +} + /// Witness reference carried by a reading artifact. 
#[derive(Clone, Debug, PartialEq, Eq)] pub enum ReadingWitnessRef { @@ -410,12 +627,34 @@ impl ReadingWitnessRef { pub enum ReadingBudgetPosture { /// One-shot built-in observer with no caller-specified slice budget. UnboundedOneShot, + /// Caller-bounded reading that remained within budget. + Bounded { + /// Requested encoded payload byte limit. + max_payload_bytes: u64, + /// Encoded payload bytes emitted. + payload_bytes: u64, + /// Requested witness-reference limit. + max_witness_refs: u64, + /// Witness references emitted. + witness_refs: u64, + }, } impl ReadingBudgetPosture { fn to_abi(self) -> abi::ReadingBudgetPosture { match self { Self::UnboundedOneShot => abi::ReadingBudgetPosture::UnboundedOneShot, + Self::Bounded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + } => abi::ReadingBudgetPosture::Bounded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + }, } } } @@ -464,6 +703,8 @@ impl ReadingResidualPosture { pub struct ReadingEnvelope { /// Observer plan identity. pub observer_plan: ReadingObserverPlan, + /// Hosted observer instance, when the reading used accumulated observer state. + pub observer_instance: Option, /// Native observer basis used by the reading. pub observer_basis: ReadingObserverBasis, /// Witnesses or shell references that support the reading. 
@@ -479,9 +720,13 @@ pub struct ReadingEnvelope { } impl ReadingEnvelope { - fn to_abi(&self) -> abi::ReadingEnvelope { + pub(crate) fn to_abi(&self) -> abi::ReadingEnvelope { abi::ReadingEnvelope { observer_plan: self.observer_plan.to_abi(), + observer_instance: self + .observer_instance + .as_ref() + .map(ObserverInstanceRef::to_abi), observer_basis: self.observer_basis.to_abi(), witness_refs: self .witness_refs @@ -562,7 +807,7 @@ pub enum ObservationPayload { } impl ObservationPayload { - fn to_abi(&self) -> abi::ObservationPayload { + pub(crate) fn to_abi(&self) -> abi::ObservationPayload { match self { Self::Head(head) => abi::ObservationPayload::Head { head: head.to_abi(), @@ -641,6 +886,29 @@ pub enum ObservationError { /// Query observation is not implemented yet. #[error("query observation is not supported in phase 5")] UnsupportedQuery, + /// The requested observer plan is not installed or executable. + #[error("unsupported observer plan: {0:?}")] + UnsupportedObserverPlan(ReadingObserverPlan), + /// The requested hosted observer instance is not installed or executable. + #[error("unsupported observer instance: {0:?}")] + UnsupportedObserverInstance(ObserverInstanceRef), + /// The requested observation rights posture is not executable. + #[error("unsupported observation rights posture: {0:?}")] + UnsupportedRights(ObservationRights), + /// The requested observation exceeded its declared budget. + #[error( + "observation budget exceeded: payload {payload_bytes}/{max_payload_bytes} bytes, witness refs {witness_refs}/{max_witness_refs}" + )] + BudgetExceeded { + /// Declared payload byte limit. + max_payload_bytes: u64, + /// Encoded payload bytes produced by the observer. + payload_bytes: u64, + /// Declared witness-reference limit. + max_witness_refs: u64, + /// Witness references needed by the reading. + witness_refs: u64, + }, /// The requested observation cannot be produced at this coordinate. 
#[error("observation unavailable for worldline {worldline_id:?} at {at:?}")] ObservationUnavailable { @@ -679,6 +947,7 @@ impl ObservationService { return Err(ObservationError::InvalidWorldline(worldline_id)); } Self::validate_frame_projection(request.frame, &request.projection)?; + Self::validate_observer_contract(&request)?; if matches!(request.frame, ObservationFrame::QueryView) { return Err(ObservationError::UnsupportedQuery); } @@ -686,12 +955,6 @@ impl ObservationService { let resolved = Self::resolve_coordinate(runtime, provenance, engine, &request)?; let parent_basis_posture = Self::basis_posture(runtime, provenance, worldline_id, request.coordinate.at)?; - let reading = Self::reading_envelope( - &resolved, - parent_basis_posture, - request.frame, - &request.projection, - ); let payload = match (&request.frame, &request.projection) { (ObservationFrame::CommitBoundary, ObservationProjection::Head) => { ObservationPayload::Head(HeadObservation { @@ -734,6 +997,7 @@ impl ObservationService { } _ => unreachable!("validity matrix must reject unsupported combinations"), }; + let reading = Self::reading_envelope(&resolved, parent_basis_posture, &request, &payload)?; let artifact_hash = Self::compute_artifact_hash( &resolved, @@ -752,6 +1016,473 @@ impl ObservationService { }) } + /// Observe a worldline through a bounded optic request. + /// + /// This is the first narrow bridge from optics into the existing + /// observation path. It supports commit-boundary head and snapshot + /// apertures and returns typed obstructions for unsupported or unbounded + /// reads instead of widening the read behind the caller's back. 
+ pub fn observe_optic( + runtime: &WorldlineRuntime, + provenance: &ProvenanceService, + engine: &Engine, + request: ObserveOpticRequest, + ) -> ObserveOpticResult { + match Self::observe_optic_inner(runtime, provenance, engine, &request) { + Ok(reading) => ObserveOpticResult::Reading(Box::new(reading)), + Err(obstruction) => ObserveOpticResult::Obstructed(obstruction), + } + } + + fn observe_optic_inner( + runtime: &WorldlineRuntime, + provenance: &ProvenanceService, + engine: &Engine, + request: &ObserveOpticRequest, + ) -> Result> { + Self::validate_optic_budget(request)?; + if let Some(obstruction) = Self::attachment_boundary_obstruction(request) { + return Err(obstruction); + } + let observation_request = Self::optic_observation_request(request)?; + let artifact = Self::observe(runtime, provenance, engine, observation_request) + .map_err(|err| Self::optic_observation_error(request, err))?; + let witness_basis = Self::optic_witness_basis(provenance, request, &artifact)?; + let read_identity = ReadIdentity::new( + request.optic_id, + &request.focus, + request.coordinate.clone(), + &request.aperture, + request.projection_version, + request.reducer_version, + witness_basis, + artifact.reading.rights_posture, + artifact.reading.budget_posture, + artifact.reading.residual_posture, + ); + + Ok(OpticReading { + envelope: artifact.reading, + read_identity, + payload: artifact.payload, + retained: None, + }) + } + + fn validate_optic_budget(request: &ObserveOpticRequest) -> Result<(), Box> { + let Some(max_bytes) = request.aperture.budget.max_bytes else { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "optic reads must declare a byte budget", + )); + }; + if max_bytes == 0 { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: 
MissingWitnessBasisReason::BudgetLimited, + }), + "optic byte budget is zero", + )); + } + + match &request.aperture.shape { + OpticApertureShape::Head | OpticApertureShape::SnapshotMetadata + if max_bytes < OPTIC_METADATA_APERTURE_MIN_BYTES => + { + Err(Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "optic metadata aperture exceeds the declared byte budget", + )) + } + OpticApertureShape::ByteRange { len, .. } if *len > max_bytes => { + Err(Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "optic byte-range aperture exceeds the declared byte budget", + )) + } + _ => Ok(()), + } + } + + fn attachment_boundary_obstruction( + request: &ObserveOpticRequest, + ) -> Option> { + if !matches!(request.focus, OpticFocus::AttachmentBoundary { .. }) { + return None; + } + + match (&request.aperture.shape, request.aperture.attachment_descent) { + (OpticApertureShape::AttachmentBoundary, AttachmentDescentPolicy::BoundaryOnly) => { + Some(Self::optic_obstruction( + request, + OpticObstructionKind::AttachmentDescentRequired, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::UnsupportedBasis, + }), + "optic read reached an attachment boundary; recursive descent requires an explicit aperture, capability, budget, and law", + )) + } + (OpticApertureShape::AttachmentBoundary, AttachmentDescentPolicy::Explicit) + if request.aperture.budget.max_attachments.unwrap_or(0) == 0 => + { + Some(Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "explicit attachment descent requires a positive attachment budget", + )) + } + (OpticApertureShape::AttachmentBoundary, AttachmentDescentPolicy::Explicit) => { + 
Some(Self::optic_obstruction( + request, + OpticObstructionKind::AttachmentDescentDenied, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::RightsLimited, + }), + "explicit attachment descent has no installed capability checker or projection law", + )) + } + _ => Some(Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedAperture, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::UnsupportedBasis, + }), + "attachment boundary focus requires an attachment-boundary aperture", + )), + } + } + + fn optic_observation_request( + request: &ObserveOpticRequest, + ) -> Result> { + let ( + OpticFocus::Worldline { + worldline_id: focus_worldline, + }, + EchoCoordinate::Worldline { worldline_id, at }, + ) = (&request.focus, &request.coordinate) + else { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedProjectionLaw, + None, + "observe_optic currently supports worldline coordinates only", + )); + }; + + if focus_worldline != worldline_id { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::ConflictingFrontier, + None, + "optic focus and coordinate name different worldlines", + )); + } + + let at = Self::optic_coordinate_at(request, *worldline_id, *at)?; + let (frame, projection) = match &request.aperture.shape { + OpticApertureShape::Head => ( + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ), + OpticApertureShape::SnapshotMetadata => ( + ObservationFrame::CommitBoundary, + ObservationProjection::Snapshot, + ), + OpticApertureShape::QueryBytes { .. 
} => { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedProjectionLaw, + None, + "contract QueryView optics are not installed yet", + )); + } + _ => { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedAperture, + None, + "this optic aperture is not supported by the observation bridge", + )); + } + }; + + let mut observation_request = match ObservationRequest::builtin_one_shot( + ObservationCoordinate { + worldline_id: *worldline_id, + at, + }, + frame, + projection, + ) { + Ok(request) => request, + Err(err) => return Err(Self::optic_observation_error(request, err)), + }; + if let Some(max_payload_bytes) = request.aperture.budget.max_bytes { + observation_request.budget = ObservationReadBudget::Bounded { + max_payload_bytes, + max_witness_refs: request.aperture.budget.max_ticks.unwrap_or(u64::MAX), + }; + } + Ok(observation_request) + } + + fn optic_coordinate_at( + request: &ObserveOpticRequest, + worldline_id: WorldlineId, + at: CoordinateAt, + ) -> Result> { + match at { + CoordinateAt::Frontier => Ok(ObservationAt::Frontier), + CoordinateAt::Tick(tick) => Ok(ObservationAt::Tick(tick)), + CoordinateAt::Provenance(reference) if reference.worldline_id == worldline_id => { + Ok(ObservationAt::Tick(reference.worldline_tick)) + } + CoordinateAt::Provenance(_) => Err(Self::optic_obstruction( + request, + OpticObstructionKind::ConflictingFrontier, + None, + "provenance coordinate belongs to a different worldline", + )), + } + } + + fn optic_observation_error( + request: &ObserveOpticRequest, + error: ObservationError, + ) -> Box { + match error { + ObservationError::InvalidWorldline(_) + | ObservationError::InvalidTick { .. } + | ObservationError::ObservationUnavailable { .. 
} => Self::optic_obstruction( + request, + OpticObstructionKind::MissingWitness, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "required observation witness evidence is unavailable", + ), + ObservationError::UnsupportedFrameProjection { .. } => Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedAperture, + None, + "unsupported optic frame/projection pairing", + ), + ObservationError::UnsupportedQuery + | ObservationError::UnsupportedObserverPlan(_) + | ObservationError::UnsupportedObserverInstance(_) => Self::optic_obstruction( + request, + OpticObstructionKind::UnsupportedProjectionLaw, + None, + "contract QueryView optics are not installed yet", + ), + ObservationError::UnsupportedRights(_) => Self::optic_obstruction( + request, + OpticObstructionKind::CapabilityDenied, + None, + "observation rights posture is not authorized", + ), + ObservationError::BudgetExceeded { .. } => Self::optic_obstruction( + request, + OpticObstructionKind::BudgetExceeded, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "optic observation exceeded the declared read budget", + ), + ObservationError::CodecFailure(_) => Self::optic_obstruction( + request, + OpticObstructionKind::MissingWitness, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "observation artifact could not be encoded as witness evidence", + ), + } + } + + fn optic_obstruction( + request: &ObserveOpticRequest, + kind: OpticObstructionKind, + witness_basis: Option, + message: &str, + ) -> Box { + Box::new(OpticObstruction { + kind, + optic_id: Some(request.optic_id), + focus: Some(request.focus.clone()), + coordinate: Some(request.coordinate.clone()), + witness_basis, + message: message.to_owned(), + }) + } + + fn optic_witness_basis( + provenance: &ProvenanceService, + request: &ObserveOpticRequest, + artifact: &ObservationArtifact, + ) -> Result> { + if let 
Some(witness_basis) = + Self::checkpoint_plus_tail_witness_basis(provenance, request, artifact)? + { + return Ok(witness_basis); + } + + Ok(Self::artifact_witness_basis(artifact)) + } + + fn artifact_witness_basis(artifact: &ObservationArtifact) -> WitnessBasis { + match artifact.reading.witness_refs.as_slice() { + [ReadingWitnessRef::ResolvedCommit { reference }] => WitnessBasis::ResolvedCommit { + reference: *reference, + state_root: artifact.resolved.state_root, + commit_hash: artifact.resolved.commit_hash, + }, + refs => WitnessBasis::WitnessSet { + refs: refs.to_vec(), + witness_set_hash: optic_witness_refs_hash(refs), + }, + } + } + + fn checkpoint_plus_tail_witness_basis( + provenance: &ProvenanceService, + request: &ObserveOpticRequest, + artifact: &ObservationArtifact, + ) -> Result, Box> { + let EchoCoordinate::Worldline { worldline_id, .. } = request.coordinate else { + return Ok(None); + }; + let [ReadingWitnessRef::ResolvedCommit { .. }] = artifact.reading.witness_refs.as_slice() + else { + return Ok(None); + }; + let materialized_tick = artifact.resolved.resolved_worldline_tick; + if materialized_tick == WorldlineTick::ZERO { + return Ok(None); + } + let Some(checkpoint) = provenance.checkpoint_before(worldline_id, materialized_tick) else { + return Ok(None); + }; + if checkpoint.worldline_tick == WorldlineTick::ZERO { + return Ok(None); + } + if checkpoint.worldline_tick >= materialized_tick { + return Ok(None); + } + + let tail_start = checkpoint.worldline_tick.as_u64(); + let tail_end = materialized_tick + .checked_sub(1) + .map(WorldlineTick::as_u64) + .ok_or_else(|| { + Self::optic_obstruction( + request, + OpticObstructionKind::LiveTailRequiresReduction, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "live-tail witness span has no commit boundary", + ) + })?; + if tail_start > tail_end { + return Ok(None); + } + let tail_len = tail_end + .checked_sub(tail_start) + .and_then(|len| 
len.checked_add(1)) + .ok_or_else(|| { + Self::optic_obstruction( + request, + OpticObstructionKind::LiveTailRequiresReduction, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "live-tail witness span overflowed", + ) + })?; + if tail_len > request.aperture.budget.max_ticks.unwrap_or(u64::MAX) { + return Err(Self::optic_obstruction( + request, + OpticObstructionKind::LiveTailRequiresReduction, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }), + "live-tail witness set exceeds the declared tick budget", + )); + } + + let checkpoint_commit_tick = checkpoint.worldline_tick.checked_sub(1).ok_or_else(|| { + Self::optic_obstruction( + request, + OpticObstructionKind::MissingWitness, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "checkpoint witness coordinate is unavailable", + ) + })?; + let checkpoint_entry = provenance + .entry(worldline_id, checkpoint_commit_tick) + .map_err(|_| { + Self::optic_obstruction( + request, + OpticObstructionKind::MissingWitness, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "checkpoint provenance witness entry is unavailable", + ) + })?; + let mut tail_witness_refs = Vec::new(); + for raw_tick in tail_start..=tail_end { + let tick = WorldlineTick::from_raw(raw_tick); + let entry = provenance.entry(worldline_id, tick).map_err(|_| { + Self::optic_obstruction( + request, + OpticObstructionKind::MissingWitness, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + "live-tail provenance witness entry is unavailable", + ) + })?; + tail_witness_refs.push(ProvenanceRef { + worldline_id, + worldline_tick: tick, + commit_hash: entry.expected.commit_hash, + }); + } + + Ok(Some(WitnessBasis::CheckpointPlusTail { + checkpoint_ref: ProvenanceRef { + worldline_id, + worldline_tick: checkpoint_commit_tick, + commit_hash: 
checkpoint_entry.expected.commit_hash, + }, + checkpoint_hash: checkpoint.state_hash, + tail_digest: optic_tail_witness_refs_hash(&tail_witness_refs), + tail_witness_refs, + })) + } + fn validate_frame_projection( frame: ObservationFrame, projection: &ObservationProjection, @@ -780,6 +1511,35 @@ impl ObservationService { } } + fn validate_observer_contract(request: &ObservationRequest) -> Result<(), ObservationError> { + let expected = Self::observer_plan(request.frame, request.projection.kind())?; + match &request.observer_plan { + ReadingObserverPlan::Builtin { .. } if request.observer_plan != expected => { + return Err(ObservationError::UnsupportedObserverPlan( + request.observer_plan.clone(), + )); + } + ReadingObserverPlan::Builtin { .. } => {} + ReadingObserverPlan::Authored { .. } => { + return Err(ObservationError::UnsupportedObserverPlan( + request.observer_plan.clone(), + )); + } + } + + if let Some(instance) = &request.observer_instance { + return Err(ObservationError::UnsupportedObserverInstance( + instance.clone(), + )); + } + + if let ObservationRights::CapabilityScoped { .. 
} = request.rights { + return Err(ObservationError::UnsupportedRights(request.rights)); + } + + Ok(()) + } + fn resolve_coordinate( runtime: &WorldlineRuntime, provenance: &ProvenanceService, @@ -924,46 +1684,99 @@ impl ObservationService { fn reading_envelope( resolved: &ResolvedObservationCoordinate, parent_basis_posture: ObservationBasisPosture, - frame: ObservationFrame, - projection: &ObservationProjection, - ) -> ReadingEnvelope { - ReadingEnvelope { - observer_plan: Self::observer_plan(frame, projection.kind()), - observer_basis: Self::observer_basis(frame), - witness_refs: Self::witness_refs(resolved, frame), + request: &ObservationRequest, + payload: &ObservationPayload, + ) -> Result { + let witness_refs = Self::witness_refs(resolved, request.frame); + let budget_posture = Self::budget_posture(request.budget, payload, witness_refs.len())?; + Ok(ReadingEnvelope { + observer_plan: request.observer_plan.clone(), + observer_instance: request.observer_instance.clone(), + observer_basis: Self::observer_basis(request.frame), + witness_refs, parent_basis_posture, - budget_posture: ReadingBudgetPosture::UnboundedOneShot, - rights_posture: ReadingRightsPosture::KernelPublic, + budget_posture, + rights_posture: Self::rights_posture(request.rights), residual_posture: ReadingResidualPosture::Complete, + }) + } + + fn budget_posture( + budget: ObservationReadBudget, + payload: &ObservationPayload, + witness_ref_count: usize, + ) -> Result { + match budget { + ObservationReadBudget::UnboundedOneShot => Ok(ReadingBudgetPosture::UnboundedOneShot), + ObservationReadBudget::Bounded { + max_payload_bytes, + max_witness_refs, + } => { + let payload_bytes = Self::payload_wire_len(payload)?; + let witness_refs = witness_ref_count as u64; + if payload_bytes > max_payload_bytes || witness_refs > max_witness_refs { + return Err(ObservationError::BudgetExceeded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + }); + } + Ok(ReadingBudgetPosture::Bounded { 
+ max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + }) + } + } + } + + fn payload_wire_len(payload: &ObservationPayload) -> Result { + let bytes = echo_wasm_abi::encode_cbor(&payload.to_abi()) + .map_err(|err| ObservationError::CodecFailure(err.to_string()))?; + Ok(bytes.len() as u64) + } + + fn rights_posture(rights: ObservationRights) -> ReadingRightsPosture { + match rights { + ObservationRights::KernelPublic => ReadingRightsPosture::KernelPublic, + ObservationRights::CapabilityScoped { .. } => { + debug_assert!( + false, + "capability-scoped observation rights must be rejected before reading" + ); + ReadingRightsPosture::KernelPublic + } } } fn observer_plan( frame: ObservationFrame, projection: ObservationProjectionKind, - ) -> ReadingObserverPlan { - let plan = match (frame, projection) { + ) -> Result { + builtin_observer_plan_for(frame, projection) + } + + fn builtin_observer_plan( + frame: ObservationFrame, + projection: ObservationProjectionKind, + ) -> Result { + match (frame, projection) { (ObservationFrame::CommitBoundary, ObservationProjectionKind::Head) => { - BuiltinObserverPlan::CommitBoundaryHead + Ok(BuiltinObserverPlan::CommitBoundaryHead) } (ObservationFrame::CommitBoundary, ObservationProjectionKind::Snapshot) => { - BuiltinObserverPlan::CommitBoundarySnapshot + Ok(BuiltinObserverPlan::CommitBoundarySnapshot) } (ObservationFrame::RecordedTruth, ObservationProjectionKind::TruthChannels) => { - BuiltinObserverPlan::RecordedTruthChannels + Ok(BuiltinObserverPlan::RecordedTruthChannels) } (ObservationFrame::QueryView, ObservationProjectionKind::Query) => { - BuiltinObserverPlan::QueryBytes - } - _ => { - debug_assert!( - false, - "observer_plan requires a valid frame/projection pair" - ); - BuiltinObserverPlan::QueryBytes + Ok(BuiltinObserverPlan::QueryBytes) } - }; - ReadingObserverPlan::Builtin { plan } + _ => Err(ObservationError::UnsupportedFrameProjection { frame, projection }), + } } fn observer_basis(frame: 
ObservationFrame) -> ReadingObserverBasis { @@ -1059,6 +1872,14 @@ fn provenance_ref_to_abi(reference: crate::provenance_store::ProvenanceRef) -> a } } +fn observer_plan_id_to_abi(plan_id: ObserverPlanId) -> abi::ObserverPlanId { + abi::ObserverPlanId::from_bytes(*plan_id.as_bytes()) +} + +fn observer_instance_id_to_abi(instance_id: ObserverInstanceId) -> abi::ObserverInstanceId { + abi::ObserverInstanceId::from_bytes(*instance_id.as_bytes()) +} + fn overlap_slots_digest(slots: &[SlotId]) -> Hash { let mut hasher = Hasher::new(); hasher.update(b"echo:observation-overlap-slots:v1\0"); @@ -1069,6 +1890,47 @@ fn overlap_slots_digest(slots: &[SlotId]) -> Hash { hasher.finalize().into() } +fn optic_witness_refs_hash(refs: &[ReadingWitnessRef]) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(OPTIC_OBSERVATION_WITNESS_SET_DOMAIN); + hasher.update(&(refs.len() as u64).to_le_bytes()); + for reference in refs { + match reference { + ReadingWitnessRef::ResolvedCommit { reference } => { + hasher.update(&[1]); + hash_provenance_ref(&mut hasher, *reference); + } + ReadingWitnessRef::EmptyFrontier { + worldline_id, + state_root, + commit_hash, + } => { + hasher.update(&[2]); + hasher.update(worldline_id.as_bytes()); + hasher.update(state_root); + hasher.update(commit_hash); + } + } + } + hasher.finalize().into() +} + +fn optic_tail_witness_refs_hash(refs: &[ProvenanceRef]) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(OPTIC_LIVE_TAIL_WITNESS_SET_DOMAIN); + hasher.update(&(refs.len() as u64).to_le_bytes()); + for reference in refs { + hash_provenance_ref(&mut hasher, *reference); + } + hasher.finalize().into() +} + +fn hash_provenance_ref(hasher: &mut Hasher, reference: ProvenanceRef) { + hasher.update(reference.worldline_id.as_bytes()); + hasher.update(&reference.worldline_tick.as_u64().to_le_bytes()); + hasher.update(&reference.commit_hash); +} + fn hash_slot(hasher: &mut Hasher, slot: &SlotId) { match slot { SlotId::Node(node) => { @@ -1125,6 +1987,12 
@@ mod tests { use crate::head_inbox::{make_intent_kind, InboxPolicy, IngressEnvelope, IngressTarget}; use crate::ident::{make_edge_id, make_node_id, make_type_id, WarpId}; use crate::materialization::make_channel_id; + use crate::optic::{ + AttachmentDescentPolicy, CoordinateAt, EchoCoordinate, MissingWitnessBasisReason, + ObserveOpticRequest, ObserveOpticResult, OpticAperture, OpticApertureShape, + OpticCapabilityId, OpticFocus, OpticId, OpticObstructionKind, OpticReadBudget, + ProjectionVersion, WitnessBasis, + }; use crate::provenance_store::replay_artifacts_for_entry; use crate::receipt::TickReceipt; use crate::record::{EdgeRecord, NodeRecord}; @@ -1145,10 +2013,59 @@ mod tests { WorldlineTick::from_raw(raw) } + fn builtin_one_shot( + coordinate: ObservationCoordinate, + frame: ObservationFrame, + projection: ObservationProjection, + ) -> ObservationRequest { + ObservationRequest::builtin_one_shot(coordinate, frame, projection).unwrap() + } + fn gt(raw: u64) -> GlobalTick { GlobalTick::from_raw(raw) } + fn optic_request( + worldline_id: WorldlineId, + shape: OpticApertureShape, + max_bytes: Option<u64>, + ) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id: OpticId::from_bytes([70; 32]), + focus: OpticFocus::Worldline { worldline_id }, + coordinate: EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Frontier, + }, + aperture: OpticAperture { + shape, + budget: OpticReadBudget { + max_bytes, + max_nodes: Some(8), + max_ticks: Some(1), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }, + projection_version: ProjectionVersion::from_raw(1), + reducer_version: None, + capability: OpticCapabilityId::from_bytes([71; 32]), + } + } + + fn authored_observer_plan() -> ReadingObserverPlan { + ReadingObserverPlan::Authored { + plan: Box::new(AuthoredObserverPlan { + plan_id: ObserverPlanId::from_bytes([80; 32]), + artifact_hash: [81; 32], + schema_hash: [82; 32], + state_schema_hash: [83; 32], +
update_law_hash: [84; 32], + emission_law_hash: [85; 32], + }), + } + } + fn empty_runtime_fixture() -> (Engine, WorldlineRuntime, ProvenanceService, WorldlineId) { let mut store = GraphStore::default(); let root = make_node_id("root"); @@ -1560,6 +2477,27 @@ mod tests { )); } + #[test] + fn builtin_one_shot_rejects_invalid_frame_projection() { + let err = ObservationRequest::builtin_one_shot( + ObservationCoordinate { + worldline_id: wl(1), + at: ObservationAt::Frontier, + }, + ObservationFrame::RecordedTruth, + ObservationProjection::Head, + ) + .unwrap_err(); + + assert_eq!( + err, + ObservationError::UnsupportedFrameProjection { + frame: ObservationFrame::RecordedTruth, + projection: ObservationProjectionKind::Head, + } + ); + } + #[test] fn frontier_head_matches_live_frontier_snapshot() { let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); @@ -1567,14 +2505,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Head, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ), ) .unwrap(); @@ -1601,14 +2539,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: ObservationFrame::RecordedTruth, - projection: ObservationProjection::TruthChannels { channels: None }, - }, + ObservationFrame::RecordedTruth, + ObservationProjection::TruthChannels { channels: None }, + ), ) .unwrap_err(); assert_eq!( @@ -1627,14 +2565,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: 
ObservationFrame::RecordedTruth, - projection: ObservationProjection::TruthChannels { channels: None }, - }, + ObservationFrame::RecordedTruth, + ObservationProjection::TruthChannels { channels: None }, + ), ) .unwrap(); let channels = if let ObservationPayload::TruthChannels(channels) = artifact.payload { @@ -1649,14 +2587,14 @@ mod tests { #[test] fn identical_requests_produce_stable_artifact_hashes() { let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); - let request = ObservationRequest { - coordinate: ObservationCoordinate { + let request = builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Head, - }; + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); let first = ObservationService::observe(&runtime, &provenance, &engine, request.clone()).unwrap(); let second = ObservationService::observe(&runtime, &provenance, &engine, request).unwrap(); @@ -1664,6 +2602,47 @@ mod tests { assert_eq!(first.to_abi(), second.to_abi()); } + #[test] + fn reading_envelope_posture_participates_in_artifact_identity() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let unbounded_request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + let mut bounded_request = unbounded_request.clone(); + bounded_request.budget = ObservationReadBudget::Bounded { + max_payload_bytes: 512, + max_witness_refs: 1, + }; + + let unbounded = + ObservationService::observe(&runtime, &provenance, &engine, unbounded_request).unwrap(); + let bounded = + ObservationService::observe(&runtime, &provenance, &engine, bounded_request).unwrap(); + + assert_eq!(unbounded.resolved, bounded.resolved); + assert_eq!(unbounded.payload, bounded.payload); + assert_ne!(unbounded.reading, bounded.reading); + 
assert_ne!(unbounded.artifact_hash, bounded.artifact_hash); + assert_eq!( + unbounded.reading.budget_posture, + ReadingBudgetPosture::UnboundedOneShot + ); + assert!(matches!( + bounded.reading.budget_posture, + ReadingBudgetPosture::Bounded { + max_payload_bytes: 512, + payload_bytes: 1..=512, + max_witness_refs: 1, + witness_refs: 1, + } + )); + } + #[test] fn ordinary_worldline_observation_reports_worldline_posture() { let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); @@ -1671,14 +2650,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Head, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ), ) .unwrap(); @@ -1729,6 +2708,233 @@ mod tests { ); } + #[test] + fn explicit_bounded_observer_request_returns_bounded_reading_artifact() -> Result<(), String> { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let mut request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + request.budget = ObservationReadBudget::Bounded { + max_payload_bytes: 512, + max_witness_refs: 1, + }; + + let artifact = ObservationService::observe(&runtime, &provenance, &engine, request.clone()) + .map_err(|err| err.to_string())?; + + assert_eq!(artifact.reading.observer_plan, request.observer_plan); + assert_eq!(artifact.reading.observer_instance, None); + assert!(matches!( + artifact.reading.budget_posture, + ReadingBudgetPosture::Bounded { + max_payload_bytes: 512, + payload_bytes: 1..=512, + max_witness_refs: 1, + witness_refs: 1, + } + )); + assert!(matches!( + request.to_abi().budget, + abi::ObservationReadBudget::Bounded { + max_payload_bytes: 512, + max_witness_refs: 1, 
+ } + )); + + Ok(()) + } + + #[test] + fn authored_observer_plan_obstructs_without_hidden_builtin_fallback() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let mut request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + let authored = authored_observer_plan(); + request.observer_plan = authored.clone(); + + let err = ObservationService::observe(&runtime, &provenance, &engine, request).unwrap_err(); + + assert_eq!(err, ObservationError::UnsupportedObserverPlan(authored)); + } + + #[test] + fn hosted_observer_instance_obstructs_without_stateful_fallback() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let mut request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + let instance = ObserverInstanceRef { + instance_id: ObserverInstanceId::from_bytes([86; 32]), + plan_id: ObserverPlanId::from_bytes([80; 32]), + state_hash: [87; 32], + }; + request.observer_instance = Some(instance.clone()); + + let err = ObservationService::observe(&runtime, &provenance, &engine, request).unwrap_err(); + + assert_eq!(err, ObservationError::UnsupportedObserverInstance(instance)); + } + + #[test] + fn capability_scoped_observer_rights_obstruct_without_public_fallback() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let mut request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + let rights = ObservationRights::CapabilityScoped { + capability: OpticCapabilityId::from_bytes([88; 32]), + }; + request.rights = rights; + + let err = ObservationService::observe(&runtime, &provenance, &engine, request).unwrap_err(); + + assert_eq!(err, 
ObservationError::UnsupportedRights(rights)); + } + + #[test] + fn observation_budget_obstructs_instead_of_emitting_oversized_reading() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let mut request = builtin_one_shot( + ObservationCoordinate { + worldline_id, + at: ObservationAt::Frontier, + }, + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ); + request.budget = ObservationReadBudget::Bounded { + max_payload_bytes: 1, + max_witness_refs: 1, + }; + + let err = ObservationService::observe(&runtime, &provenance, &engine, request).unwrap_err(); + + assert!(matches!( + err, + ObservationError::BudgetExceeded { + max_payload_bytes: 1, + payload_bytes: 2.., + max_witness_refs: 1, + witness_refs: 1, + } + )); + } + + #[test] + fn bounded_head_optic_returns_read_identity() -> Result<(), String> { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let request = optic_request(worldline_id, OpticApertureShape::Head, Some(256)); + let reading = match ObservationService::observe_optic( + &runtime, + &provenance, + &engine, + request.clone(), + ) { + ObserveOpticResult::Reading(reading) => reading, + ObserveOpticResult::Obstructed(obstruction) => { + return Err(format!("expected optic reading, got {obstruction:?}")); + } + }; + + assert_eq!(reading.read_identity.optic_id, request.optic_id); + assert_eq!(reading.read_identity.coordinate, request.coordinate); + assert_eq!( + reading.read_identity.aperture_digest, + request.aperture.digest() + ); + assert!(matches!( + reading.read_identity.witness_basis, + WitnessBasis::ResolvedCommit { .. 
} + )); + assert!(matches!(reading.payload, ObservationPayload::Head(_))); + assert_eq!(reading.retained, None); + assert_eq!( + reading.to_abi().read_identity.read_identity_hash, + reading.read_identity.read_identity_hash.to_vec() + ); + + Ok(()) + } + + #[test] + fn bounded_snapshot_optic_returns_read_identity() -> Result<(), String> { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let request = optic_request( + worldline_id, + OpticApertureShape::SnapshotMetadata, + Some(256), + ); + let reading = match ObservationService::observe_optic( + &runtime, + &provenance, + &engine, + request.clone(), + ) { + ObserveOpticResult::Reading(reading) => reading, + ObserveOpticResult::Obstructed(obstruction) => { + return Err(format!("expected optic reading, got {obstruction:?}")); + } + }; + + assert_eq!(reading.read_identity.optic_id, request.optic_id); + assert_eq!(reading.read_identity.focus_digest, request.focus.digest()); + assert!(matches!(reading.payload, ObservationPayload::Snapshot(_))); + assert!(matches!( + reading.read_identity.witness_basis, + WitnessBasis::ResolvedCommit { .. 
} + )); + + Ok(()) + } + + #[test] + fn oversized_optic_aperture_returns_budget_obstruction() { + let (engine, runtime, provenance, worldline_id) = one_commit_fixture(); + let request = optic_request( + worldline_id, + OpticApertureShape::ByteRange { + start: 0, + len: 4096, + }, + Some(1024), + ); + + let result = ObservationService::observe_optic(&runtime, &provenance, &engine, request); + + assert!(matches!( + result, + ObserveOpticResult::Obstructed(ref obstruction) + if obstruction.kind == OpticObstructionKind::BudgetExceeded + && matches!( + obstruction.witness_basis, + Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::BudgetLimited, + }) + ) + )); + } + #[test] fn reading_residual_postures_convert_to_abi() { let cases = [ @@ -1763,14 +2969,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id: child_worldline, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Snapshot, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Snapshot, + ), ) .unwrap(); @@ -1791,28 +2997,28 @@ mod tests { &anchor_runtime, &anchor_provenance, &anchor_engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id: anchor_child, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Snapshot, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Snapshot, + ), ) .unwrap(); let artifact = ObservationService::observe( &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id: child_worldline, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Snapshot, - }, + ObservationFrame::CommitBoundary, + 
ObservationProjection::Snapshot, + ), ) .unwrap(); @@ -1838,14 +3044,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id: child_worldline, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Snapshot, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Snapshot, + ), ) .unwrap(); @@ -1886,14 +3092,14 @@ mod tests { &runtime, &provenance, &engine, - ObservationRequest { - coordinate: ObservationCoordinate { + builtin_one_shot( + ObservationCoordinate { worldline_id, at: ObservationAt::Tick(wt(0)), }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Snapshot, - }, + ObservationFrame::CommitBoundary, + ObservationProjection::Snapshot, + ), ) .unwrap(); diff --git a/crates/warp-core/src/optic.rs b/crates/warp-core/src/optic.rs new file mode 100644 index 00000000..fe32fe54 --- /dev/null +++ b/crates/warp-core/src/optic.rs @@ -0,0 +1,2617 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Generic Echo optic nouns and deterministic identifiers. 
+ +use std::collections::{BTreeMap, BTreeSet}; + +use blake3::Hasher; +use echo_wasm_abi::kernel_port as abi; + +use crate::attachment::{AttachmentKey, AttachmentOwner, AttachmentPlane}; +use crate::clock::WorldlineTick; +use crate::ident::{EdgeKey, Hash, NodeKey, TypeId, WarpId}; +use crate::materialization::ChannelId; +use crate::observation::{ + ObservationPayload, ReadingBudgetPosture, ReadingEnvelope, ReadingResidualPosture, + ReadingRightsPosture, ReadingWitnessRef, +}; +use crate::provenance_store::ProvenanceRef; +use crate::strand::StrandId; +use crate::worldline::WorldlineId; + +const OPTIC_ID_DOMAIN: &[u8] = b"echo:optic-id:v1\0"; +const FOCUS_DIGEST_DOMAIN: &[u8] = b"echo:optic-focus:v1\0"; +const APERTURE_DIGEST_DOMAIN: &[u8] = b"echo:optic-aperture:v1\0"; +const READ_IDENTITY_DOMAIN: &[u8] = b"echo:read-identity:v1\0"; +const RETAINED_READING_KEY_DOMAIN: &[u8] = b"echo:retained-reading-key:v1\0"; + +macro_rules! opaque_id { + ($(#[$meta:meta])* $name:ident) => { + $(#[$meta])* + #[repr(transparent)] + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] + #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + pub struct $name([u8; 32]); + + impl $name { + /// Reconstructs the id from canonical bytes. + #[must_use] + pub const fn from_bytes(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Returns the canonical byte representation. + #[must_use] + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } + } + }; +} + +opaque_id!( + /// Stable identity for an Echo optic descriptor. + OpticId +); + +opaque_id!( + /// Stable identity for a generic braid. + BraidId +); + +opaque_id!( + /// Stable key for a retained reading. + RetainedReadingKey +); + +opaque_id!( + /// Stable identity for the encoding used by a retained reading payload. + RetainedReadingCodecId +); + +opaque_id!( + /// Stable identity for an intent family admitted through an optic. 
+ IntentFamilyId +); + +opaque_id!( + /// Stable identity for an admission law used by optic dispatch. + AdmissionLawId +); + +opaque_id!( + /// Stable identity for an optic capability basis. + OpticCapabilityId +); + +opaque_id!( + /// Stable identity for an actor opening or using an optic. + OpticActorId +); + +/// Version of the projection law used by an optic read. +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(transparent))] +pub struct ProjectionVersion(u32); + +impl ProjectionVersion { + /// Builds a projection version from its raw value. + #[must_use] + pub const fn from_raw(raw: u32) -> Self { + Self(raw) + } + + /// Returns the raw version value. + #[must_use] + pub const fn as_u32(self) -> u32 { + self.0 + } + + fn to_abi(self) -> abi::ProjectionVersion { + abi::ProjectionVersion(self.0) + } +} + +/// Version of the reducer law used by an optic read, when present. +#[repr(transparent)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(transparent))] +pub struct ReducerVersion(u32); + +impl ReducerVersion { + /// Builds a reducer version from its raw value. + #[must_use] + pub const fn from_raw(raw: u32) -> Self { + Self(raw) + } + + /// Returns the raw version value. + #[must_use] + pub const fn as_u32(self) -> u32 { + self.0 + } + + fn to_abi(self) -> abi::ReducerVersion { + abi::ReducerVersion(self.0) + } +} + +/// Lawful subject named by an optic. +/// +/// This is deliberately not a global graph handle. Each variant names a +/// focused substrate subject or boundary that can be observed under an explicit +/// coordinate and capability. 
+#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OpticFocus { + /// A whole worldline. + Worldline { + /// Target worldline. + worldline_id: WorldlineId, + }, + /// A live or retained strand. + Strand { + /// Target strand. + strand_id: StrandId, + }, + /// A generic braid projection. + Braid { + /// Target braid. + braid_id: BraidId, + }, + /// A retained reading. + RetainedReading { + /// Retained reading key. + key: RetainedReadingKey, + }, + /// Explicit attachment boundary. Descending through it is a separate + /// aperture/capability decision. + AttachmentBoundary { + /// Attachment boundary key. + key: AttachmentKey, + }, +} + +impl OpticFocus { + /// Converts the focus to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::OpticFocus { + match self { + Self::Worldline { worldline_id } => abi::OpticFocus::Worldline { + worldline_id: worldline_id_to_abi(*worldline_id), + }, + Self::Strand { strand_id } => abi::OpticFocus::Strand { + strand_id: strand_id_to_abi(*strand_id), + }, + Self::Braid { braid_id } => abi::OpticFocus::Braid { + braid_id: braid_id_to_abi(*braid_id), + }, + Self::RetainedReading { key } => abi::OpticFocus::RetainedReading { + key: retained_reading_key_to_abi(*key), + }, + Self::AttachmentBoundary { key } => abi::OpticFocus::AttachmentBoundary { + key: attachment_key_to_abi(*key), + }, + } + } + + /// Returns a stable digest of this focus for read-identity construction. 
+ #[must_use] + pub fn digest(&self) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(FOCUS_DIGEST_DOMAIN); + self.feed_hash(&mut hasher); + hasher.finalize().into() + } + + fn feed_hash(&self, hasher: &mut Hasher) { + match self { + Self::Worldline { worldline_id } => { + feed_tag(hasher, 1); + hasher.update(worldline_id.as_bytes()); + } + Self::Strand { strand_id } => { + feed_tag(hasher, 2); + hasher.update(strand_id.as_bytes()); + } + Self::Braid { braid_id } => { + feed_tag(hasher, 3); + hasher.update(braid_id.as_bytes()); + } + Self::RetainedReading { key } => { + feed_tag(hasher, 4); + hasher.update(key.as_bytes()); + } + Self::AttachmentBoundary { key } => { + feed_tag(hasher, 5); + feed_attachment_key(hasher, *key); + } + } + } +} + +/// Requested position within a substrate coordinate. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum CoordinateAt { + /// Observe or target the current frontier. + Frontier, + /// Observe or target a specific committed tick. + Tick(WorldlineTick), + /// Observe or target a full provenance coordinate. + Provenance(ProvenanceRef), +} + +impl CoordinateAt { + fn to_abi(self) -> abi::CoordinateAt { + match self { + Self::Frontier => abi::CoordinateAt::Frontier, + Self::Tick(worldline_tick) => abi::CoordinateAt::Tick { + worldline_tick: worldline_tick_to_abi(worldline_tick), + }, + Self::Provenance(reference) => abi::CoordinateAt::Provenance { + reference: provenance_ref_to_abi(reference), + }, + } + } + + fn feed_hash(self, hasher: &mut Hasher) { + match self { + Self::Frontier => feed_tag(hasher, 1), + Self::Tick(worldline_tick) => { + feed_tag(hasher, 2); + feed_u64(hasher, worldline_tick.as_u64()); + } + Self::Provenance(reference) => { + feed_tag(hasher, 3); + feed_provenance_ref(hasher, reference); + } + } + } +} + +/// Causal coordinate named by an optic. 
+#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum EchoCoordinate { + /// Coordinate on a worldline. + Worldline { + /// Target worldline. + worldline_id: WorldlineId, + /// Requested position. + at: CoordinateAt, + }, + /// Coordinate on a strand. + Strand { + /// Target strand. + strand_id: StrandId, + /// Requested position. + at: CoordinateAt, + /// Optional parent basis that makes the strand read honest. + parent_basis: Option<ProvenanceRef>, + }, + /// Coordinate on a braid projection. + Braid { + /// Target braid. + braid_id: BraidId, + /// Projection digest at the named member frontier. + projection_digest: Hash, + /// Number of members included in the projection. + member_count: u64, + }, + /// Coordinate of a retained reading. + RetainedReading { + /// Retained reading key. + key: RetainedReadingKey, + }, +} + +impl EchoCoordinate { + /// Converts the coordinate to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::EchoCoordinate { + match self { + Self::Worldline { worldline_id, at } => abi::EchoCoordinate::Worldline { + worldline_id: worldline_id_to_abi(*worldline_id), + at: at.to_abi(), + }, + Self::Strand { + strand_id, + at, + parent_basis, + } => abi::EchoCoordinate::Strand { + strand_id: strand_id_to_abi(*strand_id), + at: at.to_abi(), + parent_basis: parent_basis.map(provenance_ref_to_abi), + }, + Self::Braid { + braid_id, + projection_digest, + member_count, + } => abi::EchoCoordinate::Braid { + braid_id: braid_id_to_abi(*braid_id), + projection_digest: projection_digest.to_vec(), + member_count: *member_count, + }, + Self::RetainedReading { key } => abi::EchoCoordinate::RetainedReading { + key: retained_reading_key_to_abi(*key), + }, + } + } + + fn feed_hash(&self, hasher: &mut Hasher) { + match self { + Self::Worldline { worldline_id, at } => { + feed_tag(hasher, 1); + hasher.update(worldline_id.as_bytes()); + at.feed_hash(hasher); + } + Self::Strand { + strand_id, +
at, + parent_basis, + } => { + feed_tag(hasher, 2); + hasher.update(strand_id.as_bytes()); + at.feed_hash(hasher); + feed_optional_provenance_ref(hasher, *parent_basis); + } + Self::Braid { + braid_id, + projection_digest, + member_count, + } => { + feed_tag(hasher, 3); + hasher.update(braid_id.as_bytes()); + hasher.update(projection_digest); + feed_u64(hasher, *member_count); + } + Self::RetainedReading { key } => { + feed_tag(hasher, 4); + hasher.update(key.as_bytes()); + } + } + } +} + +/// Attachment recursion policy for an optic aperture. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum AttachmentDescentPolicy { + /// Stop at the attachment boundary and expose only the boundary reference. + BoundaryOnly, + /// Recursive descent was explicitly requested and remains budget/capability checked. + Explicit, +} + +impl AttachmentDescentPolicy { + fn to_abi(self) -> abi::AttachmentDescentPolicy { + match self { + Self::BoundaryOnly => abi::AttachmentDescentPolicy::BoundaryOnly, + Self::Explicit => abi::AttachmentDescentPolicy::Explicit, + } + } + + fn feed_hash(self, hasher: &mut Hasher) { + match self { + Self::BoundaryOnly => feed_tag(hasher, 1), + Self::Explicit => feed_tag(hasher, 2), + } + } +} + +/// Budget bound for an optic read. +#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpticReadBudget { + /// Maximum payload bytes to produce. + pub max_bytes: Option<u64>, + /// Maximum graph nodes or entities to visit. + pub max_nodes: Option<u64>, + /// Maximum causal ticks to reduce. + pub max_ticks: Option<u64>, + /// Maximum attachment boundaries to descend through.
+ pub max_attachments: Option<u64>, +} + +impl OpticReadBudget { + fn to_abi(self) -> abi::OpticReadBudget { + abi::OpticReadBudget { + max_bytes: self.max_bytes, + max_nodes: self.max_nodes, + max_ticks: self.max_ticks, + max_attachments: self.max_attachments, + } + } + + fn feed_hash(self, hasher: &mut Hasher) { + feed_optional_u64(hasher, self.max_bytes); + feed_optional_u64(hasher, self.max_nodes); + feed_optional_u64(hasher, self.max_ticks); + feed_optional_u64(hasher, self.max_attachments); + } +} + +/// Bounded aperture shape selected by an optic read. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OpticApertureShape { + /// Head/frontier metadata only. + Head, + /// Snapshot metadata only. + SnapshotMetadata, + /// Recorded truth channels. + TruthChannels { + /// Optional channel filter. `None` means all recorded channels within budget. + channels: Option<Vec<ChannelId>>, + }, + /// Contract query bytes identified by query id and vars digest. + QueryBytes { + /// Stable query identifier. + query_id: u32, + /// Hash of canonical query variables. + vars_digest: Hash, + }, + /// Bounded byte range aperture. + ByteRange { + /// Start byte offset. + start: u64, + /// Maximum byte length to return. + len: u64, + }, + /// Explicit attachment boundary.
+ AttachmentBoundary, +} + +impl OpticApertureShape { + fn to_abi(&self) -> abi::OpticApertureShape { + match self { + Self::Head => abi::OpticApertureShape::Head, + Self::SnapshotMetadata => abi::OpticApertureShape::SnapshotMetadata, + Self::TruthChannels { channels } => abi::OpticApertureShape::TruthChannels { + channels: channels + .as_ref() + .map(|ids| ids.iter().map(channel_id_to_abi).collect()), + }, + Self::QueryBytes { + query_id, + vars_digest, + } => abi::OpticApertureShape::QueryBytes { + query_id: *query_id, + vars_digest: vars_digest.to_vec(), + }, + Self::ByteRange { start, len } => abi::OpticApertureShape::ByteRange { + start: *start, + len: *len, + }, + Self::AttachmentBoundary => abi::OpticApertureShape::AttachmentBoundary, + } + } + + fn feed_hash(&self, hasher: &mut Hasher) { + match self { + Self::Head => feed_tag(hasher, 1), + Self::SnapshotMetadata => feed_tag(hasher, 2), + Self::TruthChannels { channels } => { + feed_tag(hasher, 3); + feed_optional_hash_list(hasher, channels.as_deref()); + } + Self::QueryBytes { + query_id, + vars_digest, + } => { + feed_tag(hasher, 4); + feed_u32(hasher, *query_id); + hasher.update(vars_digest); + } + Self::ByteRange { start, len } => { + feed_tag(hasher, 5); + feed_u64(hasher, *start); + feed_u64(hasher, *len); + } + Self::AttachmentBoundary => feed_tag(hasher, 6), + } + } +} + +/// Complete aperture for one optic read. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpticAperture { + /// Shape of the read aperture. + pub shape: OpticApertureShape, + /// Read budget. + pub budget: OpticReadBudget, + /// Attachment recursion policy. + pub attachment_descent: AttachmentDescentPolicy, +} + +impl OpticAperture { + /// Converts the aperture to the shared ABI DTO. 
+ #[must_use] + pub fn to_abi(&self) -> abi::OpticAperture { + abi::OpticAperture { + shape: self.shape.to_abi(), + budget: self.budget.to_abi(), + attachment_descent: self.attachment_descent.to_abi(), + } + } + + /// Returns a stable digest of this aperture for read-identity construction. + #[must_use] + pub fn digest(&self) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(APERTURE_DIGEST_DOMAIN); + self.feed_hash(&mut hasher); + hasher.finalize().into() + } + + fn feed_hash(&self, hasher: &mut Hasher) { + self.shape.feed_hash(hasher); + self.budget.feed_hash(hasher); + self.attachment_descent.feed_hash(hasher); + } +} + +/// Opened optic descriptor. This is not a mutable handle. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct EchoOptic { + /// Stable optic identity derived from the descriptor. + pub optic_id: OpticId, + /// Lawful subject being observed or targeted by intent dispatch. + pub focus: OpticFocus, + /// Explicit causal coordinate. + pub coordinate: EchoCoordinate, + /// Projection law version. + pub projection_version: ProjectionVersion, + /// Reducer law version, if a reducer participates. + pub reducer_version: Option<ReducerVersion>, + /// Intent family allowed through this optic. + pub intent_family: IntentFamilyId, + /// Capability basis under which the optic was opened. + pub capability: OpticCapabilityId, +} + +impl EchoOptic { + /// Builds a descriptor and derives its stable id from the generic optic fields.
+ #[must_use] + pub fn new( + focus: OpticFocus, + coordinate: EchoCoordinate, + projection_version: ProjectionVersion, + reducer_version: Option<ReducerVersion>, + intent_family: IntentFamilyId, + capability: OpticCapabilityId, + ) -> Self { + let optic_id = derive_optic_id( + &focus, + &coordinate, + projection_version, + reducer_version, + intent_family, + capability, + ); + Self { + optic_id, + focus, + coordinate, + projection_version, + reducer_version, + intent_family, + capability, + } + } + + /// Converts the descriptor to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::EchoOptic { + abi::EchoOptic { + optic_id: optic_id_to_abi(self.optic_id), + focus: self.focus.to_abi(), + coordinate: self.coordinate.to_abi(), + projection_version: self.projection_version.to_abi(), + reducer_version: self.reducer_version.map(ReducerVersion::to_abi), + intent_family: intent_family_id_to_abi(self.intent_family), + capability: optic_capability_id_to_abi(self.capability), + } + } +} + +/// Reason an optic read identity cannot name a complete witness basis yet. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum MissingWitnessBasisReason { + /// Required witness evidence is unavailable. + EvidenceUnavailable, + /// The requested read exceeded its declared budget. + BudgetLimited, + /// The current capability does not permit revealing the basis. + RightsLimited, + /// The requested basis posture is not supported by this projection law.
+    UnsupportedBasis,
+}
+
+impl MissingWitnessBasisReason {
+    fn to_abi(self) -> abi::MissingWitnessBasisReason {
+        match self {
+            Self::EvidenceUnavailable => abi::MissingWitnessBasisReason::EvidenceUnavailable,
+            Self::BudgetLimited => abi::MissingWitnessBasisReason::BudgetLimited,
+            Self::RightsLimited => abi::MissingWitnessBasisReason::RightsLimited,
+            Self::UnsupportedBasis => abi::MissingWitnessBasisReason::UnsupportedBasis,
+        }
+    }
+
+    fn feed_hash(self, hasher: &mut Hasher) {
+        match self {
+            Self::EvidenceUnavailable => feed_tag(hasher, 1),
+            Self::BudgetLimited => feed_tag(hasher, 2),
+            Self::RightsLimited => feed_tag(hasher, 3),
+            Self::UnsupportedBasis => feed_tag(hasher, 4),
+        }
+    }
+}
+
+/// Witness basis named by a read identity.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum WitnessBasis {
+    /// One resolved provenance commit witnesses the reading.
+    ResolvedCommit {
+        /// Provenance coordinate that witnesses the reading.
+        reference: ProvenanceRef,
+        /// State root at the witness coordinate.
+        state_root: Hash,
+        /// Commit hash at the witness coordinate.
+        commit_hash: Hash,
+    },
+    /// A checkpoint plus explicit live-tail witness set witnesses the reading.
+    CheckpointPlusTail {
+        /// Checkpoint coordinate used as the cold basis.
+        checkpoint_ref: ProvenanceRef,
+        /// Checkpoint content hash.
+        checkpoint_hash: Hash,
+        /// Live-tail provenance refs reduced after the checkpoint.
+        tail_witness_refs: Vec<ProvenanceRef>,
+        /// Digest of the live-tail witness set.
+        tail_digest: Hash,
+    },
+    /// A witness set whose exact semantics are named by the contained refs and digest.
+    WitnessSet {
+        /// Witness refs supporting the read.
+        refs: Vec<ReadingWitnessRef>,
+        /// Digest over the witness set.
+        witness_set_hash: Hash,
+    },
+    /// The basis is missing; callers must treat the read as obstructed or incomplete.
+    Missing {
+        /// Deterministic reason the basis is missing.
+ reason: MissingWitnessBasisReason, + }, +} + +impl WitnessBasis { + /// Converts the witness basis to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::WitnessBasis { + match self { + Self::ResolvedCommit { + reference, + state_root, + commit_hash, + } => abi::WitnessBasis::ResolvedCommit { + reference: provenance_ref_to_abi(*reference), + state_root: state_root.to_vec(), + commit_hash: commit_hash.to_vec(), + }, + Self::CheckpointPlusTail { + checkpoint_ref, + checkpoint_hash, + tail_witness_refs, + tail_digest, + } => abi::WitnessBasis::CheckpointPlusTail { + checkpoint_ref: provenance_ref_to_abi(*checkpoint_ref), + checkpoint_hash: checkpoint_hash.to_vec(), + tail_witness_refs: tail_witness_refs + .iter() + .copied() + .map(provenance_ref_to_abi) + .collect(), + tail_digest: tail_digest.to_vec(), + }, + Self::WitnessSet { + refs, + witness_set_hash, + } => abi::WitnessBasis::WitnessSet { + refs: refs.iter().map(reading_witness_ref_to_abi).collect(), + witness_set_hash: witness_set_hash.to_vec(), + }, + Self::Missing { reason } => abi::WitnessBasis::Missing { + reason: reason.to_abi(), + }, + } + } + + fn feed_hash(&self, hasher: &mut Hasher) { + match self { + Self::ResolvedCommit { + reference, + state_root, + commit_hash, + } => { + feed_tag(hasher, 1); + feed_provenance_ref(hasher, *reference); + hasher.update(state_root); + hasher.update(commit_hash); + } + Self::CheckpointPlusTail { + checkpoint_ref, + checkpoint_hash, + tail_witness_refs, + tail_digest, + } => { + feed_tag(hasher, 2); + feed_provenance_ref(hasher, *checkpoint_ref); + hasher.update(checkpoint_hash); + feed_u64(hasher, tail_witness_refs.len() as u64); + for reference in tail_witness_refs { + feed_provenance_ref(hasher, *reference); + } + hasher.update(tail_digest); + } + Self::WitnessSet { + refs, + witness_set_hash, + } => { + feed_tag(hasher, 3); + feed_u64(hasher, refs.len() as u64); + for reference in refs { + feed_reading_witness_ref(hasher, reference); + } + 
hasher.update(witness_set_hash);
+            }
+            Self::Missing { reason } => {
+                feed_tag(hasher, 4);
+                reason.feed_hash(hasher);
+            }
+        }
+    }
+}
+
+/// Stable identity of the question an optic read answered.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct ReadIdentity {
+    /// Stable hash over all identity fields.
+    pub read_identity_hash: Hash,
+    /// Optic being observed.
+    pub optic_id: OpticId,
+    /// Digest of the focus named by the read.
+    pub focus_digest: Hash,
+    /// Coordinate named by the read.
+    pub coordinate: EchoCoordinate,
+    /// Digest of the aperture named by the read.
+    pub aperture_digest: Hash,
+    /// Projection law version.
+    pub projection_version: ProjectionVersion,
+    /// Reducer law version, if present.
+    pub reducer_version: Option<ReducerVersion>,
+    /// Witness basis used by the read.
+    pub witness_basis: WitnessBasis,
+    /// Rights posture of the emitted reading.
+    pub rights_posture: ReadingRightsPosture,
+    /// Budget posture of the emitted reading.
+    pub budget_posture: ReadingBudgetPosture,
+    /// Residual posture of the emitted reading.
+    pub residual_posture: ReadingResidualPosture,
+}
+
+impl ReadIdentity {
+    /// Builds a read identity from the full question and evidence posture.
+    #[allow(clippy::too_many_arguments)]
+    #[must_use]
+    pub fn new(
+        optic_id: OpticId,
+        focus: &OpticFocus,
+        coordinate: EchoCoordinate,
+        aperture: &OpticAperture,
+        projection_version: ProjectionVersion,
+        reducer_version: Option<ReducerVersion>,
+        witness_basis: WitnessBasis,
+        rights_posture: ReadingRightsPosture,
+        budget_posture: ReadingBudgetPosture,
+        residual_posture: ReadingResidualPosture,
+    ) -> Self {
+        let focus_digest = focus.digest();
+        let aperture_digest = aperture.digest();
+        let read_identity_hash = derive_read_identity_hash(
+            optic_id,
+            &focus_digest,
+            &coordinate,
+            &aperture_digest,
+            projection_version,
+            reducer_version,
+            &witness_basis,
+            rights_posture,
+            budget_posture,
+            residual_posture,
+        );
+        Self {
+            read_identity_hash,
+            optic_id,
+            focus_digest,
+            coordinate,
+            aperture_digest,
+            projection_version,
+            reducer_version,
+            witness_basis,
+            rights_posture,
+            budget_posture,
+            residual_posture,
+        }
+    }
+
+    /// Builds a compatible identity using posture fields from an existing reading envelope.
+    #[allow(clippy::too_many_arguments)]
+    #[must_use]
+    pub fn from_reading_envelope(
+        optic_id: OpticId,
+        focus: &OpticFocus,
+        coordinate: EchoCoordinate,
+        aperture: &OpticAperture,
+        projection_version: ProjectionVersion,
+        reducer_version: Option<ReducerVersion>,
+        witness_basis: WitnessBasis,
+        reading: &ReadingEnvelope,
+    ) -> Self {
+        Self::new(
+            optic_id,
+            focus,
+            coordinate,
+            aperture,
+            projection_version,
+            reducer_version,
+            witness_basis,
+            reading.rights_posture,
+            reading.budget_posture,
+            reading.residual_posture,
+        )
+    }
+
+    /// Converts the read identity to the shared ABI DTO.
+ #[must_use] + pub fn to_abi(&self) -> abi::ReadIdentity { + abi::ReadIdentity { + read_identity_hash: self.read_identity_hash.to_vec(), + optic_id: optic_id_to_abi(self.optic_id), + focus_digest: self.focus_digest.to_vec(), + coordinate: self.coordinate.to_abi(), + aperture_digest: self.aperture_digest.to_vec(), + projection_version: self.projection_version.to_abi(), + reducer_version: self.reducer_version.map(ReducerVersion::to_abi), + witness_basis: self.witness_basis.to_abi(), + rights_posture: reading_rights_posture_to_abi(self.rights_posture), + budget_posture: reading_budget_posture_to_abi(self.budget_posture), + residual_posture: reading_residual_posture_to_abi(self.residual_posture), + } + } +} + +/// Reading envelope plus first-class optic read identity. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpticReadingEnvelope { + /// Existing observation reading envelope. + pub reading: ReadingEnvelope, + /// Stable read identity for the question this reading answered. + pub read_identity: ReadIdentity, +} + +impl OpticReadingEnvelope { + /// Builds an optic reading envelope from an existing reading envelope and identity. + #[must_use] + pub fn new(reading: ReadingEnvelope, read_identity: ReadIdentity) -> Self { + Self { + reading, + read_identity, + } + } + + /// Converts the envelope to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::OpticReadingEnvelope { + abi::OpticReadingEnvelope { + reading: self.reading.to_abi(), + read_identity: self.read_identity.to_abi(), + } + } +} + +impl RetainedReadingKey { + /// Derives a retained-reading key from semantic identity and retained bytes. 
+ #[must_use] + pub fn derive( + read_identity: &ReadIdentity, + content_hash: Hash, + codec_id: RetainedReadingCodecId, + byte_len: u64, + ) -> Self { + derive_retained_reading_key(read_identity, &content_hash, codec_id, byte_len) + } +} + +/// Descriptor for a retained reading payload. +/// +/// The CAS/content hash names bytes. This descriptor's key additionally names +/// the semantic read identity and codec, so equal bytes answering different +/// questions do not alias. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct RetainedReadingDescriptor { + /// Stable key derived from the semantic read identity and byte identity. + pub key: RetainedReadingKey, + /// Semantic identity of the question answered by the retained payload. + pub read_identity: ReadIdentity, + /// Content hash of the retained payload bytes. + pub content_hash: Hash, + /// Codec used for the retained payload bytes. + pub codec_id: RetainedReadingCodecId, + /// Retained payload byte length. + pub byte_len: u64, +} + +impl RetainedReadingDescriptor { + /// Builds a retained-reading descriptor and derives its stable key. + #[must_use] + pub fn new( + read_identity: ReadIdentity, + content_hash: Hash, + codec_id: RetainedReadingCodecId, + byte_len: u64, + ) -> Self { + let key = RetainedReadingKey::derive(&read_identity, content_hash, codec_id, byte_len); + Self { + key, + read_identity, + content_hash, + codec_id, + byte_len, + } + } + + /// Converts the descriptor to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::RetainedReadingDescriptor { + abi::RetainedReadingDescriptor { + key: retained_reading_key_to_abi(self.key), + read_identity: self.read_identity.to_abi(), + content_hash: self.content_hash.to_vec(), + codec_id: retained_reading_codec_id_to_abi(self.codec_id), + byte_len: self.byte_len, + } + } +} + +/// Request to retain reading payload bytes under a semantic read identity. 
+///
+/// Retention stores bytes and a descriptor only. It does not create substrate
+/// truth and does not mutate the optic subject.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct RetainReadingRequest {
+    /// Semantic identity of the read question answered by the payload.
+    pub read_identity: ReadIdentity,
+    /// Codec used to encode the retained payload bytes.
+    pub codec_id: RetainedReadingCodecId,
+    /// Encoded reading payload bytes.
+    pub payload: Vec<u8>,
+}
+
+/// Result of retaining reading payload bytes.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct RetainReadingResult {
+    /// Descriptor naming both the retained bytes and their semantic read identity.
+    pub descriptor: RetainedReadingDescriptor,
+}
+
+/// Request to reveal a retained reading payload.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct RevealReadingRequest {
+    /// Retained-reading key returned by retention.
+    pub key: RetainedReadingKey,
+    /// Exact semantic identity the caller is authorized to reveal.
+    pub read_identity: ReadIdentity,
+}
+
+/// Result of revealing a retained reading payload.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct RevealReadingResult {
+    /// Descriptor for the revealed payload.
+    pub descriptor: RetainedReadingDescriptor,
+    /// Encoded retained payload bytes.
+    pub payload: Vec<u8>,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct RetainedReadingCacheEntry {
+    descriptor: RetainedReadingDescriptor,
+    payload: Vec<u8>,
+}
+
+/// In-memory semantic cache for retained optic readings.
+///
+/// This cache is intentionally above CAS. The content hash names bytes, while
+/// the retained-reading key names bytes plus the semantic `ReadIdentity` and
+/// codec. Revealing by content hash alone is not a supported operation.
+#[derive(Clone, PartialEq, Eq, Debug, Default)]
+pub struct RetainedReadingCache {
+    entries: BTreeMap<RetainedReadingKey, RetainedReadingCacheEntry>,
+    content_index: BTreeMap<Hash, BTreeSet<RetainedReadingKey>>,
+}
+
+impl RetainedReadingCache {
+    /// Returns the number of retained semantic readings.
+ #[must_use] + pub fn len(&self) -> usize { + self.entries.len() + } + + /// Returns `true` when the cache has no retained readings. + #[must_use] + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + + /// Retains encoded reading bytes under their semantic read identity. + /// + /// The derived key includes the content hash, codec, byte length, and exact + /// read identity. Equal bytes answering different questions therefore retain + /// under different keys. + pub fn retain_reading(&mut self, request: RetainReadingRequest) -> RetainReadingResult { + let content_hash = retained_payload_hash(&request.payload); + let byte_len = request.payload.len() as u64; + let descriptor = RetainedReadingDescriptor::new( + request.read_identity, + content_hash, + request.codec_id, + byte_len, + ); + self.content_index + .entry(content_hash) + .or_default() + .insert(descriptor.key); + self.entries.insert( + descriptor.key, + RetainedReadingCacheEntry { + descriptor: descriptor.clone(), + payload: request.payload, + }, + ); + RetainReadingResult { descriptor } + } + + /// Reveals retained reading bytes only when key and read identity both match. + /// + /// A content hash alone cannot reveal payload bytes because it does not name + /// the coordinate, aperture, witness basis, projection/reducer versions, + /// rights posture, budget posture, or residual posture the bytes answer. 
+    pub fn reveal_reading(
+        &self,
+        request: &RevealReadingRequest,
+    ) -> Result<RevealReadingResult, Box<OpticObstruction>> {
+        let Some(entry) = self.entries.get(&request.key) else {
+            return Err(retained_reading_obstruction(
+                request.key,
+                &request.read_identity,
+                "retained reading key was not found",
+            ));
+        };
+        if entry.descriptor.read_identity != request.read_identity {
+            return Err(retained_reading_obstruction(
+                request.key,
+                &request.read_identity,
+                "retained reading identity mismatch; reveal requires the exact read identity",
+            ));
+        }
+
+        Ok(RevealReadingResult {
+            descriptor: entry.descriptor.clone(),
+            payload: entry.payload.clone(),
+        })
+    }
+
+    /// Returns retained-reading keys that share the same byte content hash.
+    ///
+    /// This is an index/diagnostic query, not reveal authority. Callers must use
+    /// [`Self::reveal_reading`] with an exact `ReadIdentity` to obtain bytes.
+    #[must_use]
+    pub fn keys_for_content_hash(&self, content_hash: Hash) -> Vec<RetainedReadingKey> {
+        self.content_index
+            .get(&content_hash)
+            .map_or_else(Vec::new, |keys| keys.iter().copied().collect())
+    }
+}
+
+/// Bounded read request through an Echo optic.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct ObserveOpticRequest {
+    /// Optic being observed.
+    pub optic_id: OpticId,
+    /// Focus being observed.
+    pub focus: OpticFocus,
+    /// Explicit causal coordinate for the read.
+    pub coordinate: EchoCoordinate,
+    /// Bounded aperture selected by the read.
+    pub aperture: OpticAperture,
+    /// Projection law version requested by the read.
+    pub projection_version: ProjectionVersion,
+    /// Reducer law version requested by the read, when present.
+    pub reducer_version: Option<ReducerVersion>,
+    /// Capability basis for the read.
+    pub capability: OpticCapabilityId,
+}
+
+impl ObserveOpticRequest {
+    /// Converts the request to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::ObserveOpticRequest {
+        abi::ObserveOpticRequest {
+            optic_id: optic_id_to_abi(self.optic_id),
+            focus: self.focus.to_abi(),
+            coordinate: self.coordinate.to_abi(),
+            aperture: self.aperture.to_abi(),
+            projection_version: self.projection_version.to_abi(),
+            reducer_version: self.reducer_version.map(ReducerVersion::to_abi),
+            capability: optic_capability_id_to_abi(self.capability),
+        }
+    }
+}
+
+/// Intent payload dispatched through an optic.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum OpticIntentPayload {
+    /// Canonical Echo intent v1 bytes.
+    EintV1 {
+        /// Complete EINT v1 envelope bytes.
+        bytes: Vec<u8>,
+    },
+}
+
+impl OpticIntentPayload {
+    /// Converts the payload to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::OpticIntentPayload {
+        match self {
+            Self::EintV1 { bytes } => abi::OpticIntentPayload::EintV1 {
+                bytes: bytes.clone(),
+            },
+        }
+    }
+}
+
+/// Write-side proposal request through an Echo optic.
+///
+/// This is not a setter. It names an explicit causal basis and carries an
+/// intent payload for normal Echo admission.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct DispatchOpticIntentRequest {
+    /// Optic being used as the proposal boundary.
+    pub optic_id: OpticId,
+    /// Explicit causal basis for the proposal.
+    pub base_coordinate: EchoCoordinate,
+    /// Intent family being proposed.
+    pub intent_family: IntentFamilyId,
+    /// Focus targeted by the proposal.
+    pub focus: OpticFocus,
+    /// Actor/cause associated with the proposal.
+    pub cause: OpticCause,
+    /// Capability basis for the proposal.
+    pub capability: OpticCapability,
+    /// Admission law requested for the proposal.
+    pub admission_law: AdmissionLawId,
+    /// Intent payload carried by the proposal.
+    pub payload: OpticIntentPayload,
+}
+
+impl DispatchOpticIntentRequest {
+    /// Validates the generic optic proposal boundary without dispatching it.
+    ///
+    /// # Errors
+    ///
+    /// Returns a typed obstruction when focus, base coordinate, actor,
+    /// capability, intent family, or payload evidence does not line up.
+    pub fn validate_proposal(&self) -> Result<(), Box<OpticObstruction>> {
+        if !focus_matches_coordinate(&self.focus, &self.base_coordinate) {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::ConflictingFrontier,
+                "optic dispatch focus and base coordinate name different subjects",
+            ));
+        }
+
+        if self.capability.actor != self.cause.actor {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::CapabilityDenied,
+                "optic dispatch capability actor does not match cause actor",
+            ));
+        }
+
+        if self.capability.allowed_focus != self.focus {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::CapabilityDenied,
+                "optic dispatch capability does not authorize focus",
+            ));
+        }
+
+        if self.capability.allowed_intent_family != self.intent_family {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::UnsupportedIntentFamily,
+                "optic dispatch capability does not authorize intent family",
+            ));
+        }
+
+        match &self.payload {
+            OpticIntentPayload::EintV1 { bytes } => {
+                if let Err(error) = echo_wasm_abi::unpack_intent_v1(bytes) {
+                    return Err(self.dispatch_obstruction(
+                        OpticObstructionKind::UnsupportedIntentFamily,
+                        format!("optic dispatch EINT v1 payload is malformed: {error}"),
+                    ));
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    /// Validates the proposal against a known current coordinate.
+    ///
+    /// # Errors
+    ///
+    /// Returns [`OpticObstructionKind::StaleBasis`] when the request names a
+    /// concrete base coordinate older than the supplied current coordinate.
+    pub fn validate_proposal_against_current(
+        &self,
+        current_coordinate: &EchoCoordinate,
+    ) -> Result<(), Box<OpticObstruction>> {
+        self.validate_proposal()?;
+
+        if !coordinates_name_same_subject(&self.base_coordinate, current_coordinate) {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::ConflictingFrontier,
+                "optic dispatch current coordinate names a different subject",
+            ));
+        }
+
+        if base_coordinate_is_stale(&self.base_coordinate, current_coordinate) {
+            return Err(self.dispatch_obstruction(
+                OpticObstructionKind::StaleBasis,
+                "optic dispatch base coordinate is stale relative to current frontier",
+            ));
+        }
+
+        Ok(())
+    }
+
+    fn dispatch_obstruction(
+        &self,
+        kind: OpticObstructionKind,
+        message: impl Into<String>,
+    ) -> Box<OpticObstruction> {
+        Box::new(OpticObstruction {
+            kind,
+            optic_id: Some(self.optic_id),
+            focus: Some(self.focus.clone()),
+            coordinate: Some(self.base_coordinate.clone()),
+            witness_basis: None,
+            message: message.into(),
+        })
+    }
+
+    /// Converts the request to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::DispatchOpticIntentRequest {
+        abi::DispatchOpticIntentRequest {
+            optic_id: optic_id_to_abi(self.optic_id),
+            base_coordinate: self.base_coordinate.to_abi(),
+            intent_family: intent_family_id_to_abi(self.intent_family),
+            focus: self.focus.to_abi(),
+            cause: self.cause.to_abi(),
+            capability: self.capability.to_abi(),
+            admission_law: admission_law_id_to_abi(self.admission_law),
+            payload: self.payload.to_abi(),
+        }
+    }
+}
+
+/// Successful bounded reading returned through an optic.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct OpticReading {
+    /// Reading-envelope metadata.
+    pub envelope: ReadingEnvelope,
+    /// Stable read identity for the question this reading answered.
+    pub read_identity: ReadIdentity,
+    /// Observation payload emitted by the observer.
+    pub payload: ObservationPayload,
+    /// Retained reading key, when the payload was retained.
+    pub retained: Option<RetainedReadingKey>,
+}
+
+impl OpticReading {
+    /// Converts the reading to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::OpticReading {
+        abi::OpticReading {
+            envelope: self.envelope.to_abi(),
+            read_identity: self.read_identity.to_abi(),
+            payload: self.payload.to_abi(),
+            retained: self.retained.map(retained_reading_key_to_abi),
+        }
+    }
+}
+
+/// Result of observing an optic.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum ObserveOpticResult {
+    /// The optic emitted a bounded reading.
+    Reading(Box<OpticReading>),
+    /// The optic could not lawfully emit a reading.
+    Obstructed(Box<OpticObstruction>),
+}
+
+impl ObserveOpticResult {
+    /// Converts the result to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::ObserveOpticResult {
+        match self {
+            Self::Reading(reading) => abi::ObserveOpticResult::Reading(Box::new(reading.to_abi())),
+            Self::Obstructed(obstruction) => {
+                abi::ObserveOpticResult::Obstructed(Box::new(obstruction.to_abi()))
+            }
+        }
+    }
+}
+
+/// Deterministic reason an optic read or dispatch could not lawfully proceed.
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum OpticObstructionKind {
+    /// Required witness evidence is unavailable.
+    MissingWitness,
+    /// A retained reading named by the optic cannot be found or revealed.
+    MissingRetainedReading,
+    /// The dispatch named a base coordinate that is no longer the admitted basis.
+    StaleBasis,
+    /// The capability basis does not authorize the requested read or dispatch.
+    CapabilityDenied,
+    /// The declared read or dispatch budget was exceeded.
+    BudgetExceeded,
+    /// The requested aperture is not supported by this optic or projection law.
+    UnsupportedAperture,
+    /// The requested projection law/version is not available.
+ UnsupportedProjectionLaw, + /// The requested intent family is not available through this optic. + UnsupportedIntentFamily, + /// The read reached an attachment boundary and explicit descent is required. + AttachmentDescentRequired, + /// The requested attachment descent is not authorized. + AttachmentDescentDenied, + /// A live-tail read requires additional bounded reduction before it is honest. + LiveTailRequiresReduction, + /// The requested coordinate names an incompatible frontier. + ConflictingFrontier, + /// The request would collapse plurality without an explicit policy. + PluralityRequiresExplicitPolicy, +} + +impl OpticObstructionKind { + fn to_abi(self) -> abi::OpticObstructionKind { + match self { + Self::MissingWitness => abi::OpticObstructionKind::MissingWitness, + Self::MissingRetainedReading => abi::OpticObstructionKind::MissingRetainedReading, + Self::StaleBasis => abi::OpticObstructionKind::StaleBasis, + Self::CapabilityDenied => abi::OpticObstructionKind::CapabilityDenied, + Self::BudgetExceeded => abi::OpticObstructionKind::BudgetExceeded, + Self::UnsupportedAperture => abi::OpticObstructionKind::UnsupportedAperture, + Self::UnsupportedProjectionLaw => abi::OpticObstructionKind::UnsupportedProjectionLaw, + Self::UnsupportedIntentFamily => abi::OpticObstructionKind::UnsupportedIntentFamily, + Self::AttachmentDescentRequired => abi::OpticObstructionKind::AttachmentDescentRequired, + Self::AttachmentDescentDenied => abi::OpticObstructionKind::AttachmentDescentDenied, + Self::LiveTailRequiresReduction => abi::OpticObstructionKind::LiveTailRequiresReduction, + Self::ConflictingFrontier => abi::OpticObstructionKind::ConflictingFrontier, + Self::PluralityRequiresExplicitPolicy => { + abi::OpticObstructionKind::PluralityRequiresExplicitPolicy + } + } + } +} + +/// Typed obstruction returned instead of a hidden fallback or fake success. 
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct OpticObstruction {
+    /// Deterministic obstruction kind.
+    pub kind: OpticObstructionKind,
+    /// Optic implicated by the obstruction, when known.
+    pub optic_id: Option<OpticId>,
+    /// Focus implicated by the obstruction, when known.
+    pub focus: Option<OpticFocus>,
+    /// Coordinate implicated by the obstruction, when known.
+    pub coordinate: Option<EchoCoordinate>,
+    /// Witness basis posture that explains evidence availability, when known.
+    pub witness_basis: Option<WitnessBasis>,
+    /// Human-readable diagnostic text.
+    pub message: String,
+}
+
+impl OpticObstruction {
+    /// Converts the obstruction to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::OpticObstruction {
+        abi::OpticObstruction {
+            kind: self.kind.to_abi(),
+            optic_id: self.optic_id.map(optic_id_to_abi),
+            focus: self.focus.as_ref().map(OpticFocus::to_abi),
+            coordinate: self.coordinate.as_ref().map(EchoCoordinate::to_abi),
+            witness_basis: self.witness_basis.as_ref().map(WitnessBasis::to_abi),
+            message: self.message.clone(),
+        }
+    }
+}
+
+/// Admission result for an optic intent that Echo accepted into witnessed history.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct AdmittedIntent {
+    /// Optic through which the intent was dispatched.
+    pub optic_id: OpticId,
+    /// Explicit causal basis named by the dispatch.
+    pub base_coordinate: EchoCoordinate,
+    /// Intent family admitted through the optic.
+    pub intent_family: IntentFamilyId,
+    /// Provenance coordinate produced or identified by admission.
+    pub admitted_ref: ProvenanceRef,
+    /// Receipt digest witnessing the admission outcome.
+    pub receipt_hash: Hash,
+}
+
+impl AdmittedIntent {
+    /// Converts the admitted outcome to the shared ABI DTO.
+ #[must_use] + pub fn to_abi(&self) -> abi::AdmittedIntent { + abi::AdmittedIntent { + optic_id: optic_id_to_abi(self.optic_id), + base_coordinate: self.base_coordinate.to_abi(), + intent_family: intent_family_id_to_abi(self.intent_family), + admitted_ref: provenance_ref_to_abi(self.admitted_ref), + receipt_hash: self.receipt_hash.to_vec(), + } + } +} + +/// Reason an optic intent is staged instead of admitted immediately. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum StagedIntentReason { + /// The proposal needs an explicit rebase before admission can proceed. + RebaseRequired, + /// The proposal is waiting for additional capability evidence. + AwaitingCapability, + /// The proposal is waiting for additional witness evidence. + AwaitingWitness, + /// The proposal was deliberately staged for later explicit admission. + AwaitingExplicitAdmission, +} + +impl StagedIntentReason { + fn to_abi(self) -> abi::StagedIntentReason { + match self { + Self::RebaseRequired => abi::StagedIntentReason::RebaseRequired, + Self::AwaitingCapability => abi::StagedIntentReason::AwaitingCapability, + Self::AwaitingWitness => abi::StagedIntentReason::AwaitingWitness, + Self::AwaitingExplicitAdmission => abi::StagedIntentReason::AwaitingExplicitAdmission, + } + } +} + +/// Admission result for an optic intent retained without mutating the frontier. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct StagedIntent { + /// Optic through which the intent was dispatched. + pub optic_id: OpticId, + /// Explicit causal basis named by the dispatch. + pub base_coordinate: EchoCoordinate, + /// Intent family proposed through the optic. + pub intent_family: IntentFamilyId, + /// Stable digest or storage key for the staged proposal. + pub stage_ref: Hash, + /// Deterministic reason the proposal is staged. 
+    pub reason: StagedIntentReason,
+}
+
+impl StagedIntent {
+    /// Converts the staged outcome to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::StagedIntent {
+        abi::StagedIntent {
+            optic_id: optic_id_to_abi(self.optic_id),
+            base_coordinate: self.base_coordinate.to_abi(),
+            intent_family: intent_family_id_to_abi(self.intent_family),
+            stage_ref: self.stage_ref.to_vec(),
+            reason: self.reason.to_abi(),
+        }
+    }
+}
+
+/// Admission result that preserves lawful plurality instead of selecting one winner.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct PluralIntent {
+    /// Optic through which the intent was dispatched.
+    pub optic_id: OpticId,
+    /// Explicit causal basis named by the dispatch.
+    pub base_coordinate: EchoCoordinate,
+    /// Intent family proposed through the optic.
+    pub intent_family: IntentFamilyId,
+    /// Candidate coordinates that remain lawful plural outcomes.
+    pub candidate_refs: Vec<ProvenanceRef>,
+    /// Residual posture associated with the preserved plurality.
+    pub residual_posture: ReadingResidualPosture,
+}
+
+impl PluralIntent {
+    /// Converts the plural outcome to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::PluralIntent {
+        abi::PluralIntent {
+            optic_id: optic_id_to_abi(self.optic_id),
+            base_coordinate: self.base_coordinate.to_abi(),
+            intent_family: intent_family_id_to_abi(self.intent_family),
+            candidate_refs: self
+                .candidate_refs
+                .iter()
+                .copied()
+                .map(provenance_ref_to_abi)
+                .collect(),
+            residual_posture: reading_residual_posture_to_abi(self.residual_posture),
+        }
+    }
+}
+
+/// Deterministic conflict reason for an optic intent dispatch.
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum IntentConflictReason {
+    /// The named base coordinate is no longer the applicable basis.
+    StaleBasis,
+    /// The request conflicts with the named or observed frontier.
+    ConflictingFrontier,
+    /// Capability evidence conflicts with the requested operation.
+    CapabilityConflict,
+    /// The verified footprint conflicts with concurrent causal claims.
+    FootprintConflict,
+    /// The requested admission law conflicts with the available host law.
+    AdmissionLawConflict,
+    /// The request needs an explicit plurality policy before admission.
+    UnsupportedPluralityPolicy,
+}
+
+impl IntentConflictReason {
+    fn to_abi(self) -> abi::IntentConflictReason {
+        match self {
+            Self::StaleBasis => abi::IntentConflictReason::StaleBasis,
+            Self::ConflictingFrontier => abi::IntentConflictReason::ConflictingFrontier,
+            Self::CapabilityConflict => abi::IntentConflictReason::CapabilityConflict,
+            Self::FootprintConflict => abi::IntentConflictReason::FootprintConflict,
+            Self::AdmissionLawConflict => abi::IntentConflictReason::AdmissionLawConflict,
+            Self::UnsupportedPluralityPolicy => {
+                abi::IntentConflictReason::UnsupportedPluralityPolicy
+            }
+        }
+    }
+}
+
+/// Admission result for incompatible causal claims under an optic dispatch.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct IntentConflict {
+    /// Optic through which the intent was dispatched.
+    pub optic_id: OpticId,
+    /// Explicit causal basis named by the dispatch.
+    pub base_coordinate: EchoCoordinate,
+    /// Intent family proposed through the optic.
+    pub intent_family: IntentFamilyId,
+    /// Deterministic conflict reason.
+    pub reason: IntentConflictReason,
+    /// Provenance coordinate implicated by the conflict, when known.
+    pub conflict_ref: Option<ProvenanceRef>,
+    /// Digest of compact conflict evidence.
+    pub evidence_digest: Hash,
+    /// Human-readable diagnostic text.
+    pub message: String,
+}
+
+impl IntentConflict {
+    /// Converts the conflict outcome to the shared ABI DTO.
+ #[must_use] + pub fn to_abi(&self) -> abi::IntentConflict { + abi::IntentConflict { + optic_id: optic_id_to_abi(self.optic_id), + base_coordinate: self.base_coordinate.to_abi(), + intent_family: intent_family_id_to_abi(self.intent_family), + reason: self.reason.to_abi(), + conflict_ref: self.conflict_ref.map(provenance_ref_to_abi), + evidence_digest: self.evidence_digest.to_vec(), + message: self.message.clone(), + } + } +} + +/// Typed top-level result for dispatching an intent through an optic. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum IntentDispatchResult { + /// Echo accepted the intent into witnessed history. + Admitted(AdmittedIntent), + /// Echo retained the proposal without mutating the named frontier. + Staged(StagedIntent), + /// Echo preserved lawful plurality instead of selecting a single result. + Plural(PluralIntent), + /// Echo found incompatible causal claims under the named admission law. + Conflict(IntentConflict), + /// Echo could not lawfully proceed because basis, evidence, rights, or law is missing. + Obstructed(OpticObstruction), +} + +impl IntentDispatchResult { + /// Converts the dispatch result to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::IntentDispatchResult { + match self { + Self::Admitted(outcome) => abi::IntentDispatchResult::Admitted(outcome.to_abi()), + Self::Staged(outcome) => abi::IntentDispatchResult::Staged(outcome.to_abi()), + Self::Plural(outcome) => abi::IntentDispatchResult::Plural(outcome.to_abi()), + Self::Conflict(outcome) => abi::IntentDispatchResult::Conflict(outcome.to_abi()), + Self::Obstructed(obstruction) => { + abi::IntentDispatchResult::Obstructed(obstruction.to_abi()) + } + } + } +} + +/// Auditable cause for opening, closing, observing, or dispatching through an optic. 
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct OpticCause {
+    /// Actor associated with the cause.
+    pub actor: OpticActorId,
+    /// Stable digest of the host-level cause or request.
+    pub cause_hash: Hash,
+    /// Optional diagnostic label for humans.
+    pub label: Option<String>,
+}
+
+impl OpticCause {
+    /// Converts the cause to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::OpticCause {
+        abi::OpticCause {
+            actor: optic_actor_id_to_abi(self.actor),
+            cause_hash: self.cause_hash.to_vec(),
+            label: self.label.clone(),
+        }
+    }
+}
+
+/// Capability grant used while validating an optic descriptor.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct OpticCapability {
+    /// Stable capability identity retained in opened optic descriptors.
+    pub capability_id: OpticCapabilityId,
+    /// Actor to which the capability was issued.
+    pub actor: OpticActorId,
+    /// Provenance ref for the issuer or policy source, when available.
+    pub issuer_ref: Option<ProvenanceRef>,
+    /// Stable digest of the capability policy.
+    pub policy_hash: Hash,
+    /// Focus this minimal capability authorizes.
+    pub allowed_focus: OpticFocus,
+    /// Projection law version this capability authorizes.
+    pub projection_version: ProjectionVersion,
+    /// Reducer law version this capability authorizes, when required.
+    pub reducer_version: Option<ReducerVersion>,
+    /// Intent family this capability authorizes.
+    pub allowed_intent_family: IntentFamilyId,
+    /// Maximum read budget authorized by this capability.
+    pub max_budget: OpticReadBudget,
+}
+
+impl OpticCapability {
+    /// Converts the capability to the shared ABI DTO.
+    #[must_use]
+    pub fn to_abi(&self) -> abi::OpticCapability {
+        abi::OpticCapability {
+            capability_id: optic_capability_id_to_abi(self.capability_id),
+            actor: optic_actor_id_to_abi(self.actor),
+            issuer_ref: self.issuer_ref.map(provenance_ref_to_abi),
+            policy_hash: self.policy_hash.to_vec(),
+            allowed_focus: self.allowed_focus.to_abi(),
+            projection_version: self.projection_version.to_abi(),
+            reducer_version: self.reducer_version.map(ReducerVersion::to_abi),
+            allowed_intent_family: intent_family_id_to_abi(self.allowed_intent_family),
+            max_budget: self.max_budget.to_abi(),
+        }
+    }
+}
+
+/// Capability posture returned after successfully validating an optic descriptor.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub enum CapabilityPosture {
+    /// The descriptor is authorized by the named capability grant.
+    Granted {
+        /// Capability identity retained in the opened descriptor.
+        capability_id: OpticCapabilityId,
+        /// Actor to which the capability was issued.
+        actor: OpticActorId,
+        /// Provenance ref for the issuer or policy source, when available.
+        issuer_ref: Option<ProvenanceRef>,
+        /// Stable digest of the capability policy.
+        policy_hash: Hash,
+    },
+}
+
+impl CapabilityPosture {
+    fn to_abi(&self) -> abi::CapabilityPosture {
+        match self {
+            Self::Granted {
+                capability_id,
+                actor,
+                issuer_ref,
+                policy_hash,
+            } => abi::CapabilityPosture::Granted {
+                capability_id: optic_capability_id_to_abi(*capability_id),
+                actor: optic_actor_id_to_abi(*actor),
+                issuer_ref: issuer_ref.map(provenance_ref_to_abi),
+                policy_hash: policy_hash.to_vec(),
+            },
+        }
+    }
+}
+
+/// Descriptor-validation request for opening a session-local optic resource.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct OpenOpticRequest {
+    /// Lawful subject being observed or targeted by intent dispatch.
+    pub focus: OpticFocus,
+    /// Explicit causal coordinate for the optic descriptor.
+    pub coordinate: EchoCoordinate,
+    /// Projection law version requested by the descriptor.
+    pub projection_version: ProjectionVersion,
+    /// Reducer law version requested by the descriptor, when present.
+    pub reducer_version: Option<ReducerVersion>,
+    /// Intent family allowed through the opened optic.
+    pub intent_family: IntentFamilyId,
+    /// Capability grant used to validate this descriptor.
+    pub capability: OpticCapability,
+    /// Auditable cause for opening the descriptor.
+    pub cause: OpticCause,
+}
+
+impl OpenOpticRequest {
+    /// Validates the descriptor and derives the session-local optic identity.
+    ///
+    /// This is descriptor validation only. It does not create a mutable handle
+    /// to the subject and does not mutate causal history.
+    ///
+    /// # Errors
+    ///
+    /// Returns a typed obstruction when focus, coordinate, projection law,
+    /// reducer law, intent family, or capability evidence does not line up.
+    pub fn validate_descriptor(&self) -> Result<OpenOpticResult, OpticOpenError> {
+        if !focus_matches_coordinate(&self.focus, &self.coordinate) {
+            return Err(self.open_obstruction(
+                OpticObstructionKind::ConflictingFrontier,
+                "optic focus and coordinate name different subjects",
+            ));
+        }
+
+        if self.capability.actor != self.cause.actor {
+            return Err(self.open_obstruction(
+                OpticObstructionKind::CapabilityDenied,
+                "capability actor does not match optic cause actor",
+            ));
+        }
+
+        if self.capability.allowed_focus != self.focus {
+            return Err(self.open_obstruction(
+                OpticObstructionKind::CapabilityDenied,
+                "capability does not authorize optic focus",
+            ));
+        }
+
+        if self.capability.projection_version != self.projection_version
+            || self.capability.reducer_version != self.reducer_version
+        {
+            return Err(self.open_obstruction(
+                OpticObstructionKind::UnsupportedProjectionLaw,
+                "capability does not authorize projection or reducer law",
+            ));
+        }
+
+        if self.capability.allowed_intent_family != self.intent_family {
+            return Err(self.open_obstruction(
+                OpticObstructionKind::UnsupportedIntentFamily,
+                "capability does not authorize intent family",
+            ));
+        }
+
+        let optic = EchoOptic::new(
+            self.focus.clone(),
+            self.coordinate.clone(),
+            self.projection_version,
+            self.reducer_version,
+            self.intent_family,
+            self.capability.capability_id,
+        );
+        Ok(OpenOpticResult {
+            optic,
+            capability_posture: CapabilityPosture::Granted {
+                capability_id: self.capability.capability_id,
+                actor: self.capability.actor,
+                issuer_ref: self.capability.issuer_ref,
+                policy_hash: self.capability.policy_hash,
+            },
+        })
+    }
+
+    fn open_obstruction(&self, kind: OpticObstructionKind, message: &str) -> OpticOpenError {
+        OpticOpenError::Obstructed(Box::new(OpticObstruction {
+            kind,
+            optic_id: None,
+            focus: Some(self.focus.clone()),
+            coordinate: Some(self.coordinate.clone()),
+            witness_basis: None,
+            message: message.to_owned(),
+        }))
+    }
+
+    /// Converts the request to the shared ABI DTO.
+ #[must_use] + pub fn to_abi(&self) -> abi::OpenOpticRequest { + abi::OpenOpticRequest { + focus: self.focus.to_abi(), + coordinate: self.coordinate.to_abi(), + projection_version: self.projection_version.to_abi(), + reducer_version: self.reducer_version.map(ReducerVersion::to_abi), + intent_family: intent_family_id_to_abi(self.intent_family), + capability: self.capability.to_abi(), + cause: self.cause.to_abi(), + } + } +} + +/// Successful descriptor-validation result for opening an optic. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpenOpticResult { + /// Opened optic descriptor. This is not a mutable subject handle. + pub optic: EchoOptic, + /// Capability posture that authorized the descriptor. + pub capability_posture: CapabilityPosture, +} + +impl OpenOpticResult { + /// Converts the result to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::OpenOpticResult { + abi::OpenOpticResult { + optic: self.optic.to_abi(), + capability_posture: self.capability_posture.to_abi(), + } + } +} + +/// Error returned while opening an optic descriptor. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OpticOpenError { + /// Opening failed with a typed obstruction. + Obstructed(Box), +} + +impl OpticOpenError { + /// Converts the error to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::OpticOpenError { + match self { + Self::Obstructed(obstruction) => abi::OpticOpenError::Obstructed(obstruction.to_abi()), + } + } +} + +/// Request for releasing a session-local optic descriptor resource. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct CloseOpticRequest { + /// Optic descriptor to release from the session. + pub optic_id: OpticId, + /// Auditable cause for closing the descriptor. 
+ pub cause: OpticCause, +} + +impl CloseOpticRequest { + /// Builds the close result without naming or mutating any subject coordinate. + #[must_use] + pub fn close_session_descriptor(&self) -> CloseOpticResult { + CloseOpticResult { + optic_id: self.optic_id, + } + } + + /// Converts the request to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::CloseOpticRequest { + abi::CloseOpticRequest { + optic_id: optic_id_to_abi(self.optic_id), + cause: self.cause.to_abi(), + } + } +} + +/// Result for releasing a session-local optic descriptor resource. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct CloseOpticResult { + /// Optic descriptor released from the session. + pub optic_id: OpticId, +} + +impl CloseOpticResult { + /// Converts the result to the shared ABI DTO. + #[must_use] + pub fn to_abi(self) -> abi::CloseOpticResult { + abi::CloseOpticResult { + optic_id: optic_id_to_abi(self.optic_id), + } + } +} + +/// Error returned while closing an optic descriptor. +#[derive(Clone, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OpticCloseError { + /// Closing failed with a typed obstruction. + Obstructed(Box), +} + +impl OpticCloseError { + /// Converts the error to the shared ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::OpticCloseError { + match self { + Self::Obstructed(obstruction) => abi::OpticCloseError::Obstructed(obstruction.to_abi()), + } + } +} + +/// Narrow built-in example optic over a worldline head. +/// +/// This type exists to validate the ergonomics of the generic optics API +/// without introducing a universal optic engine or an application-specific +/// substrate. It is still only a request builder: reads go through +/// `observe_optic`, and proposals go through `dispatch_optic_intent`. 
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct WorldlineHeadOptic {
+    /// Opened optic descriptor. This is not a mutable handle.
+    pub optic: EchoOptic,
+    /// Capability grant that authorized the descriptor.
+    pub capability: OpticCapability,
+}
+
+impl WorldlineHeadOptic {
+    /// Opens a narrow worldline-head optic descriptor.
+    ///
+    /// # Errors
+    ///
+    /// Returns the same typed open obstruction as [`OpenOpticRequest`] when
+    /// descriptor fields or capability evidence do not line up.
+    pub fn open(
+        worldline_id: WorldlineId,
+        coordinate_at: CoordinateAt,
+        actor: OpticActorId,
+        capability_id: OpticCapabilityId,
+        intent_family: IntentFamilyId,
+        policy_hash: Hash,
+    ) -> Result<Self, OpticOpenError> {
+        let focus = OpticFocus::Worldline { worldline_id };
+        let coordinate = EchoCoordinate::Worldline {
+            worldline_id,
+            at: coordinate_at,
+        };
+        let capability = OpticCapability {
+            capability_id,
+            actor,
+            issuer_ref: None,
+            policy_hash,
+            allowed_focus: focus.clone(),
+            projection_version: ProjectionVersion::from_raw(1),
+            reducer_version: None,
+            allowed_intent_family: intent_family,
+            max_budget: OpticReadBudget {
+                max_bytes: Some(4096),
+                max_nodes: Some(64),
+                max_ticks: Some(16),
+                max_attachments: Some(0),
+            },
+        };
+        let cause = OpticCause {
+            actor,
+            cause_hash: derive_example_cause_hash(
+                worldline_id,
+                coordinate_at,
+                intent_family,
+                capability_id,
+            ),
+            label: Some("worldline head optic example".to_owned()),
+        };
+        let request = OpenOpticRequest {
+            focus,
+            coordinate,
+            projection_version: ProjectionVersion::from_raw(1),
+            reducer_version: None,
+            intent_family,
+            capability: capability.clone(),
+            cause,
+        };
+        let result = request.validate_descriptor()?;
+        Ok(Self {
+            optic: result.optic,
+            capability,
+        })
+    }
+
+    /// Builds a bounded head read request for this optic.
+ #[must_use] + pub fn observe_head_request(&self, budget: OpticReadBudget) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id: self.optic.optic_id, + focus: self.optic.focus.clone(), + coordinate: self.optic.coordinate.clone(), + aperture: OpticAperture { + shape: OpticApertureShape::Head, + budget, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }, + projection_version: self.optic.projection_version, + reducer_version: self.optic.reducer_version, + capability: self.capability.capability_id, + } + } + + /// Builds a QueryBytes-shaped read request against this optic. + /// + /// The current example host does not install a contract query observer, so + /// executing this request should produce a typed projection-law obstruction. + #[must_use] + pub fn observe_query_bytes_request( + &self, + query_id: u32, + vars_digest: Hash, + budget: OpticReadBudget, + ) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id: self.optic.optic_id, + focus: self.optic.focus.clone(), + coordinate: self.optic.coordinate.clone(), + aperture: OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id, + vars_digest, + }, + budget, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }, + projection_version: self.optic.projection_version, + reducer_version: self.optic.reducer_version, + capability: self.capability.capability_id, + } + } + + /// Builds an EINT v1 proposal request with an explicit causal basis. + /// + /// This is intentionally not named `set`: the returned value is a proposal + /// request that must be passed to `dispatch_optic_intent`. 
+    #[must_use]
+    pub fn dispatch_eint_v1_request(
+        &self,
+        base_coordinate: EchoCoordinate,
+        cause: OpticCause,
+        admission_law: AdmissionLawId,
+        bytes: Vec<u8>,
+    ) -> DispatchOpticIntentRequest {
+        DispatchOpticIntentRequest {
+            optic_id: self.optic.optic_id,
+            base_coordinate,
+            intent_family: self.optic.intent_family,
+            focus: self.optic.focus.clone(),
+            cause,
+            capability: self.capability.clone(),
+            admission_law,
+            payload: OpticIntentPayload::EintV1 { bytes },
+        }
+    }
+}
+
+fn focus_matches_coordinate(focus: &OpticFocus, coordinate: &EchoCoordinate) -> bool {
+    match (focus, coordinate) {
+        (
+            OpticFocus::Worldline { worldline_id },
+            EchoCoordinate::Worldline {
+                worldline_id: coordinate_worldline,
+                ..
+            },
+        ) => worldline_id == coordinate_worldline,
+        (
+            OpticFocus::Strand { strand_id },
+            EchoCoordinate::Strand {
+                strand_id: coordinate_strand,
+                ..
+            },
+        ) => strand_id == coordinate_strand,
+        (
+            OpticFocus::Braid { braid_id },
+            EchoCoordinate::Braid {
+                braid_id: coordinate_braid,
+                ..
+            },
+        ) => braid_id == coordinate_braid,
+        (
+            OpticFocus::RetainedReading { key },
+            EchoCoordinate::RetainedReading {
+                key: coordinate_key,
+            },
+        ) => key == coordinate_key,
+        (OpticFocus::AttachmentBoundary { .. }, _) => true,
+        _ => false,
+    }
+}
+
+fn coordinates_name_same_subject(base: &EchoCoordinate, current: &EchoCoordinate) -> bool {
+    match (base, current) {
+        (
+            EchoCoordinate::Worldline { worldline_id, .. },
+            EchoCoordinate::Worldline {
+                worldline_id: current_worldline,
+                ..
+            },
+        ) => worldline_id == current_worldline,
+        (
+            EchoCoordinate::Strand { strand_id, .. },
+            EchoCoordinate::Strand {
+                strand_id: current_strand,
+                ..
+            },
+        ) => strand_id == current_strand,
+        (
+            EchoCoordinate::Braid { braid_id, .. },
+            EchoCoordinate::Braid {
+                braid_id: current_braid,
+                ..
+ }, + ) => braid_id == current_braid, + ( + EchoCoordinate::RetainedReading { key }, + EchoCoordinate::RetainedReading { key: current_key }, + ) => key == current_key, + _ => false, + } +} + +fn base_coordinate_is_stale(base: &EchoCoordinate, current: &EchoCoordinate) -> bool { + match (base, current) { + ( + EchoCoordinate::Worldline { at, .. } | EchoCoordinate::Strand { at, .. }, + EchoCoordinate::Worldline { at: current_at, .. } + | EchoCoordinate::Strand { at: current_at, .. }, + ) => coordinate_at_tick(*at).is_some_and(|base_tick| { + coordinate_at_tick(*current_at).is_some_and(|current_tick| base_tick < current_tick) + }), + ( + EchoCoordinate::Braid { member_count, .. }, + EchoCoordinate::Braid { + member_count: current_member_count, + .. + }, + ) => member_count < current_member_count, + _ => false, + } +} + +fn coordinate_at_tick(at: CoordinateAt) -> Option { + match at { + CoordinateAt::Frontier => None, + CoordinateAt::Tick(tick) => Some(tick.as_u64()), + CoordinateAt::Provenance(reference) => Some(reference.worldline_tick.as_u64()), + } +} + +fn derive_optic_id( + focus: &OpticFocus, + coordinate: &EchoCoordinate, + projection_version: ProjectionVersion, + reducer_version: Option, + intent_family: IntentFamilyId, + capability: OpticCapabilityId, +) -> OpticId { + let mut hasher = Hasher::new(); + hasher.update(OPTIC_ID_DOMAIN); + focus.feed_hash(&mut hasher); + coordinate.feed_hash(&mut hasher); + feed_u32(&mut hasher, projection_version.as_u32()); + match reducer_version { + Some(version) => { + feed_tag(&mut hasher, 1); + feed_u32(&mut hasher, version.as_u32()); + } + None => feed_tag(&mut hasher, 0), + } + hasher.update(intent_family.as_bytes()); + hasher.update(capability.as_bytes()); + OpticId::from_bytes(hasher.finalize().into()) +} + +fn derive_example_cause_hash( + worldline_id: WorldlineId, + coordinate_at: CoordinateAt, + intent_family: IntentFamilyId, + capability_id: OpticCapabilityId, +) -> Hash { + let mut hasher = Hasher::new(); + 
hasher.update(b"echo:worldline-head-optic-example-cause:v1\0"); + hasher.update(worldline_id.as_bytes()); + coordinate_at.feed_hash(&mut hasher); + hasher.update(intent_family.as_bytes()); + hasher.update(capability_id.as_bytes()); + hasher.finalize().into() +} + +#[allow(clippy::too_many_arguments)] +fn derive_read_identity_hash( + optic_id: OpticId, + focus_digest: &Hash, + coordinate: &EchoCoordinate, + aperture_digest: &Hash, + projection_version: ProjectionVersion, + reducer_version: Option, + witness_basis: &WitnessBasis, + rights_posture: ReadingRightsPosture, + budget_posture: ReadingBudgetPosture, + residual_posture: ReadingResidualPosture, +) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(READ_IDENTITY_DOMAIN); + hasher.update(optic_id.as_bytes()); + hasher.update(focus_digest); + coordinate.feed_hash(&mut hasher); + hasher.update(aperture_digest); + feed_u32(&mut hasher, projection_version.as_u32()); + match reducer_version { + Some(version) => { + feed_tag(&mut hasher, 1); + feed_u32(&mut hasher, version.as_u32()); + } + None => feed_tag(&mut hasher, 0), + } + witness_basis.feed_hash(&mut hasher); + feed_reading_rights_posture(&mut hasher, rights_posture); + feed_reading_budget_posture(&mut hasher, budget_posture); + feed_reading_residual_posture(&mut hasher, residual_posture); + hasher.finalize().into() +} + +fn derive_retained_reading_key( + read_identity: &ReadIdentity, + content_hash: &Hash, + codec_id: RetainedReadingCodecId, + byte_len: u64, +) -> RetainedReadingKey { + let mut hasher = Hasher::new(); + hasher.update(RETAINED_READING_KEY_DOMAIN); + hasher.update(&read_identity.read_identity_hash); + hasher.update(content_hash); + hasher.update(codec_id.as_bytes()); + feed_u64(&mut hasher, byte_len); + RetainedReadingKey::from_bytes(hasher.finalize().into()) +} + +fn retained_payload_hash(payload: &[u8]) -> Hash { + blake3::hash(payload).into() +} + +fn retained_reading_obstruction( + key: RetainedReadingKey, + read_identity: 
&ReadIdentity, + message: impl Into, +) -> Box { + Box::new(OpticObstruction { + kind: OpticObstructionKind::MissingRetainedReading, + optic_id: Some(read_identity.optic_id), + focus: Some(OpticFocus::RetainedReading { key }), + coordinate: Some(EchoCoordinate::RetainedReading { key }), + witness_basis: Some(read_identity.witness_basis.clone()), + message: message.into(), + }) +} + +fn feed_tag(hasher: &mut Hasher, tag: u8) { + hasher.update(&[tag]); +} + +fn feed_u32(hasher: &mut Hasher, value: u32) { + hasher.update(&value.to_le_bytes()); +} + +fn feed_u64(hasher: &mut Hasher, value: u64) { + hasher.update(&value.to_le_bytes()); +} + +fn feed_optional_u64(hasher: &mut Hasher, value: Option) { + match value { + Some(value) => { + feed_tag(hasher, 1); + feed_u64(hasher, value); + } + None => feed_tag(hasher, 0), + } +} + +fn feed_optional_hash_list(hasher: &mut Hasher, hashes: Option<&[TypeId]>) { + match hashes { + Some(hashes) => { + feed_tag(hasher, 1); + feed_u64(hasher, hashes.len() as u64); + for hash in hashes { + hasher.update(hash.as_bytes()); + } + } + None => feed_tag(hasher, 0), + } +} + +fn feed_optional_provenance_ref(hasher: &mut Hasher, reference: Option) { + match reference { + Some(reference) => { + feed_tag(hasher, 1); + feed_provenance_ref(hasher, reference); + } + None => feed_tag(hasher, 0), + } +} + +fn feed_provenance_ref(hasher: &mut Hasher, reference: ProvenanceRef) { + hasher.update(reference.worldline_id.as_bytes()); + feed_u64(hasher, reference.worldline_tick.as_u64()); + hasher.update(&reference.commit_hash); +} + +fn feed_reading_witness_ref(hasher: &mut Hasher, reference: &ReadingWitnessRef) { + match reference { + ReadingWitnessRef::ResolvedCommit { reference } => { + feed_tag(hasher, 1); + feed_provenance_ref(hasher, *reference); + } + ReadingWitnessRef::EmptyFrontier { + worldline_id, + state_root, + commit_hash, + } => { + feed_tag(hasher, 2); + hasher.update(worldline_id.as_bytes()); + hasher.update(state_root); + 
hasher.update(commit_hash); + } + } +} + +fn feed_reading_budget_posture(hasher: &mut Hasher, posture: ReadingBudgetPosture) { + match posture { + ReadingBudgetPosture::UnboundedOneShot => feed_tag(hasher, 1), + ReadingBudgetPosture::Bounded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + } => { + feed_tag(hasher, 2); + feed_u64(hasher, max_payload_bytes); + feed_u64(hasher, payload_bytes); + feed_u64(hasher, max_witness_refs); + feed_u64(hasher, witness_refs); + } + } +} + +fn feed_reading_rights_posture(hasher: &mut Hasher, posture: ReadingRightsPosture) { + match posture { + ReadingRightsPosture::KernelPublic => feed_tag(hasher, 1), + } +} + +fn feed_reading_residual_posture(hasher: &mut Hasher, posture: ReadingResidualPosture) { + match posture { + ReadingResidualPosture::Complete => feed_tag(hasher, 1), + ReadingResidualPosture::Residual => feed_tag(hasher, 2), + ReadingResidualPosture::PluralityPreserved => feed_tag(hasher, 3), + ReadingResidualPosture::Obstructed => feed_tag(hasher, 4), + } +} + +fn feed_attachment_key(hasher: &mut Hasher, key: AttachmentKey) { + match key.owner { + AttachmentOwner::Node(NodeKey { warp_id, local_id }) => { + feed_tag(hasher, 1); + hasher.update(warp_id.as_bytes()); + hasher.update(local_id.as_bytes()); + } + AttachmentOwner::Edge(EdgeKey { warp_id, local_id }) => { + feed_tag(hasher, 2); + hasher.update(warp_id.as_bytes()); + hasher.update(local_id.as_bytes()); + } + } + match key.plane { + AttachmentPlane::Alpha => feed_tag(hasher, 1), + AttachmentPlane::Beta => feed_tag(hasher, 2), + } +} + +fn optic_id_to_abi(id: OpticId) -> abi::OpticId { + abi::OpticId::from_bytes(*id.as_bytes()) +} + +fn braid_id_to_abi(id: BraidId) -> abi::BraidId { + abi::BraidId::from_bytes(*id.as_bytes()) +} + +fn retained_reading_key_to_abi(key: RetainedReadingKey) -> abi::RetainedReadingKey { + abi::RetainedReadingKey::from_bytes(*key.as_bytes()) +} + +fn retained_reading_codec_id_to_abi(id: RetainedReadingCodecId) 
-> abi::RetainedReadingCodecId { + abi::RetainedReadingCodecId::from_bytes(*id.as_bytes()) +} + +fn intent_family_id_to_abi(id: IntentFamilyId) -> abi::IntentFamilyId { + abi::IntentFamilyId::from_bytes(*id.as_bytes()) +} + +fn admission_law_id_to_abi(id: AdmissionLawId) -> abi::AdmissionLawId { + abi::AdmissionLawId::from_bytes(*id.as_bytes()) +} + +fn optic_capability_id_to_abi(id: OpticCapabilityId) -> abi::OpticCapabilityId { + abi::OpticCapabilityId::from_bytes(*id.as_bytes()) +} + +fn optic_actor_id_to_abi(id: OpticActorId) -> abi::OpticActorId { + abi::OpticActorId::from_bytes(*id.as_bytes()) +} + +fn worldline_id_to_abi(worldline_id: WorldlineId) -> abi::WorldlineId { + abi::WorldlineId::from_bytes(*worldline_id.as_bytes()) +} + +fn strand_id_to_abi(strand_id: StrandId) -> abi::StrandId { + abi::StrandId::from_bytes(*strand_id.as_bytes()) +} + +fn worldline_tick_to_abi(worldline_tick: WorldlineTick) -> abi::WorldlineTick { + abi::WorldlineTick(worldline_tick.as_u64()) +} + +fn provenance_ref_to_abi(reference: ProvenanceRef) -> abi::ProvenanceRef { + abi::ProvenanceRef { + worldline_id: worldline_id_to_abi(reference.worldline_id), + worldline_tick: worldline_tick_to_abi(reference.worldline_tick), + commit_hash: reference.commit_hash.to_vec(), + } +} + +fn reading_witness_ref_to_abi(reference: &ReadingWitnessRef) -> abi::ReadingWitnessRef { + match reference { + ReadingWitnessRef::ResolvedCommit { reference } => abi::ReadingWitnessRef::ResolvedCommit { + reference: provenance_ref_to_abi(*reference), + }, + ReadingWitnessRef::EmptyFrontier { + worldline_id, + state_root, + commit_hash, + } => abi::ReadingWitnessRef::EmptyFrontier { + worldline_id: worldline_id_to_abi(*worldline_id), + state_root: state_root.to_vec(), + commit_hash: commit_hash.to_vec(), + }, + } +} + +fn reading_budget_posture_to_abi(posture: ReadingBudgetPosture) -> abi::ReadingBudgetPosture { + match posture { + ReadingBudgetPosture::UnboundedOneShot => 
abi::ReadingBudgetPosture::UnboundedOneShot, + ReadingBudgetPosture::Bounded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + } => abi::ReadingBudgetPosture::Bounded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + }, + } +} + +fn reading_rights_posture_to_abi(posture: ReadingRightsPosture) -> abi::ReadingRightsPosture { + match posture { + ReadingRightsPosture::KernelPublic => abi::ReadingRightsPosture::KernelPublic, + } +} + +fn reading_residual_posture_to_abi(posture: ReadingResidualPosture) -> abi::ReadingResidualPosture { + match posture { + ReadingResidualPosture::Complete => abi::ReadingResidualPosture::Complete, + ReadingResidualPosture::Residual => abi::ReadingResidualPosture::Residual, + ReadingResidualPosture::PluralityPreserved => { + abi::ReadingResidualPosture::PluralityPreserved + } + ReadingResidualPosture::Obstructed => abi::ReadingResidualPosture::Obstructed, + } +} + +fn warp_id_to_abi(warp_id: WarpId) -> abi::WarpId { + abi::WarpId::from_bytes(*warp_id.as_bytes()) +} + +fn node_key_to_abi(key: NodeKey) -> abi::AttachmentOwnerRef { + abi::AttachmentOwnerRef::Node { + warp_id: warp_id_to_abi(key.warp_id), + node_id: abi::NodeId::from_bytes(*key.local_id.as_bytes()), + } +} + +fn edge_key_to_abi(key: EdgeKey) -> abi::AttachmentOwnerRef { + abi::AttachmentOwnerRef::Edge { + warp_id: warp_id_to_abi(key.warp_id), + edge_id: abi::EdgeId::from_bytes(*key.local_id.as_bytes()), + } +} + +fn attachment_key_to_abi(key: AttachmentKey) -> abi::AttachmentKey { + let owner = match key.owner { + AttachmentOwner::Node(node) => node_key_to_abi(node), + AttachmentOwner::Edge(edge) => edge_key_to_abi(edge), + }; + let plane = match key.plane { + AttachmentPlane::Alpha => abi::AttachmentPlane::Alpha, + AttachmentPlane::Beta => abi::AttachmentPlane::Beta, + }; + abi::AttachmentKey { owner, plane } +} + +fn channel_id_to_abi(channel_id: &ChannelId) -> abi::ChannelId { + 
abi::ChannelId::from_bytes(*channel_id.as_bytes()) +} + +#[cfg(test)] +mod tests; diff --git a/crates/warp-core/src/optic/tests.rs b/crates/warp-core/src/optic/tests.rs new file mode 100644 index 00000000..f4e9fa2f --- /dev/null +++ b/crates/warp-core/src/optic/tests.rs @@ -0,0 +1,918 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS + +use super::*; +use crate::attachment::{AttachmentKey, AttachmentOwner, AttachmentPlane}; +use crate::ident::{EdgeId, EdgeKey, NodeId, NodeKey, TypeId, WarpId}; +use crate::observation::{ + BuiltinObserverPlan, ObservationBasisPosture, ReadingBudgetPosture, ReadingObserverBasis, + ReadingObserverPlan, ReadingResidualPosture, ReadingRightsPosture, ReadingWitnessRef, +}; +use crate::provenance_store::ProvenanceRef; +use crate::strand::StrandId; +use crate::worldline::WorldlineId; + +fn worldline(seed: u8) -> WorldlineId { + WorldlineId::from_bytes([seed; 32]) +} + +fn strand(seed: u8) -> StrandId { + StrandId::from_bytes([seed; 32]) +} + +fn braid(seed: u8) -> BraidId { + BraidId::from_bytes([seed; 32]) +} + +fn retained(seed: u8) -> RetainedReadingKey { + RetainedReadingKey::from_bytes([seed; 32]) +} + +fn retained_codec(seed: u8) -> RetainedReadingCodecId { + RetainedReadingCodecId::from_bytes([seed; 32]) +} + +fn intent_family(seed: u8) -> IntentFamilyId { + IntentFamilyId::from_bytes([seed; 32]) +} + +fn capability(seed: u8) -> OpticCapabilityId { + OpticCapabilityId::from_bytes([seed; 32]) +} + +fn admission_law(seed: u8) -> AdmissionLawId { + AdmissionLawId::from_bytes([seed; 32]) +} + +fn actor(seed: u8) -> OpticActorId { + OpticActorId::from_bytes([seed; 32]) +} + +fn cause(seed: u8) -> OpticCause { + OpticCause { + actor: actor(seed), + cause_hash: [seed.wrapping_add(1); 32], + label: Some("test cause".to_owned()), + } +} + +fn node_key(seed: u8) -> NodeKey { + NodeKey { + warp_id: WarpId([seed; 32]), + local_id: NodeId([seed.wrapping_add(1); 32]), + } +} + +fn edge_key(seed: u8) -> EdgeKey { + 
EdgeKey { + warp_id: WarpId([seed; 32]), + local_id: EdgeId([seed.wrapping_add(1); 32]), + } +} + +fn provenance(seed: u8, tick: u64) -> ProvenanceRef { + ProvenanceRef { + worldline_id: worldline(seed), + worldline_tick: crate::clock::WorldlineTick::from_raw(tick), + commit_hash: [seed.wrapping_add(1); 32], + } +} + +fn worldline_focus() -> OpticFocus { + OpticFocus::Worldline { + worldline_id: worldline(1), + } +} + +fn frontier_coordinate() -> EchoCoordinate { + EchoCoordinate::Worldline { + worldline_id: worldline(1), + at: CoordinateAt::Frontier, + } +} + +fn head_aperture() -> OpticAperture { + OpticAperture { + shape: OpticApertureShape::Head, + budget: OpticReadBudget { + max_bytes: Some(512), + max_nodes: Some(8), + max_ticks: Some(1), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + } +} + +fn witness_basis(seed: u8, tick: u64) -> WitnessBasis { + let reference = provenance(seed, tick); + WitnessBasis::ResolvedCommit { + reference, + state_root: [seed.wrapping_add(2); 32], + commit_hash: reference.commit_hash, + } +} + +fn reading_envelope() -> ReadingEnvelope { + ReadingEnvelope { + observer_plan: ReadingObserverPlan::Builtin { + plan: BuiltinObserverPlan::CommitBoundaryHead, + }, + observer_instance: None, + observer_basis: ReadingObserverBasis::CommitBoundary, + witness_refs: vec![ReadingWitnessRef::ResolvedCommit { + reference: provenance(1, 2), + }], + parent_basis_posture: ObservationBasisPosture::Worldline, + budget_posture: ReadingBudgetPosture::UnboundedOneShot, + rights_posture: ReadingRightsPosture::KernelPublic, + residual_posture: ReadingResidualPosture::Complete, + } +} + +fn optic_capability(seed: u8, focus: OpticFocus) -> OpticCapability { + OpticCapability { + capability_id: capability(seed), + actor: actor(seed), + issuer_ref: Some(provenance(seed, 1)), + policy_hash: [seed.wrapping_add(2); 32], + allowed_focus: focus, + projection_version: ProjectionVersion::from_raw(1), + reducer_version: 
None, + allowed_intent_family: intent_family(seed), + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(128), + max_ticks: Some(8), + max_attachments: Some(0), + }, + } +} + +#[test] +fn echo_optic_id_is_stable_and_descriptor_derived() { + let focus = OpticFocus::Worldline { + worldline_id: worldline(1), + }; + let coordinate = EchoCoordinate::Worldline { + worldline_id: worldline(1), + at: CoordinateAt::Frontier, + }; + + let first = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + Some(ReducerVersion::from_raw(7)), + intent_family(4), + capability(5), + ); + let second = EchoOptic::new( + focus, + coordinate, + ProjectionVersion::from_raw(1), + Some(ReducerVersion::from_raw(7)), + intent_family(4), + capability(5), + ); + + assert_eq!(first.optic_id, second.optic_id); + + let changed_projection = EchoOptic::new( + OpticFocus::Worldline { + worldline_id: worldline(1), + }, + EchoCoordinate::Worldline { + worldline_id: worldline(1), + at: CoordinateAt::Frontier, + }, + ProjectionVersion::from_raw(2), + Some(ReducerVersion::from_raw(7)), + intent_family(4), + capability(5), + ); + + assert_ne!(first.optic_id, changed_projection.optic_id); +} + +#[test] +fn optic_focus_covers_generic_subjects_without_graph_handle() { + let focuses = vec![ + OpticFocus::Worldline { + worldline_id: worldline(1), + }, + OpticFocus::Strand { + strand_id: strand(2), + }, + OpticFocus::Braid { braid_id: braid(3) }, + OpticFocus::RetainedReading { key: retained(4) }, + OpticFocus::AttachmentBoundary { + key: AttachmentKey { + owner: AttachmentOwner::Node(node_key(5)), + plane: AttachmentPlane::Alpha, + }, + }, + OpticFocus::AttachmentBoundary { + key: AttachmentKey { + owner: AttachmentOwner::Edge(edge_key(6)), + plane: AttachmentPlane::Beta, + }, + }, + ]; + + for focus in focuses { + let encoded = focus.to_abi(); + assert!(matches!( + encoded, + echo_wasm_abi::kernel_port::OpticFocus::Worldline { .. 
} + | echo_wasm_abi::kernel_port::OpticFocus::Strand { .. } + | echo_wasm_abi::kernel_port::OpticFocus::Braid { .. } + | echo_wasm_abi::kernel_port::OpticFocus::RetainedReading { .. } + | echo_wasm_abi::kernel_port::OpticFocus::AttachmentBoundary { .. } + )); + } +} + +#[test] +fn strand_coordinate_names_explicit_parent_basis_in_abi() { + let parent_basis = ProvenanceRef { + worldline_id: worldline(9), + worldline_tick: crate::clock::WorldlineTick::from_raw(11), + commit_hash: [12; 32], + }; + let coordinate = EchoCoordinate::Strand { + strand_id: strand(2), + at: CoordinateAt::Provenance(parent_basis), + parent_basis: Some(parent_basis), + }; + + assert_eq!( + coordinate.to_abi(), + echo_wasm_abi::kernel_port::EchoCoordinate::Strand { + strand_id: echo_wasm_abi::kernel_port::StrandId::from_bytes([2; 32]), + at: echo_wasm_abi::kernel_port::CoordinateAt::Provenance { + reference: echo_wasm_abi::kernel_port::ProvenanceRef { + worldline_id: echo_wasm_abi::kernel_port::WorldlineId::from_bytes([9; 32]), + worldline_tick: echo_wasm_abi::kernel_port::WorldlineTick(11), + commit_hash: vec![12; 32], + }, + }, + parent_basis: Some(echo_wasm_abi::kernel_port::ProvenanceRef { + worldline_id: echo_wasm_abi::kernel_port::WorldlineId::from_bytes([9; 32]), + worldline_tick: echo_wasm_abi::kernel_port::WorldlineTick(11), + commit_hash: vec![12; 32], + }), + } + ); +} + +#[test] +fn optic_aperture_encodes_bounds_without_full_materialization_fallback() { + let aperture = OpticAperture { + shape: OpticApertureShape::QueryBytes { + query_id: 42, + vars_digest: [7; 32], + }, + budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(128), + max_ticks: Some(12), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }; + + assert_eq!( + aperture.to_abi(), + echo_wasm_abi::kernel_port::OpticAperture { + shape: echo_wasm_abi::kernel_port::OpticApertureShape::QueryBytes { + query_id: 42, + vars_digest: vec![7; 32], + }, + budget: 
echo_wasm_abi::kernel_port::OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(128), + max_ticks: Some(12), + max_attachments: Some(0), + }, + attachment_descent: echo_wasm_abi::kernel_port::AttachmentDescentPolicy::BoundaryOnly, + } + ); +} + +#[test] +fn truth_channel_aperture_converts_channel_ids_to_abi_bytes() { + let channel = TypeId([3; 32]); + let aperture = OpticAperture { + shape: OpticApertureShape::TruthChannels { + channels: Some(vec![channel]), + }, + budget: OpticReadBudget::default(), + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + }; + + assert_eq!( + aperture.to_abi().shape, + echo_wasm_abi::kernel_port::OpticApertureShape::TruthChannels { + channels: Some(vec![echo_wasm_abi::kernel_port::ChannelId::from_bytes( + [3; 32] + )]), + } + ); +} + +#[test] +fn read_identity_is_stable_for_same_read_question() { + let focus = worldline_focus(); + let coordinate = frontier_coordinate(); + let aperture = head_aperture(); + let optic = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ); + + let first = ReadIdentity::new( + optic.optic_id, + &focus, + coordinate.clone(), + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let second = ReadIdentity::new( + optic.optic_id, + &focus, + coordinate, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + + assert_eq!(first, second); + assert_eq!(first.read_identity_hash, second.read_identity_hash); + assert_eq!(first.focus_digest, focus.digest()); + assert_eq!(first.aperture_digest, aperture.digest()); +} + +#[test] +fn read_identity_changes_when_question_or_witness_changes() { + let focus = worldline_focus(); + let 
coordinate = frontier_coordinate(); + let aperture = head_aperture(); + let optic_id = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ) + .optic_id; + + let base = ReadIdentity::new( + optic_id, + &focus, + coordinate.clone(), + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let changed_coordinate = ReadIdentity::new( + optic_id, + &focus, + EchoCoordinate::Worldline { + worldline_id: worldline(1), + at: CoordinateAt::Tick(crate::clock::WorldlineTick::from_raw(3)), + }, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let changed_aperture = ReadIdentity::new( + optic_id, + &focus, + coordinate.clone(), + &OpticAperture { + shape: OpticApertureShape::SnapshotMetadata, + budget: aperture.budget, + attachment_descent: aperture.attachment_descent, + }, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let changed_projection = ReadIdentity::new( + optic_id, + &focus, + coordinate.clone(), + &aperture, + ProjectionVersion::from_raw(2), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let changed_witness = ReadIdentity::new( + optic_id, + &focus, + coordinate, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 3), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + + assert_ne!( + base.read_identity_hash, + changed_coordinate.read_identity_hash + ); + 
assert_ne!(base.read_identity_hash, changed_aperture.read_identity_hash); + assert_ne!( + base.read_identity_hash, + changed_projection.read_identity_hash + ); + assert_ne!(base.read_identity_hash, changed_witness.read_identity_hash); +} + +#[test] +fn existing_reading_envelope_can_carry_compatible_optic_identity() { + let focus = worldline_focus(); + let coordinate = frontier_coordinate(); + let aperture = head_aperture(); + let reading = reading_envelope(); + let optic_id = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ) + .optic_id; + + let identity = ReadIdentity::from_reading_envelope( + optic_id, + &focus, + coordinate, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + &reading, + ); + let envelope = OpticReadingEnvelope::new(reading, identity); + let abi = envelope.to_abi(); + + assert_eq!(abi.read_identity.optic_id, optic_id_to_abi(optic_id)); + assert_eq!( + abi.read_identity.rights_posture, + echo_wasm_abi::kernel_port::ReadingRightsPosture::KernelPublic + ); + assert_eq!( + abi.read_identity.budget_posture, + echo_wasm_abi::kernel_port::ReadingBudgetPosture::UnboundedOneShot + ); + assert_eq!( + abi.read_identity.residual_posture, + echo_wasm_abi::kernel_port::ReadingResidualPosture::Complete + ); +} + +#[test] +fn retained_reading_key_requires_content_hash_and_read_identity() { + let focus = worldline_focus(); + let coordinate = frontier_coordinate(); + let aperture = head_aperture(); + let optic_id = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ) + .optic_id; + let content_hash = [42; 32]; + let codec_id = retained_codec(7); + let first_identity = ReadIdentity::new( + optic_id, + &focus, + coordinate.clone(), + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 2), + ReadingRightsPosture::KernelPublic, + 
ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let second_identity = ReadIdentity::new( + optic_id, + &focus, + coordinate, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(1, 3), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + + let first = RetainedReadingDescriptor::new(first_identity, content_hash, codec_id, 128); + let second = RetainedReadingDescriptor::new(second_identity, content_hash, codec_id, 128); + let content_only_matches = [first.clone(), second.clone()] + .iter() + .filter(|descriptor| descriptor.content_hash == content_hash) + .count(); + + assert_ne!(first.key, second.key); + assert_eq!(content_only_matches, 2); + assert_eq!(first.to_abi().key, retained_reading_key_to_abi(first.key)); +} + +#[test] +fn checkpoint_plus_tail_identity_does_not_collapse_to_checkpoint_hash() { + let focus = worldline_focus(); + let coordinate = frontier_coordinate(); + let aperture = head_aperture(); + let optic_id = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ) + .optic_id; + let checkpoint_ref = provenance(4, 10); + let checkpoint_hash = [44; 32]; + let checkpoint_only = WitnessBasis::ResolvedCommit { + reference: checkpoint_ref, + state_root: checkpoint_hash, + commit_hash: checkpoint_hash, + }; + let checkpoint_plus_tail = WitnessBasis::CheckpointPlusTail { + checkpoint_ref, + checkpoint_hash, + tail_witness_refs: vec![provenance(4, 11)], + tail_digest: [45; 32], + }; + + let checkpoint_only_identity = ReadIdentity::new( + optic_id, + &focus, + coordinate.clone(), + &aperture, + ProjectionVersion::from_raw(1), + None, + checkpoint_only, + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let live_tail_identity = ReadIdentity::new( + optic_id, + &focus, + coordinate, + 
&aperture, + ProjectionVersion::from_raw(1), + None, + checkpoint_plus_tail, + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::UnboundedOneShot, + ReadingResidualPosture::Complete, + ); + let checkpoint_only_retained = + RetainedReadingDescriptor::new(checkpoint_only_identity, [55; 32], retained_codec(7), 256); + let live_tail_retained = + RetainedReadingDescriptor::new(live_tail_identity, [55; 32], retained_codec(7), 256); + + assert_ne!( + checkpoint_only_retained.read_identity.read_identity_hash, + live_tail_retained.read_identity.read_identity_hash + ); + assert_ne!(checkpoint_only_retained.key, live_tail_retained.key); +} + +#[test] +fn optic_obstruction_kinds_keep_fail_closed_cases_distinct() { + use std::collections::BTreeSet; + + let required = [ + ( + OpticObstructionKind::StaleBasis, + echo_wasm_abi::kernel_port::OpticObstructionKind::StaleBasis, + ), + ( + OpticObstructionKind::MissingWitness, + echo_wasm_abi::kernel_port::OpticObstructionKind::MissingWitness, + ), + ( + OpticObstructionKind::BudgetExceeded, + echo_wasm_abi::kernel_port::OpticObstructionKind::BudgetExceeded, + ), + ( + OpticObstructionKind::CapabilityDenied, + echo_wasm_abi::kernel_port::OpticObstructionKind::CapabilityDenied, + ), + ( + OpticObstructionKind::AttachmentDescentRequired, + echo_wasm_abi::kernel_port::OpticObstructionKind::AttachmentDescentRequired, + ), + ]; + + let mut names = BTreeSet::new(); + for (core, expected) in required { + let abi = core.to_abi(); + assert_eq!(abi, expected); + assert!(names.insert(format!("{abi:?}"))); + } + + assert_eq!(names.len(), required.len()); +} + +#[test] +fn intent_dispatch_result_matching_is_variant_exhaustive() { + fn classify(result: &IntentDispatchResult) -> &'static str { + match result { + IntentDispatchResult::Admitted(_) => "admitted", + IntentDispatchResult::Staged(_) => "staged", + IntentDispatchResult::Plural(_) => "plural", + IntentDispatchResult::Conflict(_) => "conflict", + IntentDispatchResult::Obstructed(_) 
=> "obstructed", + } + } + + let optic = EchoOptic::new( + worldline_focus(), + frontier_coordinate(), + ProjectionVersion::from_raw(1), + None, + intent_family(1), + capability(2), + ); + let base_coordinate = frontier_coordinate(); + let family = intent_family(1); + let admitted_ref = provenance(1, 3); + let obstruction = OpticObstruction { + kind: OpticObstructionKind::StaleBasis, + optic_id: Some(optic.optic_id), + focus: Some(worldline_focus()), + coordinate: Some(base_coordinate.clone()), + witness_basis: Some(WitnessBasis::Missing { + reason: MissingWitnessBasisReason::EvidenceUnavailable, + }), + message: "base coordinate is stale".to_owned(), + }; + let outcomes = vec![ + IntentDispatchResult::Admitted(AdmittedIntent { + optic_id: optic.optic_id, + base_coordinate: base_coordinate.clone(), + intent_family: family, + admitted_ref, + receipt_hash: [4; 32], + }), + IntentDispatchResult::Staged(StagedIntent { + optic_id: optic.optic_id, + base_coordinate: base_coordinate.clone(), + intent_family: family, + stage_ref: [5; 32], + reason: StagedIntentReason::RebaseRequired, + }), + IntentDispatchResult::Plural(PluralIntent { + optic_id: optic.optic_id, + base_coordinate: base_coordinate.clone(), + intent_family: family, + candidate_refs: vec![admitted_ref, provenance(1, 4)], + residual_posture: ReadingResidualPosture::PluralityPreserved, + }), + IntentDispatchResult::Conflict(IntentConflict { + optic_id: optic.optic_id, + base_coordinate, + intent_family: family, + reason: IntentConflictReason::StaleBasis, + conflict_ref: Some(admitted_ref), + evidence_digest: [6; 32], + message: "base conflicts with frontier".to_owned(), + }), + IntentDispatchResult::Obstructed(obstruction), + ]; + + assert_eq!( + outcomes.iter().map(classify).collect::>(), + vec!["admitted", "staged", "plural", "conflict", "obstructed"] + ); + assert!(matches!( + outcomes[0].to_abi(), + echo_wasm_abi::kernel_port::IntentDispatchResult::Admitted(_) + )); + assert!(matches!( + 
outcomes[1].to_abi(), + echo_wasm_abi::kernel_port::IntentDispatchResult::Staged(_) + )); + assert!(matches!( + outcomes[2].to_abi(), + echo_wasm_abi::kernel_port::IntentDispatchResult::Plural(_) + )); + assert!(matches!( + outcomes[3].to_abi(), + echo_wasm_abi::kernel_port::IntentDispatchResult::Conflict(_) + )); + assert!(matches!( + outcomes[4].to_abi(), + echo_wasm_abi::kernel_port::IntentDispatchResult::Obstructed(_) + )); +} + +#[test] +fn dispatch_optic_intent_request_carries_eint_v1_with_explicit_base() -> Result<(), String> { + let focus = worldline_focus(); + let base_coordinate = frontier_coordinate(); + let payload_bytes = echo_wasm_abi::pack_intent_v1(77, b"optic-vars") + .map_err(|error| format!("failed to pack EINT fixture: {error:?}"))?; + let request = DispatchOpticIntentRequest { + optic_id: OpticId::from_bytes([1; 32]), + base_coordinate: base_coordinate.clone(), + intent_family: intent_family(2), + focus: focus.clone(), + cause: cause(2), + capability: optic_capability(2, focus), + admission_law: admission_law(4), + payload: OpticIntentPayload::EintV1 { + bytes: payload_bytes.clone(), + }, + }; + + request + .validate_proposal() + .map_err(|obstruction| format!("expected valid optic dispatch, got {obstruction:?}"))?; + + let abi = request.to_abi(); + assert_eq!( + abi.optic_id, + echo_wasm_abi::kernel_port::OpticId::from_bytes([1; 32]) + ); + assert_eq!(abi.base_coordinate, base_coordinate.to_abi()); + assert_eq!( + abi.admission_law, + echo_wasm_abi::kernel_port::AdmissionLawId::from_bytes([4; 32]) + ); + assert!(matches!( + abi.payload, + echo_wasm_abi::kernel_port::OpticIntentPayload::EintV1 { ref bytes } + if bytes == &payload_bytes + )); + Ok(()) +} + +#[test] +fn dispatch_optic_intent_request_rejects_capability_bypass() -> Result<(), String> { + let request = DispatchOpticIntentRequest { + optic_id: OpticId::from_bytes([1; 32]), + base_coordinate: frontier_coordinate(), + intent_family: intent_family(99), + focus: worldline_focus(), + 
cause: cause(2), + capability: optic_capability(2, worldline_focus()), + admission_law: admission_law(4), + payload: OpticIntentPayload::EintV1 { + bytes: echo_wasm_abi::pack_intent_v1(77, b"optic-vars") + .map_err(|error| format!("failed to pack EINT fixture: {error:?}"))?, + }, + }; + + let obstruction = request + .validate_proposal() + .err() + .ok_or_else(|| "capability mismatch should obstruct dispatch".to_owned())?; + + assert_eq!( + obstruction.kind, + OpticObstructionKind::UnsupportedIntentFamily + ); + assert_eq!(obstruction.optic_id, Some(OpticId::from_bytes([1; 32]))); + assert_eq!(obstruction.coordinate, Some(frontier_coordinate())); + Ok(()) +} + +#[test] +fn open_optic_request_validates_descriptor_without_mutable_handle() -> Result<(), String> { + let focus = worldline_focus(); + let coordinate = frontier_coordinate(); + let grant = optic_capability(11, focus.clone()); + let request = OpenOpticRequest { + focus: focus.clone(), + coordinate: coordinate.clone(), + projection_version: ProjectionVersion::from_raw(1), + reducer_version: None, + intent_family: intent_family(11), + capability: grant, + cause: cause(11), + }; + + let result = request + .validate_descriptor() + .map_err(|error| format!("expected valid optic descriptor, got {error:?}"))?; + + assert_eq!( + result.optic, + EchoOptic::new( + focus, + coordinate, + ProjectionVersion::from_raw(1), + None, + intent_family(11), + capability(11), + ) + ); + assert_eq!( + result.capability_posture, + CapabilityPosture::Granted { + capability_id: capability(11), + actor: actor(11), + issuer_ref: Some(provenance(11, 1)), + policy_hash: [13; 32], + } + ); + assert_eq!(result.to_abi().optic, result.optic.to_abi()); + + Ok(()) +} + +#[test] +fn open_optic_denied_capability_returns_typed_obstruction() -> Result<(), String> { + let request = OpenOpticRequest { + focus: worldline_focus(), + coordinate: frontier_coordinate(), + projection_version: ProjectionVersion::from_raw(1), + reducer_version: None, + 
intent_family: intent_family(12), + capability: optic_capability( + 12, + OpticFocus::Strand { + strand_id: strand(2), + }, + ), + cause: cause(12), + }; + + let error = match request.validate_descriptor() { + Ok(result) => return Err(format!("expected capability denial, got {result:?}")), + Err(error) => error, + }; + let OpticOpenError::Obstructed(obstruction) = error; + + assert_eq!(obstruction.kind, OpticObstructionKind::CapabilityDenied); + assert_eq!(obstruction.optic_id, None); + assert_eq!(obstruction.focus, Some(worldline_focus())); + assert_eq!(obstruction.coordinate, Some(frontier_coordinate())); + + Ok(()) +} + +#[test] +fn close_optic_releases_only_session_descriptor_resource() { + let optic_id = OpticId::from_bytes([9; 32]); + let request = CloseOpticRequest { + optic_id, + cause: cause(9), + }; + + let result = request.close_session_descriptor(); + + assert_eq!(result, CloseOpticResult { optic_id }); + assert_eq!( + result.to_abi(), + echo_wasm_abi::kernel_port::CloseOpticResult { + optic_id: optic_id_to_abi(optic_id), + } + ); +} diff --git a/crates/warp-core/src/witnessed_suffix.rs b/crates/warp-core/src/witnessed_suffix.rs index d8b70acd..a811fe8d 100644 --- a/crates/warp-core/src/witnessed_suffix.rs +++ b/crates/warp-core/src/witnessed_suffix.rs @@ -3,8 +3,9 @@ //! Shape-only witnessed suffix admission vocabulary. //! //! This module names the first admission shell skeleton. It deliberately carries -//! compact provenance and basis evidence only; it does not implement transport, -//! remote synchronization, or import execution. +//! compact provenance and basis evidence only; export/import here means suffix +//! shell construction and admission classification, not transport, remote +//! synchronization, or import execution. use blake3::Hasher; use echo_wasm_abi::{encode_cbor, kernel_port as abi}; @@ -117,6 +118,182 @@ impl WitnessedSuffixAdmissionResponse { } } +/// Request to export a witnessed causal suffix rooted at a known source frontier. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ExportSuffixRequest { + /// Source worldline carrying the suffix. + pub source_worldline_id: WorldlineId, + /// Known source basis before the suffix begins. + pub base_frontier: ProvenanceRef, + /// Optional requested source frontier to export through. If omitted, the + /// export context may expose the current known suffix frontier. + pub target_frontier: Option, + /// Optional basis-relative settlement evidence reused by the exported shell. + pub basis_report: Option, +} + +impl ExportSuffixRequest { + /// Converts the export request into its ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::ExportSuffixRequest { + abi::ExportSuffixRequest { + source_worldline_id: worldline_id_to_abi(self.source_worldline_id), + base_frontier: provenance_ref_to_abi(self.base_frontier), + target_frontier: self.target_frontier.map(provenance_ref_to_abi), + basis_report: self + .basis_report + .as_ref() + .map(settlement_basis_report_to_abi), + } + } +} + +/// Witnessed suffix bundle exchanged across a hot/cold runtime boundary. +/// +/// The bundle is a compact causal shell. It is not a materialized state +/// snapshot, not a raw patch stream, and not a transport endpoint. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct CausalSuffixBundle { + /// Known source basis before the suffix begins. + pub base_frontier: ProvenanceRef, + /// Source frontier reached by this exported suffix shell. + pub target_frontier: ProvenanceRef, + /// Compact source suffix and its witness digest. + pub source_suffix: WitnessedSuffixShell, + /// Deterministic digest of the bundle identity used for retained shell + /// equivalence and loop-prevention surfaces. + pub bundle_digest: Hash, +} + +impl CausalSuffixBundle { + /// Builds a bundle and derives canonical source-shell and bundle digests. 
+ #[must_use] + pub fn new( + base_frontier: ProvenanceRef, + target_frontier: ProvenanceRef, + mut source_suffix: WitnessedSuffixShell, + ) -> Self { + source_suffix.witness_digest = derive_witnessed_suffix_shell_digest(&source_suffix); + let bundle_digest = + derive_causal_suffix_bundle_digest(base_frontier, target_frontier, &source_suffix); + Self { + base_frontier, + target_frontier, + source_suffix, + bundle_digest, + } + } + + /// Returns the deterministic digest used to compare retained shell results. + #[must_use] + pub const fn shell_equivalence_digest(&self) -> Hash { + self.bundle_digest + } + + /// Converts the bundle into its ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::CausalSuffixBundle { + abi::CausalSuffixBundle { + base_frontier: provenance_ref_to_abi(self.base_frontier), + target_frontier: provenance_ref_to_abi(self.target_frontier), + source_suffix: self.source_suffix.to_abi(), + bundle_digest: self.bundle_digest.to_vec(), + } + } +} + +/// Obstruction returned when Echo cannot produce a witnessed suffix bundle. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ExportSuffixObstruction { + /// Source coordinate implicated in the obstruction. + pub source_ref: ProvenanceRef, + /// Read-side residual posture associated with the obstruction. + pub residual_posture: ReadingResidualPosture, + /// Deterministic digest of compact obstruction evidence. + pub evidence_digest: Hash, +} + +impl ExportSuffixObstruction { + /// Converts the obstruction into its ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::ExportSuffixObstruction { + abi::ExportSuffixObstruction { + source_ref: provenance_ref_to_abi(self.source_ref), + residual_posture: reading_residual_posture_to_abi(self.residual_posture), + evidence_digest: self.evidence_digest.to_vec(), + } + } +} + +/// Request to import one witnessed causal suffix bundle by classifying it +/// against a target basis. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ImportSuffixRequest { + /// Source bundle being judged. + pub bundle: CausalSuffixBundle, + /// Worldline receiving the proposed admission. + pub target_worldline_id: WorldlineId, + /// Target basis used while judging admission. + pub target_basis: ProvenanceRef, + /// Optional target-basis evidence for strand/parent realization cases. + pub basis_report: Option, +} + +impl ImportSuffixRequest { + /// Converts the import request into its ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::ImportSuffixRequest { + abi::ImportSuffixRequest { + bundle: self.bundle.to_abi(), + target_worldline_id: worldline_id_to_abi(self.target_worldline_id), + target_basis: provenance_ref_to_abi(self.target_basis), + basis_report: self + .basis_report + .as_ref() + .map(settlement_basis_report_to_abi), + } + } +} + +/// Result of importing one witnessed causal suffix bundle into local admission. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ImportSuffixResult { + /// Bundle identity retained for shell-equivalence and loop-prevention checks. + pub bundle_digest: Hash, + /// Admission classifier response for the bundle's source suffix. + pub admission: WitnessedSuffixAdmissionResponse, +} + +impl ImportSuffixResult { + /// Returns the deterministic digest used to compare retained shell results. + #[must_use] + pub const fn retained_shell_equivalence_digest(&self) -> Hash { + self.bundle_digest + } + + /// Converts the import result into its ABI DTO. + #[must_use] + pub fn to_abi(&self) -> abi::ImportSuffixResult { + abi::ImportSuffixResult { + bundle_digest: self.bundle_digest.to_vec(), + admission: self.admission.to_abi(), + } + } +} + +/// Read-only export evidence source used by witnessed suffix bundle construction. +/// +/// This trait is intentionally narrow. It supplies suffix coordinates and +/// boundary witness material, but exposes no runtime mutation, network +/// transport, patch stream, or sync loop. 
+pub trait WitnessedSuffixExportContext { + /// Returns the source provenance coordinates covered by the requested suffix. + fn source_entries(&self, request: &ExportSuffixRequest) -> Option>; + + /// Returns a boundary witness when the suffix has no importable entries yet. + fn boundary_witness(&self, request: &ExportSuffixRequest) -> Option; +} + /// Read-only local evidence source used by witnessed suffix admission evaluation. /// /// This trait is intentionally narrow: it does not expose runtime mutation, @@ -140,6 +317,163 @@ pub trait WitnessedSuffixAdmissionContext { ) -> WitnessedSuffixLocalAdmissionPosture; } +/// Exports a witnessed causal suffix bundle from local read-only evidence. +/// +/// This constructs a causal shell and witness digest. It does not execute +/// transport and does not mutate any receiving worldline. +pub fn export_suffix( + request: &ExportSuffixRequest, + context: &impl WitnessedSuffixExportContext, +) -> Result { + if request.base_frontier.worldline_id != request.source_worldline_id { + return Err(export_obstruction(request)); + } + if let Some(target_frontier) = request.target_frontier { + if target_frontier.worldline_id != request.source_worldline_id + || target_frontier.worldline_tick.as_u64() + < request.base_frontier.worldline_tick.as_u64() + { + return Err(export_obstruction(request)); + } + } + + let Some(entries) = context.source_entries(request) else { + return Err(export_obstruction(request)); + }; + let Ok(entries) = canonical_unique_provenance_refs(entries) else { + return Err(export_obstruction(request)); + }; + let boundary_witness = context.boundary_witness(request); + + if entries.is_empty() && boundary_witness.is_none() { + return Err(export_obstruction(request)); + } + + for source_entry in &entries { + if source_entry.worldline_id != request.source_worldline_id + || source_entry.worldline_tick.as_u64() <= request.base_frontier.worldline_tick.as_u64() + || request.target_frontier.is_some_and(|target_frontier| { + 
source_entry.worldline_tick.as_u64() > target_frontier.worldline_tick.as_u64() + }) + { + return Err(export_obstruction(request)); + } + } + + let derived_target_frontier = match (entries.last().copied(), request.target_frontier) { + (Some(last_entry), Some(target_frontier)) if last_entry == target_frontier => { + target_frontier + } + (None, Some(target_frontier)) if target_frontier == request.base_frontier => { + target_frontier + } + (Some(_) | None, Some(_)) => return Err(export_obstruction(request)), + (Some(last_entry), None) => last_entry, + (None, None) => request.base_frontier, + }; + if boundary_witness.is_some_and(|boundary_witness| { + boundary_witness_is_outside_export_bounds( + boundary_witness, + request.source_worldline_id, + request.base_frontier, + derived_target_frontier, + ) + }) { + return Err(export_obstruction(request)); + } + + let source_suffix_start_tick = entries + .first() + .map_or(request.base_frontier.worldline_tick, |entry| { + entry.worldline_tick + }); + let source_suffix_end_tick = entries.last().map(|entry| entry.worldline_tick); + let source_suffix = WitnessedSuffixShell { + source_worldline_id: request.source_worldline_id, + source_suffix_start_tick, + source_suffix_end_tick, + source_entries: entries, + boundary_witness, + witness_digest: [0; 32], + basis_report: request.basis_report.clone(), + }; + + Ok(CausalSuffixBundle::new( + request.base_frontier, + derived_target_frontier, + source_suffix, + )) +} + +fn boundary_witness_is_outside_export_bounds( + boundary_witness: ProvenanceRef, + source_worldline_id: WorldlineId, + base_frontier: ProvenanceRef, + target_frontier: ProvenanceRef, +) -> bool { + if boundary_witness.worldline_id != source_worldline_id { + return true; + } + + let boundary_tick = boundary_witness.worldline_tick.as_u64(); + let base_tick = base_frontier.worldline_tick.as_u64(); + let target_tick = target_frontier.worldline_tick.as_u64(); + + boundary_tick < base_tick + || boundary_tick > target_tick + || 
(boundary_tick == base_tick && boundary_witness != base_frontier) + || (boundary_tick == target_tick && boundary_witness != target_frontier) +} + +/// Imports one witnessed causal suffix bundle by classifying it against the +/// local target basis. +/// +/// The returned result is an admission shell result. It does not append +/// provenance or apply patches directly. +#[must_use] +pub fn import_suffix( + request: &ImportSuffixRequest, + context: &impl WitnessedSuffixAdmissionContext, +) -> ImportSuffixResult { + let bundle_digest = derive_causal_suffix_bundle_digest( + request.bundle.base_frontier, + request.bundle.target_frontier, + &request.bundle.source_suffix, + ); + let admission_request = WitnessedSuffixAdmissionRequest { + source_suffix: request.bundle.source_suffix.clone(), + target_worldline_id: request.target_worldline_id, + target_basis: request.target_basis, + basis_report: request.basis_report.clone(), + }; + if request.bundle.bundle_digest != bundle_digest { + let source_shell_digest = context + .source_shell_digest(&admission_request.source_suffix) + .unwrap_or_else(|| { + context.source_shell_obstruction_digest(&admission_request.source_suffix) + }); + let target_basis = context + .resolve_target_basis(admission_request.target_basis) + .unwrap_or(admission_request.target_basis); + return ImportSuffixResult { + bundle_digest, + admission: obstructed_response( + &admission_request, + source_shell_digest, + target_basis, + None, + ), + }; + } + + let admission = evaluate_witnessed_suffix_admission(&admission_request, context); + + ImportSuffixResult { + bundle_digest, + admission, + } +} + /// Error returned when constructing a canonical local admission posture fails. #[derive(Clone, Copy, Debug, PartialEq, Eq, Error)] pub enum WitnessedSuffixLocalAdmissionPostureError { @@ -481,6 +815,65 @@ fn obstructed_response( } } +/// Derives the canonical digest for a witnessed suffix shell. 
+/// +/// The shell's caller-supplied `witness_digest` field is ignored while deriving +/// identity so export does not trust a prefilled claim. +#[must_use] +pub fn derive_witnessed_suffix_shell_digest(shell: &WitnessedSuffixShell) -> Hash { + let mut shell_without_claim = shell.to_abi(); + shell_without_claim.witness_digest.clear(); + + let mut hasher = Hasher::new(); + hasher.update(b"echo:witnessed-suffix-shell:v1\0"); + match encode_cbor(&shell_without_claim) { + Ok(encoded_shell) => { + hasher.update(&encoded_shell); + } + Err(_) => hash_source_shell_obstruction_fallback(&mut hasher, shell), + } + hasher.finalize().into() +} + +fn derive_causal_suffix_bundle_digest( + base_frontier: ProvenanceRef, + target_frontier: ProvenanceRef, + source_suffix: &WitnessedSuffixShell, +) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(b"echo:causal-suffix-bundle:v1\0"); + hash_provenance_ref(&mut hasher, &base_frontier); + hash_provenance_ref(&mut hasher, &target_frontier); + hasher.update(&source_suffix.witness_digest); + hasher.finalize().into() +} + +fn export_obstruction(request: &ExportSuffixRequest) -> ExportSuffixObstruction { + ExportSuffixObstruction { + source_ref: request.base_frontier, + residual_posture: ReadingResidualPosture::Obstructed, + evidence_digest: export_suffix_obstruction_digest(request), + } +} + +fn export_suffix_obstruction_digest(request: &ExportSuffixRequest) -> Hash { + let mut hasher = Hasher::new(); + hasher.update(b"echo:export-suffix-obstruction:v1\0"); + hasher.update(request.source_worldline_id.as_bytes()); + hash_provenance_ref(&mut hasher, &request.base_frontier); + match request.target_frontier { + Some(target_frontier) => { + hasher.update(&[1]); + hash_provenance_ref(&mut hasher, &target_frontier); + } + None => { + hasher.update(&[0]); + } + } + hasher.update(&[u8::from(request.basis_report.is_some())]); + hasher.finalize().into() +} + fn source_shell_obstruction_digest(shell: &WitnessedSuffixShell) -> Hash { let mut 
shell_without_claim = shell.to_abi(); shell_without_claim.witness_digest.clear(); diff --git a/crates/warp-core/src/witnessed_suffix_tests.rs b/crates/warp-core/src/witnessed_suffix_tests.rs index df977a74..ae976a8c 100644 --- a/crates/warp-core/src/witnessed_suffix_tests.rs +++ b/crates/warp-core/src/witnessed_suffix_tests.rs @@ -4,12 +4,14 @@ use echo_wasm_abi::kernel_port as abi; use crate::{ - evaluate_witnessed_suffix_admission, make_node_id, make_strand_id, BaseRef, ConflictReason, - Hash, NodeKey, ParentMovementFootprint, ProvenanceRef, ReadingResidualPosture, SlotId, - StrandBasisReport, StrandDivergenceFootprint, StrandOverlapRevalidation, - StrandRevalidationState, WarpId, WitnessedSuffixAdmissionContext, - WitnessedSuffixAdmissionOutcome, WitnessedSuffixAdmissionRequest, - WitnessedSuffixAdmissionResponse, WitnessedSuffixLocalAdmissionPosture, + derive_witnessed_suffix_shell_digest, evaluate_witnessed_suffix_admission, export_suffix, + import_suffix, make_node_id, make_strand_id, BaseRef, CausalSuffixBundle, ConflictReason, + ExportSuffixRequest, Hash, ImportSuffixRequest, ImportSuffixResult, NodeKey, + ParentMovementFootprint, ProvenanceRef, ReadingResidualPosture, SlotId, StrandBasisReport, + StrandDivergenceFootprint, StrandOverlapRevalidation, StrandRevalidationState, WarpId, + WitnessedSuffixAdmissionContext, WitnessedSuffixAdmissionOutcome, + WitnessedSuffixAdmissionRequest, WitnessedSuffixAdmissionResponse, + WitnessedSuffixExportContext, WitnessedSuffixLocalAdmissionPosture, WitnessedSuffixLocalAdmissionPostureError, WitnessedSuffixShell, WorldlineId, WorldlineTick, }; @@ -115,8 +117,8 @@ struct TargetBasisEchoAdmissionContext { } impl WitnessedSuffixAdmissionContext for TargetBasisEchoAdmissionContext { - fn source_shell_digest(&self, _shell: &WitnessedSuffixShell) -> Option { - Some([6; 32]) + fn source_shell_digest(&self, shell: &WitnessedSuffixShell) -> Option { + Some(shell.witness_digest) } fn resolve_target_basis(&self, _target_basis: 
ProvenanceRef) -> Option { @@ -135,6 +137,21 @@ impl WitnessedSuffixAdmissionContext for TargetBasisEchoAdmissionContext { } } +struct FakeExportContext { + source_entries: Option>, + boundary_witness: Option, +} + +impl WitnessedSuffixExportContext for FakeExportContext { + fn source_entries(&self, _request: &ExportSuffixRequest) -> Option> { + self.source_entries.clone() + } + + fn boundary_witness(&self, _request: &ExportSuffixRequest) -> Option { + self.boundary_witness + } +} + fn clean_context(posture: WitnessedSuffixLocalAdmissionPosture) -> FakeAdmissionContext { FakeAdmissionContext { expected_shell_digest: Some([6; 32]), @@ -143,6 +160,24 @@ fn clean_context(posture: WitnessedSuffixLocalAdmissionPosture) -> FakeAdmission } } +fn export_request() -> ExportSuffixRequest { + ExportSuffixRequest { + source_worldline_id: worldline(3), + base_frontier: provenance_ref(3, 2), + target_frontier: Some(provenance_ref(3, 4)), + basis_report: None, + } +} + +fn import_request(bundle: CausalSuffixBundle) -> ImportSuffixRequest { + ImportSuffixRequest { + bundle, + target_worldline_id: worldline(11), + target_basis: provenance_ref(12, 9), + basis_report: None, + } +} + fn admissible_posture( refs: Vec, ) -> Result { @@ -202,6 +237,194 @@ fn witnessed_suffix_core_request_converts_to_abi_shape() { ); } +#[test] +fn witnessed_suffix_export_produces_typed_causal_suffix_bundle() -> Result<(), String> { + let request = export_request(); + let context = FakeExportContext { + source_entries: Some(vec![provenance_ref(3, 4), provenance_ref(3, 3)]), + boundary_witness: Some(provenance_ref(3, 2)), + }; + + let bundle = export_suffix(&request, &context).map_err(|obstruction| { + format!("export should produce a suffix bundle, got {obstruction:?}") + })?; + + assert_eq!(bundle.base_frontier, provenance_ref(3, 2)); + assert_eq!(bundle.target_frontier, provenance_ref(3, 4)); + assert_eq!( + bundle.source_suffix.source_entries, + vec![provenance_ref(3, 3), provenance_ref(3, 4)] + ); + 
assert_eq!( + bundle.source_suffix.witness_digest, + derive_witnessed_suffix_shell_digest(&bundle.source_suffix) + ); + assert_eq!(bundle.shell_equivalence_digest(), bundle.bundle_digest); + Ok(()) +} + +#[test] +fn witnessed_suffix_export_obstructs_missing_witness_material() -> Result<(), String> { + let request = export_request(); + let context = FakeExportContext { + source_entries: Some(Vec::new()), + boundary_witness: None, + }; + + let obstruction = export_suffix(&request, &context) + .err() + .ok_or_else(|| "empty export without a boundary witness must obstruct".to_owned())?; + + assert_eq!(obstruction.source_ref, provenance_ref(3, 2)); + assert_eq!( + obstruction.residual_posture, + ReadingResidualPosture::Obstructed + ); + Ok(()) +} + +#[test] +fn witnessed_suffix_export_rejects_invalid_boundary_witnesses() { + let request = ExportSuffixRequest { + target_frontier: Some(provenance_ref(3, 2)), + ..export_request() + }; + + for boundary_witness in [provenance_ref(4, 2), provenance_ref(3, 9)] { + let context = FakeExportContext { + source_entries: Some(Vec::new()), + boundary_witness: Some(boundary_witness), + }; + + assert!( + export_suffix(&request, &context).is_err(), + "invalid boundary witness {boundary_witness:?} must obstruct export" + ); + } +} + +#[test] +fn witnessed_suffix_import_normalizes_to_comparable_frontier_before_deciding() { + let source_suffix = shell_with_entries(vec![provenance_ref(3, 3)]); + let bundle = CausalSuffixBundle::new(provenance_ref(3, 2), provenance_ref(3, 3), source_suffix); + let resolved_basis = provenance_ref(44, 20); + let context = TargetBasisEchoAdmissionContext { + resolved_target_basis: resolved_basis, + }; + + let result = import_suffix(&import_request(bundle.clone()), &context); + + assert_eq!(result.bundle_digest, bundle.bundle_digest); + assert_eq!(result.admission.target_basis, resolved_basis); + assert!(matches!( + result.admission.outcome, + WitnessedSuffixAdmissionOutcome::Admitted { admitted_refs, .. 
} + if admitted_refs == vec![resolved_basis] + )); +} + +#[test] +fn witnessed_suffix_import_obstructs_forged_bundle_digest( +) -> Result<(), WitnessedSuffixLocalAdmissionPostureError> { + let source_suffix = shell_with_entries(vec![provenance_ref(3, 3)]); + let mut bundle = + CausalSuffixBundle::new(provenance_ref(3, 2), provenance_ref(3, 3), source_suffix); + let canonical_bundle_digest = bundle.bundle_digest; + bundle.bundle_digest = [99; 32]; + let context = FakeAdmissionContext { + expected_shell_digest: Some(bundle.source_suffix.witness_digest), + resolved_target_basis: Some(provenance_ref(12, 9)), + posture: admissible_posture(vec![provenance_ref(30, 10)])?, + }; + + let result = import_suffix(&import_request(bundle), &context); + + assert_eq!(result.bundle_digest, canonical_bundle_digest); + assert!(matches!( + result.admission.outcome, + WitnessedSuffixAdmissionOutcome::Obstructed { + residual_posture: ReadingResidualPosture::Obstructed, + .. + } + )); + Ok(()) +} + +#[test] +fn witnessed_suffix_import_order_produces_same_retained_shell_equivalence_set( +) -> Result<(), WitnessedSuffixLocalAdmissionPostureError> { + let bundle_a = CausalSuffixBundle::new( + provenance_ref(3, 2), + provenance_ref(3, 3), + shell_with_entries(vec![provenance_ref(3, 3)]), + ); + let bundle_b = CausalSuffixBundle::new( + provenance_ref(3, 3), + provenance_ref(3, 4), + shell_with_entries(vec![provenance_ref(3, 4)]), + ); + let context_a = FakeAdmissionContext { + expected_shell_digest: Some(bundle_a.source_suffix.witness_digest), + resolved_target_basis: Some(provenance_ref(12, 9)), + posture: admissible_posture(vec![provenance_ref(30, 10)])?, + }; + let context_b = FakeAdmissionContext { + expected_shell_digest: Some(bundle_b.source_suffix.witness_digest), + resolved_target_basis: Some(provenance_ref(12, 9)), + posture: admissible_posture(vec![provenance_ref(31, 10)])?, + }; + + let mut forward = vec![ + import_suffix(&import_request(bundle_a.clone()), &context_a), + 
import_suffix(&import_request(bundle_b.clone()), &context_b), + ] + .into_iter() + .map(|result: ImportSuffixResult| result.retained_shell_equivalence_digest()) + .collect::>(); + let mut reverse = vec![ + import_suffix(&import_request(bundle_b), &context_b), + import_suffix(&import_request(bundle_a), &context_a), + ] + .into_iter() + .map(|result: ImportSuffixResult| result.retained_shell_equivalence_digest()) + .collect::>(); + + forward.sort_unstable(); + reverse.sort_unstable(); + + assert_eq!(forward, reverse); + Ok(()) +} + +#[test] +fn witnessed_suffix_import_preserves_non_independent_conflict() { + let source_suffix = shell_with_entries(vec![provenance_ref(3, 3)]); + let source_ref = provenance_ref(35, 14); + let bundle = CausalSuffixBundle::new(provenance_ref(3, 2), provenance_ref(3, 3), source_suffix); + let context = FakeAdmissionContext { + expected_shell_digest: Some(bundle.source_suffix.witness_digest), + resolved_target_basis: Some(provenance_ref(12, 9)), + posture: conflict_posture( + ConflictReason::ParentFootprintOverlap, + source_ref, + [36; 32], + None, + ), + }; + + let result = import_suffix(&import_request(bundle), &context); + + assert!(matches!( + result.admission.outcome, + WitnessedSuffixAdmissionOutcome::Conflict { + reason: ConflictReason::ParentFootprintOverlap, + source_ref: actual_source_ref, + evidence_digest, + .. 
+ } if actual_source_ref == source_ref && evidence_digest == [36; 32] + )); +} + #[test] fn witnessed_suffix_core_response_converts_admitted_outcome_to_abi() { let response = response(WitnessedSuffixAdmissionOutcome::Admitted { diff --git a/crates/warp-core/tests/determinism_thread_harness.rs b/crates/warp-core/tests/determinism_thread_harness.rs new file mode 100644 index 00000000..009dc615 --- /dev/null +++ b/crates/warp-core/tests/determinism_thread_harness.rs @@ -0,0 +1,427 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +#![allow( + clippy::expect_used, + clippy::panic, + clippy::cast_precision_loss, + clippy::too_many_lines, + clippy::unwrap_used +)] +//! Thread-count determinism harness tests for M003. +//! +//! The report is rendered as deterministic JSON text by hand. Do not use +//! `serde_json` in `warp-core`; JSON here is a diagnostic test artifact, not a +//! causal encoding boundary. + +mod common; + +use std::fmt::Write as _; + +use common::{hex32, parallel_harness, ParallelScenario, ParallelTestHarness}; +use warp_core::math::scalar::F32Scalar; +use warp_core::math::Scalar; +use warp_core::{compute_commit_hash_v2, Hash}; + +#[cfg(feature = "det_fixed")] +use warp_core::math::scalar::DFix64; + +const WORKERS_UNDER_TEST: &[usize] = &[1, 2, 4, 8]; + +#[derive(Clone, Copy)] +enum ScalarBackend { + F32, + #[cfg(feature = "det_fixed")] + DFix64, +} + +impl ScalarBackend { + fn name(self) -> &'static str { + match self { + Self::F32 => "F32Scalar", + #[cfg(feature = "det_fixed")] + Self::DFix64 => "DFix64", + } + } + + fn digest(self, tick: u64) -> Hash { + match self { + Self::F32 => scalar_digest::(tick), + #[cfg(feature = "det_fixed")] + Self::DFix64 => scalar_digest::(tick), + } + } +} + +#[derive(Clone, Copy)] +struct DivergenceHook { + tick: u64, + workers: usize, +} + +#[derive(Clone)] +struct TickWitness { + state_root: Hash, + patch_digest: Hash, + commit_id: Hash, + scalar_digest: Hash, +} + +#[derive(Clone)] +struct 
TickComparison { + tick: u64, + workers: usize, + state_root_match: bool, + patch_digest_match: bool, + commit_id_match: bool, + scalar_digest_match: bool, + baseline_state_root: Hash, + actual_state_root: Hash, + baseline_commit_id: Hash, + actual_commit_id: Hash, +} + +impl TickComparison { + fn deterministic(&self) -> bool { + self.state_root_match + && self.patch_digest_match + && self.commit_id_match + && self.scalar_digest_match + } + + fn mismatch_fields(&self) -> Vec<&'static str> { + let mut fields = Vec::new(); + if !self.state_root_match { + fields.push("state_root"); + } + if !self.patch_digest_match { + fields.push("patch_digest"); + } + if !self.commit_id_match { + fields.push("commit_id"); + } + if !self.scalar_digest_match { + fields.push("scalar_digest"); + } + fields + } +} + +struct DeterminismReport { + scenario: &'static str, + scalar_backend: &'static str, + baseline_workers: usize, + worker_counts: Vec, + tick_count: u64, + comparisons: Vec, +} + +impl DeterminismReport { + fn divergence_count(&self) -> usize { + self.comparisons + .iter() + .filter(|comparison| !comparison.deterministic()) + .count() + } + + fn first_divergence(&self) -> Option<&TickComparison> { + self.comparisons + .iter() + .find(|comparison| !comparison.deterministic()) + } + + fn to_json(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "{{"); + let _ = writeln!(out, " \"scenario\": \"{}\",", self.scenario); + let _ = writeln!(out, " \"scalar_backend\": \"{}\",", self.scalar_backend); + let _ = writeln!(out, " \"baseline_workers\": {},", self.baseline_workers); + let _ = writeln!( + out, + " \"worker_counts\": {},", + json_usize_array(&self.worker_counts) + ); + let _ = writeln!(out, " \"tick_count\": {},", self.tick_count); + let _ = writeln!(out, " \"divergence_count\": {},", self.divergence_count()); + match self.first_divergence() { + Some(first) => { + let _ = writeln!(out, " \"first_divergence\": {{"); + let _ = writeln!(out, " \"tick\": 
{},", first.tick); + let _ = writeln!(out, " \"workers\": {},", first.workers); + let _ = writeln!( + out, + " \"fields\": {}", + json_str_array(&first.mismatch_fields()) + ); + let _ = writeln!(out, " }},"); + } + None => { + let _ = writeln!(out, " \"first_divergence\": null,"); + } + } + let _ = writeln!(out, " \"comparisons\": ["); + for (idx, comparison) in self.comparisons.iter().enumerate() { + let comma = if idx + 1 == self.comparisons.len() { + "" + } else { + "," + }; + let _ = writeln!(out, " {{"); + let _ = writeln!(out, " \"tick\": {},", comparison.tick); + let _ = writeln!(out, " \"workers\": {},", comparison.workers); + let _ = writeln!( + out, + " \"deterministic\": {},", + comparison.deterministic() + ); + let _ = writeln!( + out, + " \"state_root_match\": {},", + comparison.state_root_match + ); + let _ = writeln!( + out, + " \"patch_digest_match\": {},", + comparison.patch_digest_match + ); + let _ = writeln!( + out, + " \"commit_id_match\": {},", + comparison.commit_id_match + ); + let _ = writeln!( + out, + " \"scalar_digest_match\": {},", + comparison.scalar_digest_match + ); + let _ = writeln!( + out, + " \"baseline_state_root\": \"{}\",", + hex32(&comparison.baseline_state_root) + ); + let _ = writeln!( + out, + " \"actual_state_root\": \"{}\",", + hex32(&comparison.actual_state_root) + ); + let _ = writeln!( + out, + " \"baseline_commit_id\": \"{}\",", + hex32(&comparison.baseline_commit_id) + ); + let _ = writeln!( + out, + " \"actual_commit_id\": \"{}\"", + hex32(&comparison.actual_commit_id) + ); + let _ = writeln!(out, " }}{comma}"); + } + let _ = writeln!(out, " ]"); + let _ = writeln!(out, "}}"); + out + } +} + +fn json_usize_array(values: &[usize]) -> String { + let mut out = String::from("["); + for (idx, value) in values.iter().enumerate() { + if idx > 0 { + out.push_str(", "); + } + let _ = write!(out, "{value}"); + } + out.push(']'); + out +} + +fn json_str_array(values: &[&str]) -> String { + let mut out = String::from("["); + 
for (idx, value) in values.iter().enumerate() { + if idx > 0 { + out.push_str(", "); + } + let _ = write!(out, "\"{value}\""); + } + out.push(']'); + out +} + +fn scenario_name(scenario: ParallelScenario) -> &'static str { + match scenario { + ParallelScenario::Small => "Small", + ParallelScenario::ManyIndependent => "ManyIndependent", + ParallelScenario::ManyConflicts => "ManyConflicts", + ParallelScenario::DeletesAndAttachments => "DeletesAndAttachments", + ParallelScenario::PrivacyClaims => "PrivacyClaims", + } +} + +fn scalar_digest(tick: u64) -> Hash { + let t = S::from_f32((tick as f32) + 1.25); + let scale = S::from_f32(0.5); + let bias = S::from_f32(3.0); + let (sin, cos) = t.sin_cos(); + let value = ((t + sin) * (cos + bias)) / (scale + S::one()); + + let mut hasher = blake3::Hasher::new(); + hasher.update(b"echo:test:thread-determinism:scalar-digest:v1\0"); + hasher.update(&tick.to_le_bytes()); + hasher.update(&value.to_f32().to_bits().to_le_bytes()); + hasher.finalize().into() +} + +fn make_witness( + raw: &common::ParallelExecResult, + parent: Option, + scalar_backend: ScalarBackend, + tick: u64, +) -> TickWitness { + let parents = parent.into_iter().collect::>(); + let commit_id = compute_commit_hash_v2(&raw.state_root, &parents, &raw.patch_digest, 0); + TickWitness { + state_root: raw.state_root, + patch_digest: raw.patch_digest, + commit_id, + scalar_digest: scalar_backend.digest(tick), + } +} + +fn make_comparison( + tick: u64, + workers: usize, + baseline: &TickWitness, + actual: &TickWitness, +) -> TickComparison { + TickComparison { + tick, + workers, + state_root_match: baseline.state_root == actual.state_root, + patch_digest_match: baseline.patch_digest == actual.patch_digest, + commit_id_match: baseline.commit_id == actual.commit_id, + scalar_digest_match: baseline.scalar_digest == actual.scalar_digest, + baseline_state_root: baseline.state_root, + actual_state_root: actual.state_root, + baseline_commit_id: baseline.commit_id, + 
actual_commit_id: actual.commit_id, + } +} + +fn run_report( + scenario: ParallelScenario, + scalar_backend: ScalarBackend, + tick_count: u64, + worker_counts: &[usize], + hook: Option, +) -> DeterminismReport { + let harness = parallel_harness(); + let base = harness.build_base_snapshot(scenario); + let mut comparisons = Vec::new(); + + for &workers in worker_counts { + let mut baseline_parent = None; + let mut actual_parent = None; + + for tick in 0..tick_count { + let ingress = harness.make_ingress(scenario, tick); + let baseline_raw = harness.execute_parallel(&base, &ingress, tick, 1); + let mut actual_raw = harness.execute_parallel(&base, &ingress, tick, workers); + + if hook.is_some_and(|hook| hook.tick == tick && hook.workers == workers) { + actual_raw.patch_digest[0] ^= 0x80; + } + + let baseline = make_witness(&baseline_raw, baseline_parent, scalar_backend, tick); + let actual = make_witness(&actual_raw, actual_parent, scalar_backend, tick); + comparisons.push(make_comparison(tick, workers, &baseline, &actual)); + + baseline_parent = Some(baseline.commit_id); + actual_parent = Some(actual.commit_id); + } + } + + DeterminismReport { + scenario: scenario_name(scenario), + scalar_backend: scalar_backend.name(), + baseline_workers: 1, + worker_counts: worker_counts.to_vec(), + tick_count, + comparisons, + } +} + +#[test] +fn reports_zero_divergences_for_f32_core_scenarios() { + for scenario in [ + ParallelScenario::Small, + ParallelScenario::ManyIndependent, + ParallelScenario::ManyConflicts, + ] { + let report = run_report(scenario, ScalarBackend::F32, 4, WORKERS_UNDER_TEST, None); + let json = report.to_json(); + + assert_eq!(report.divergence_count(), 0, "{json}"); + assert!(json.contains("\"scalar_backend\": \"F32Scalar\"")); + assert!(json.contains("\"worker_counts\": [1, 2, 4, 8]")); + assert!(json.contains("\"state_root_match\": true")); + assert!(json.contains("\"commit_id_match\": true")); + } +} + +#[test] +fn 
zero_ticks_report_is_trivially_deterministic() { + let report = run_report( + ParallelScenario::Small, + ScalarBackend::F32, + 0, + WORKERS_UNDER_TEST, + None, + ); + let json = report.to_json(); + + assert_eq!(report.divergence_count(), 0); + assert!(report.comparisons.is_empty()); + assert!(json.contains("\"tick_count\": 0")); + assert!(json.contains("\"comparisons\": [")); +} + +#[test] +fn ordering_break_hook_reports_first_divergence() { + let report = run_report( + ParallelScenario::ManyIndependent, + ScalarBackend::F32, + 4, + WORKERS_UNDER_TEST, + Some(DivergenceHook { + tick: 2, + workers: 4, + }), + ); + let json = report.to_json(); + let first = report + .first_divergence() + .expect("ordering hook should force divergence"); + + assert!(report.divergence_count() >= 1, "{json}"); + assert_eq!(first.tick, 2); + assert_eq!(first.workers, 4); + assert!(first.mismatch_fields().contains(&"patch_digest")); + assert!(first.mismatch_fields().contains(&"commit_id")); + assert!(json.contains("\"first_divergence\": {")); + assert!(json.contains("\"fields\": [\"patch_digest\", \"commit_id\"]")); +} + +#[cfg(feature = "det_fixed")] +#[test] +fn reports_zero_divergences_for_dfix64_backend() { + let report = run_report( + ParallelScenario::ManyIndependent, + ScalarBackend::DFix64, + 4, + WORKERS_UNDER_TEST, + None, + ); + let json = report.to_json(); + + assert_eq!(report.divergence_count(), 0, "{json}"); + assert!(json.contains("\"scalar_backend\": \"DFix64\"")); + assert!(json.contains("\"scalar_digest_match\": true")); +} diff --git a/crates/warp-core/tests/optic_attachment_tests.rs b/crates/warp-core/tests/optic_attachment_tests.rs new file mode 100644 index 00000000..c7ef5d61 --- /dev/null +++ b/crates/warp-core/tests/optic_attachment_tests.rs @@ -0,0 +1,160 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Integration tests for optic attachment boundary semantics. 
+ +#![allow(clippy::panic, clippy::unwrap_used)] + +use warp_core::{ + make_node_id, make_type_id, AttachmentDescentPolicy, AttachmentKey, CoordinateAt, + EchoCoordinate, Engine, EngineBuilder, GraphStore, NodeKey, NodeRecord, ObservationService, + ObserveOpticRequest, ObserveOpticResult, OpticAperture, OpticApertureShape, OpticCapabilityId, + OpticFocus, OpticId, OpticObstructionKind, OpticReadBudget, ProjectionVersion, + ProvenanceService, SchedulerKind, WorldlineId, WorldlineRuntime, WorldlineState, +}; + +struct OpticHarness { + runtime: WorldlineRuntime, + provenance: ProvenanceService, + engine: Engine, + worldline_id: WorldlineId, + attachment_key: AttachmentKey, +} + +fn harness() -> OpticHarness { + let mut store = GraphStore::default(); + let root = make_node_id("root"); + store.insert_node( + root, + NodeRecord { + ty: make_type_id("world"), + }, + ); + + let engine = EngineBuilder::new(store, root) + .scheduler(SchedulerKind::Radix) + .workers(1) + .build(); + let worldline_id = WorldlineId::from_bytes(*engine.root_key().warp_id.as_bytes()); + let state = WorldlineState::try_from(engine.state().clone()).unwrap(); + let mut runtime = WorldlineRuntime::new(); + let mut provenance = ProvenanceService::new(); + provenance.register_worldline(worldline_id, &state).unwrap(); + runtime.register_worldline(worldline_id, state).unwrap(); + let attachment_key = AttachmentKey::node_alpha(NodeKey { + warp_id: engine.root_key().warp_id, + local_id: root, + }); + + OpticHarness { + runtime, + provenance, + engine, + worldline_id, + attachment_key, + } +} + +fn attachment_request( + harness: &OpticHarness, + attachment_descent: AttachmentDescentPolicy, + max_attachments: Option, +) -> ObserveOpticRequest { + ObserveOpticRequest { + optic_id: OpticId::from_bytes([90; 32]), + focus: OpticFocus::AttachmentBoundary { + key: harness.attachment_key, + }, + coordinate: EchoCoordinate::Worldline { + worldline_id: harness.worldline_id, + at: CoordinateAt::Frontier, + }, + 
aperture: OpticAperture { + shape: OpticApertureShape::AttachmentBoundary, + budget: OpticReadBudget { + max_bytes: Some(256), + max_nodes: Some(1), + max_ticks: Some(1), + max_attachments, + }, + attachment_descent, + }, + projection_version: ProjectionVersion::from_raw(1), + reducer_version: None, + capability: OpticCapabilityId::from_bytes([91; 32]), + } +} + +#[test] +fn attachment_boundary_read_without_descent_returns_boundary_posture() { + let harness = harness(); + let request = attachment_request(&harness, AttachmentDescentPolicy::BoundaryOnly, Some(0)); + let result = ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + request, + ); + + let obstruction = match result { + ObserveOpticResult::Obstructed(obstruction) => obstruction, + ObserveOpticResult::Reading(reading) => { + panic!("attachment boundary read should stop at boundary, got {reading:?}"); + } + }; + + assert_eq!( + obstruction.kind, + OpticObstructionKind::AttachmentDescentRequired + ); + assert_eq!( + obstruction.focus, + Some(OpticFocus::AttachmentBoundary { + key: harness.attachment_key + }) + ); +} + +#[test] +fn attachment_boundary_explicit_descent_without_authority_is_denied() { + let harness = harness(); + let request = attachment_request(&harness, AttachmentDescentPolicy::Explicit, Some(1)); + let result = ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + request, + ); + + let obstruction = match result { + ObserveOpticResult::Obstructed(obstruction) => obstruction, + ObserveOpticResult::Reading(reading) => { + panic!("unauthorized attachment descent should obstruct, got {reading:?}"); + } + }; + + assert_eq!( + obstruction.kind, + OpticObstructionKind::AttachmentDescentDenied + ); +} + +#[test] +fn attachment_boundary_explicit_descent_requires_attachment_budget() { + let harness = harness(); + let request = attachment_request(&harness, AttachmentDescentPolicy::Explicit, Some(0)); + let 
result = ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + request, + ); + + let obstruction = match result { + ObserveOpticResult::Obstructed(obstruction) => obstruction, + ObserveOpticResult::Reading(reading) => { + panic!("attachment descent without budget should obstruct, got {reading:?}"); + } + }; + + assert_eq!(obstruction.kind, OpticObstructionKind::BudgetExceeded); +} diff --git a/crates/warp-core/tests/optic_dispatch_tests.rs b/crates/warp-core/tests/optic_dispatch_tests.rs new file mode 100644 index 00000000..6be7a00b --- /dev/null +++ b/crates/warp-core/tests/optic_dispatch_tests.rs @@ -0,0 +1,98 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Integration tests for Echo optic intent dispatch semantics. + +#![allow(clippy::unwrap_used)] + +use warp_core::{ + AdmissionLawId, CoordinateAt, DispatchOpticIntentRequest, EchoCoordinate, IntentFamilyId, + OpticActorId, OpticCapability, OpticCapabilityId, OpticCause, OpticFocus, OpticId, + OpticIntentPayload, OpticObstructionKind, OpticReadBudget, ProjectionVersion, WorldlineId, + WorldlineTick, +}; + +fn worldline(seed: u8) -> WorldlineId { + WorldlineId::from_bytes([seed; 32]) +} + +fn intent_family(seed: u8) -> IntentFamilyId { + IntentFamilyId::from_bytes([seed; 32]) +} + +fn actor(seed: u8) -> OpticActorId { + OpticActorId::from_bytes([seed; 32]) +} + +fn dispatch_request(base_tick: u64) -> DispatchOpticIntentRequest { + let worldline_id = worldline(3); + let focus = OpticFocus::Worldline { worldline_id }; + let actor = actor(4); + let intent_family = intent_family(5); + + DispatchOpticIntentRequest { + optic_id: OpticId::from_bytes([1; 32]), + base_coordinate: EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(base_tick)), + }, + intent_family, + focus: focus.clone(), + cause: OpticCause { + actor, + cause_hash: [6; 32], + label: Some("stale basis test".into()), + }, + capability: 
OpticCapability { + capability_id: OpticCapabilityId::from_bytes([7; 32]), + actor, + issuer_ref: None, + policy_hash: [8; 32], + allowed_focus: focus, + projection_version: ProjectionVersion::from_raw(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + }, + admission_law: AdmissionLawId::from_bytes([9; 32]), + payload: OpticIntentPayload::EintV1 { + bytes: echo_wasm_abi::pack_intent_v1(77, b"optic-vars").unwrap(), + }, + } +} + +#[test] +fn stale_worldline_base_coordinate_obstructs_before_dispatch() { + let worldline_id = worldline(3); + let current_coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(2)), + }; + + let obstruction = dispatch_request(1) + .validate_proposal_against_current(¤t_coordinate) + .unwrap_err(); + + assert_eq!(obstruction.kind, OpticObstructionKind::StaleBasis); + assert_eq!( + obstruction.coordinate, + Some(dispatch_request(1).base_coordinate) + ); +} + +#[test] +fn matching_worldline_base_coordinate_remains_dispatchable() { + let worldline_id = worldline(3); + let current_coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(2)), + }; + + dispatch_request(2) + .validate_proposal_against_current(¤t_coordinate) + .unwrap(); +} diff --git a/crates/warp-core/tests/optic_example_tests.rs b/crates/warp-core/tests/optic_example_tests.rs new file mode 100644 index 00000000..c60922a0 --- /dev/null +++ b/crates/warp-core/tests/optic_example_tests.rs @@ -0,0 +1,187 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Integration tests for the narrow worldline-head optic example. 
+ +#![allow(clippy::panic, clippy::unwrap_used)] + +use warp_core::{ + make_node_id, make_type_id, AdmissionLawId, CoordinateAt, EchoCoordinate, Engine, + EngineBuilder, GraphStore, IntentFamilyId, NodeRecord, ObservationPayload, ObservationService, + ObserveOpticResult, OpticActorId, OpticObstructionKind, OpticReadBudget, ProvenanceService, + ReadingBudgetPosture, SchedulerKind, WorldlineHeadOptic, WorldlineId, WorldlineRuntime, + WorldlineState, WorldlineTick, +}; + +struct OpticHarness { + runtime: WorldlineRuntime, + provenance: ProvenanceService, + engine: Engine, + worldline_id: WorldlineId, +} + +fn harness() -> OpticHarness { + let mut store = GraphStore::default(); + let root = make_node_id("root"); + store.insert_node( + root, + NodeRecord { + ty: make_type_id("world"), + }, + ); + + let engine = EngineBuilder::new(store, root) + .scheduler(SchedulerKind::Radix) + .workers(1) + .build(); + let worldline_id = WorldlineId::from_bytes(*engine.root_key().warp_id.as_bytes()); + let state = WorldlineState::try_from(engine.state().clone()).unwrap(); + let mut runtime = WorldlineRuntime::new(); + let mut provenance = ProvenanceService::new(); + provenance.register_worldline(worldline_id, &state).unwrap(); + runtime.register_worldline(worldline_id, state).unwrap(); + + OpticHarness { + runtime, + provenance, + engine, + worldline_id, + } +} + +fn optic(worldline_id: WorldlineId) -> WorldlineHeadOptic { + WorldlineHeadOptic::open( + worldline_id, + CoordinateAt::Frontier, + OpticActorId::from_bytes([3; 32]), + warp_core::OpticCapabilityId::from_bytes([4; 32]), + IntentFamilyId::from_bytes([5; 32]), + [6; 32], + ) + .unwrap() +} + +fn metadata_budget() -> OpticReadBudget { + OpticReadBudget { + max_bytes: Some(1024), + max_nodes: Some(8), + max_ticks: Some(4), + max_attachments: Some(0), + } +} + +#[test] +fn worldline_head_optic_example_reads_bounded_head() { + let harness = harness(); + let optic = optic(harness.worldline_id); + let request = 
optic.observe_head_request(metadata_budget()); + + let result = ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + request.clone(), + ); + + let reading = match result { + ObserveOpticResult::Reading(reading) => reading, + ObserveOpticResult::Obstructed(obstruction) => { + panic!("worldline head optic should read, got {obstruction:?}"); + } + }; + + assert_eq!(reading.read_identity.optic_id, optic.optic.optic_id); + assert_eq!(reading.read_identity.coordinate, request.coordinate); + assert_eq!( + reading.read_identity.projection_version, + request.projection_version + ); + assert!(matches!(reading.payload, ObservationPayload::Head(_))); + assert!(matches!( + reading.envelope.budget_posture, + ReadingBudgetPosture::Bounded { + max_payload_bytes: 1024, + .. + } + )); +} + +#[test] +fn worldline_head_optic_example_query_shape_obstructs_typed() { + let harness = harness(); + let optic = optic(harness.worldline_id); + let request = optic.observe_query_bytes_request(17, [9; 32], metadata_budget()); + + let result = ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + request, + ); + + let obstruction = match result { + ObserveOpticResult::Reading(reading) => { + panic!("query-shaped example optic should obstruct, got {reading:?}"); + } + ObserveOpticResult::Obstructed(obstruction) => obstruction, + }; + + assert_eq!( + obstruction.kind, + OpticObstructionKind::UnsupportedProjectionLaw + ); + assert_eq!(obstruction.optic_id, Some(optic.optic.optic_id)); +} + +#[test] +fn worldline_head_optic_example_dispatches_eint_with_explicit_base() { + let harness = harness(); + let optic = optic(harness.worldline_id); + let base_coordinate = EchoCoordinate::Worldline { + worldline_id: harness.worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(0)), + }; + let request = optic.dispatch_eint_v1_request( + base_coordinate.clone(), + warp_core::OpticCause { + actor: 
optic.capability.actor, + cause_hash: [10; 32], + label: Some("example eint proposal".to_owned()), + }, + AdmissionLawId::from_bytes([11; 32]), + echo_wasm_abi::pack_intent_v1(77, b"example-vars").unwrap(), + ); + + request + .validate_proposal_against_current(&base_coordinate) + .unwrap(); + assert_eq!(request.base_coordinate, base_coordinate); +} + +#[test] +fn worldline_head_optic_example_stale_base_obstructs() { + let harness = harness(); + let optic = optic(harness.worldline_id); + let request = optic.dispatch_eint_v1_request( + EchoCoordinate::Worldline { + worldline_id: harness.worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(0)), + }, + warp_core::OpticCause { + actor: optic.capability.actor, + cause_hash: [10; 32], + label: Some("stale example proposal".to_owned()), + }, + AdmissionLawId::from_bytes([11; 32]), + echo_wasm_abi::pack_intent_v1(77, b"example-vars").unwrap(), + ); + let current = EchoCoordinate::Worldline { + worldline_id: harness.worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(1)), + }; + + let obstruction = request + .validate_proposal_against_current(&current) + .unwrap_err(); + + assert_eq!(obstruction.kind, OpticObstructionKind::StaleBasis); +} diff --git a/crates/warp-core/tests/optic_live_tail_tests.rs b/crates/warp-core/tests/optic_live_tail_tests.rs new file mode 100644 index 00000000..69e2d578 --- /dev/null +++ b/crates/warp-core/tests/optic_live_tail_tests.rs @@ -0,0 +1,169 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Regression tests for live-tail honesty in optic read identities.
+ +#![allow(clippy::panic, clippy::unwrap_used)] + +use warp_core::{ + make_head_id, make_intent_kind, make_node_id, make_type_id, CoordinateAt, Engine, + EngineBuilder, GraphStore, IngressEnvelope, IngressTarget, IntentFamilyId, NodeRecord, + ObservationPayload, ObservationService, ObserveOpticResult, OpticActorId, OpticReadBudget, + ProvenanceService, SchedulerCoordinator, SchedulerKind, WitnessBasis, WorldlineHeadOptic, + WorldlineId, WorldlineRuntime, WorldlineState, +}; +use warp_core::{InboxPolicy, PlaybackMode, WriterHead, WriterHeadKey}; + +struct OpticHarness { + runtime: WorldlineRuntime, + provenance: ProvenanceService, + engine: Engine, + worldline_id: WorldlineId, +} + +fn harness() -> OpticHarness { + let mut store = GraphStore::default(); + let root = make_node_id("root"); + store.insert_node( + root, + NodeRecord { + ty: make_type_id("world"), + }, + ); + + let engine = EngineBuilder::new(store, root) + .scheduler(SchedulerKind::Radix) + .workers(1) + .build(); + let worldline_id = WorldlineId::from_bytes(*engine.root_key().warp_id.as_bytes()); + let state = WorldlineState::try_from(engine.state().clone()).unwrap(); + let mut runtime = WorldlineRuntime::new(); + let mut provenance = ProvenanceService::new(); + provenance.register_worldline(worldline_id, &state).unwrap(); + runtime.register_worldline(worldline_id, state).unwrap(); + runtime + .register_writer_head(WriterHead::with_routing( + WriterHeadKey { + worldline_id, + head_id: make_head_id("default"), + }, + PlaybackMode::Play, + InboxPolicy::AcceptAll, + None, + true, + )) + .unwrap(); + + OpticHarness { + runtime, + provenance, + engine, + worldline_id, + } +} + +fn commit(harness: &mut OpticHarness, label: &str) { + harness + .runtime + .ingest(IngressEnvelope::local_intent( + IngressTarget::DefaultWriter { + worldline_id: harness.worldline_id, + }, + make_intent_kind("echo.intent/live-tail-test"), + label.as_bytes().to_vec(), + )) + .unwrap(); + SchedulerCoordinator::super_tick( + &mut 
harness.runtime, + &mut harness.provenance, + &mut harness.engine, + ) + .unwrap(); +} + +fn optic(worldline_id: WorldlineId) -> WorldlineHeadOptic { + WorldlineHeadOptic::open( + worldline_id, + CoordinateAt::Frontier, + OpticActorId::from_bytes([3; 32]), + warp_core::OpticCapabilityId::from_bytes([4; 32]), + IntentFamilyId::from_bytes([5; 32]), + [6; 32], + ) + .unwrap() +} + +fn metadata_budget() -> OpticReadBudget { + OpticReadBudget { + max_bytes: Some(1024), + max_nodes: Some(8), + max_ticks: Some(4), + max_attachments: Some(0), + } +} + +#[test] +fn frontier_read_after_checkpoint_names_live_tail_witnesses() { + let mut harness = harness(); + let optic = optic(harness.worldline_id); + + commit(&mut harness, "checkpoint-basis"); + let checkpoint_state = harness + .runtime + .worldlines() + .get(&harness.worldline_id) + .unwrap() + .state(); + let checkpoint = harness + .provenance + .checkpoint(harness.worldline_id, checkpoint_state) + .unwrap(); + let checkpoint_reading = match ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + optic.observe_head_request(metadata_budget()), + ) { + ObserveOpticResult::Reading(reading) => reading, + ObserveOpticResult::Obstructed(obstruction) => { + panic!("checkpoint optic read should succeed, got {obstruction:?}"); + } + }; + + commit(&mut harness, "live-tail"); + let live_reading = match ObservationService::observe_optic( + &harness.runtime, + &harness.provenance, + &harness.engine, + optic.observe_head_request(metadata_budget()), + ) { + ObserveOpticResult::Reading(reading) => reading, + ObserveOpticResult::Obstructed(obstruction) => { + panic!("live-tail optic read should succeed, got {obstruction:?}"); + } + }; + + assert_ne!( + checkpoint_reading.read_identity.read_identity_hash, + live_reading.read_identity.read_identity_hash + ); + assert!(matches!(live_reading.payload, ObservationPayload::Head(_))); + match live_reading.read_identity.witness_basis { + 
WitnessBasis::CheckpointPlusTail { + checkpoint_ref, + checkpoint_hash, + tail_witness_refs, + tail_digest, + } => { + assert_eq!(checkpoint_ref.worldline_id, harness.worldline_id); + assert_eq!(checkpoint_ref.worldline_tick.as_u64(), 0); + assert_eq!(checkpoint_hash, checkpoint.state_hash); + assert_eq!(tail_witness_refs.len(), 1); + assert_eq!(tail_witness_refs[0].worldline_id, harness.worldline_id); + assert_eq!(tail_witness_refs[0].worldline_tick.as_u64(), 1); + assert_ne!(tail_digest, [0; 32]); + } + witness_basis => { + panic!("expected checkpoint-plus-tail witness basis, got {witness_basis:?}"); + } + } +} diff --git a/crates/warp-core/tests/optic_retention_tests.rs b/crates/warp-core/tests/optic_retention_tests.rs new file mode 100644 index 00000000..9a72674c --- /dev/null +++ b/crates/warp-core/tests/optic_retention_tests.rs @@ -0,0 +1,217 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +//! Regression tests for semantic retained-reading identity. + +use warp_core::{ + AttachmentDescentPolicy, CoordinateAt, EchoCoordinate, EchoOptic, IntentFamilyId, + OpticAperture, OpticApertureShape, OpticCapabilityId, OpticFocus, ProjectionVersion, + ReadIdentity, ReadingBudgetPosture, ReadingResidualPosture, ReadingRightsPosture, + RetainReadingRequest, RetainedReadingCache, RetainedReadingCodecId, RetainedReadingKey, + RevealReadingRequest, WitnessBasis, WorldlineId, WorldlineTick, +}; +use warp_core::{OpticObstructionKind, OpticReadBudget, ProvenanceRef}; + +fn worldline(seed: u8) -> WorldlineId { + WorldlineId::from_bytes([seed; 32]) +} + +fn provenance(seed: u8, tick: u64) -> ProvenanceRef { + ProvenanceRef { + worldline_id: worldline(seed), + worldline_tick: WorldlineTick::from_raw(tick), + commit_hash: [seed.wrapping_add(1); 32], + } +} + +fn intent_family(seed: u8) -> IntentFamilyId { + IntentFamilyId::from_bytes([seed; 32]) +} + +fn capability(seed: u8) -> OpticCapabilityId { + OpticCapabilityId::from_bytes([seed; 32]) +} + +fn 
retained_codec(seed: u8) -> RetainedReadingCodecId { + RetainedReadingCodecId::from_bytes([seed; 32]) +} + +fn coordinate(seed: u8, tick: u64) -> EchoCoordinate { + EchoCoordinate::Worldline { + worldline_id: worldline(seed), + at: CoordinateAt::Tick(WorldlineTick::from_raw(tick)), + } +} + +fn aperture(shape: OpticApertureShape) -> OpticAperture { + OpticAperture { + shape, + budget: OpticReadBudget { + max_bytes: Some(256), + max_nodes: Some(8), + max_ticks: Some(1), + max_attachments: Some(0), + }, + attachment_descent: AttachmentDescentPolicy::BoundaryOnly, + } +} + +fn witness_basis(seed: u8, tick: u64) -> WitnessBasis { + let reference = provenance(seed, tick); + WitnessBasis::ResolvedCommit { + reference, + state_root: [seed.wrapping_add(2); 32], + commit_hash: reference.commit_hash, + } +} + +fn read_identity(seed: u8, coordinate: EchoCoordinate, aperture: OpticAperture) -> ReadIdentity { + let focus = OpticFocus::Worldline { + worldline_id: worldline(seed), + }; + let optic = EchoOptic::new( + focus.clone(), + coordinate.clone(), + ProjectionVersion::from_raw(1), + None, + intent_family(seed), + capability(seed), + ); + + ReadIdentity::new( + optic.optic_id, + &focus, + coordinate, + &aperture, + ProjectionVersion::from_raw(1), + None, + witness_basis(seed, 1), + ReadingRightsPosture::KernelPublic, + ReadingBudgetPosture::Bounded { + max_payload_bytes: 256, + payload_bytes: 12, + max_witness_refs: 1, + witness_refs: 1, + }, + ReadingResidualPosture::Complete, + ) +} + +#[test] +fn same_content_under_different_coordinate_gets_distinct_retained_keys() { + let mut cache = RetainedReadingCache::default(); + let payload = b"same reading bytes".to_vec(); + let codec_id = retained_codec(7); + let first_identity = read_identity(1, coordinate(1, 10), aperture(OpticApertureShape::Head)); + let second_identity = read_identity(1, coordinate(1, 11), aperture(OpticApertureShape::Head)); + + let first = cache.retain_reading(RetainReadingRequest { + read_identity: 
first_identity, + codec_id, + payload: payload.clone(), + }); + let second = cache.retain_reading(RetainReadingRequest { + read_identity: second_identity, + codec_id, + payload, + }); + + assert_eq!( + first.descriptor.content_hash, + second.descriptor.content_hash + ); + assert_ne!(first.descriptor.key, second.descriptor.key); + let same_content_keys = cache.keys_for_content_hash(first.descriptor.content_hash); + assert_eq!(same_content_keys.len(), 2); + assert!(same_content_keys.contains(&first.descriptor.key)); + assert!(same_content_keys.contains(&second.descriptor.key)); +} + +#[test] +fn same_content_under_different_aperture_gets_distinct_retained_keys() { + let mut cache = RetainedReadingCache::default(); + let payload = b"same reading bytes".to_vec(); + let coordinate = coordinate(2, 10); + let codec_id = retained_codec(8); + let head_identity = read_identity(2, coordinate.clone(), aperture(OpticApertureShape::Head)); + let snapshot_identity = read_identity( + 2, + coordinate, + aperture(OpticApertureShape::SnapshotMetadata), + ); + + let head = cache.retain_reading(RetainReadingRequest { + read_identity: head_identity, + codec_id, + payload: payload.clone(), + }); + let snapshot = cache.retain_reading(RetainReadingRequest { + read_identity: snapshot_identity, + codec_id, + payload, + }); + + assert_eq!( + head.descriptor.content_hash, + snapshot.descriptor.content_hash + ); + assert_ne!(head.descriptor.key, snapshot.descriptor.key); +} + +#[test] +fn content_hash_only_reveal_is_a_lookup_miss() -> Result<(), String> { + let mut cache = RetainedReadingCache::default(); + let payload = b"retained payload".to_vec(); + let identity = read_identity(3, coordinate(3, 10), aperture(OpticApertureShape::Head)); + let retained = cache.retain_reading(RetainReadingRequest { + read_identity: identity.clone(), + codec_id: retained_codec(9), + payload, + }); + let content_hash_as_key = RetainedReadingKey::from_bytes(retained.descriptor.content_hash); + + let err = cache + 
.reveal_reading(&RevealReadingRequest { + key: content_hash_as_key, + read_identity: identity, + }) + .err() + .ok_or_else(|| "content-hash-only reveal unexpectedly succeeded".to_owned())?; + + assert_eq!(err.kind, OpticObstructionKind::MissingRetainedReading); + Ok(()) +} + +#[test] +fn reveal_requires_matching_read_identity() -> Result<(), String> { + let mut cache = RetainedReadingCache::default(); + let payload = b"retained payload".to_vec(); + let identity = read_identity(4, coordinate(4, 10), aperture(OpticApertureShape::Head)); + let wrong_identity = read_identity(4, coordinate(4, 11), aperture(OpticApertureShape::Head)); + let retained = cache.retain_reading(RetainReadingRequest { + read_identity: identity.clone(), + codec_id: retained_codec(10), + payload: payload.clone(), + }); + + let err = cache + .reveal_reading(&RevealReadingRequest { + key: retained.descriptor.key, + read_identity: wrong_identity, + }) + .err() + .ok_or_else(|| "mismatched read identity unexpectedly revealed payload".to_owned())?; + + assert_eq!(err.kind, OpticObstructionKind::MissingRetainedReading); + + let revealed = cache + .reveal_reading(&RevealReadingRequest { + key: retained.descriptor.key, + read_identity: identity, + }) + .ok() + .ok_or_else(|| "matching read identity failed to reveal payload".to_owned())?; + + assert_eq!(revealed.descriptor, retained.descriptor); + assert_eq!(revealed.payload, payload); + Ok(()) +} diff --git a/crates/warp-core/tests/snapshot_restore_fuzz.rs b/crates/warp-core/tests/snapshot_restore_fuzz.rs new file mode 100644 index 00000000..85c13e46 --- /dev/null +++ b/crates/warp-core/tests/snapshot_restore_fuzz.rs @@ -0,0 +1,606 @@ +// SPDX-License-Identifier: Apache-2.0 +// © James Ross Ω FLYING•ROBOTS +#![allow( + clippy::expect_used, + clippy::panic, + clippy::too_many_lines, + clippy::unwrap_used, + clippy::cast_possible_truncation +)] +//! Snapshot/restore fuzz harness for M004. +//! +//! 
The harness snapshots a deterministic worldline at pseudo-random ticks, +//! restores the materialized graph from canonical WSC bytes, replays the suffix +//! from recorded provenance, and compares the final state root with the +//! uninterrupted run. JSON output is rendered by hand; it is a diagnostic test +//! artifact, not a causal encoding boundary. + +mod common; + +use std::fmt::Write as _; + +use bytes::Bytes; +use common::{append_fixture_entry, hex32, register_fixture_worldline, test_warp_id, XorShift64}; +use warp_core::wsc::types::{AttRow, WarpDirEntry, WscHeader}; +use warp_core::wsc::{build_one_warp_input, validate_wsc, write_wsc_one_warp, WarpView, WscFile}; +use warp_core::{ + compute_commit_hash_v2, make_edge_id, make_node_id, make_type_id, AtomPayload, AttachmentKey, + AttachmentValue, EdgeId, EdgeRecord, GlobalTick, GraphStore, Hash, HashTriplet, + LocalProvenanceStore, NodeId, NodeKey, NodeRecord, ProvenanceStore, TickCommitStatus, TypeId, + WarpId, WarpOp, WarpTickPatchV1, WorldlineId, WorldlineState, WorldlineTick, + WorldlineTickHeaderV1, WorldlineTickPatchV1, +}; + +const TOTAL_TICKS: u64 = 500; +const FUZZ_ITERATIONS: usize = 50; +const SNAPSHOT_SCHEMA_HASH: Hash = [0x5A; 32]; + +type TestResult = Result; + +#[derive(Clone, Copy, Debug)] +enum SnapshotFormat { + CanonicalWscV1, +} + +impl SnapshotFormat { + fn name(self) -> &'static str { + match self { + Self::CanonicalWscV1 => "canonical_wsc_v1", + } + } +} + +struct Simulation { + provenance: LocalProvenanceStore, + warp_id: WarpId, + worldline_id: WorldlineId, + expected_roots: Vec, + states_by_tick: Vec, +} + +#[derive(Clone)] +struct SnapshotRestoreIteration { + iteration: usize, + format: SnapshotFormat, + snapshot_tick: u64, + restore_tick: u64, + comparison_tick: u64, + restored_state_root: Hash, + expected_state_root: Hash, + actual_state_root: Hash, +} + +impl SnapshotRestoreIteration { + fn matches(&self) -> bool { + self.expected_state_root == self.actual_state_root + } +} + 
+struct SnapshotRestoreReport { + simulation_ticks: u64, + iterations: Vec, +} + +impl SnapshotRestoreReport { + fn divergence_count(&self) -> usize { + self.iterations + .iter() + .filter(|iteration| !iteration.matches()) + .count() + } + + fn to_json(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "{{"); + let _ = writeln!(out, " \"simulation_ticks\": {},", self.simulation_ticks); + let _ = writeln!(out, " \"iteration_count\": {},", self.iterations.len()); + let _ = writeln!(out, " \"divergence_count\": {},", self.divergence_count()); + let _ = writeln!(out, " \"iterations\": ["); + for (index, iteration) in self.iterations.iter().enumerate() { + let comma = if index + 1 == self.iterations.len() { + "" + } else { + "," + }; + let _ = writeln!(out, " {{"); + let _ = writeln!(out, " \"iteration\": {},", iteration.iteration); + let _ = writeln!(out, " \"format\": \"{}\",", iteration.format.name()); + let _ = writeln!(out, " \"snapshot_tick\": {},", iteration.snapshot_tick); + let _ = writeln!(out, " \"restore_tick\": {},", iteration.restore_tick); + let _ = writeln!( + out, + " \"comparison_tick\": {},", + iteration.comparison_tick + ); + let _ = writeln!( + out, + " \"restored_state_root\": \"{}\",", + hex32(&iteration.restored_state_root) + ); + let _ = writeln!( + out, + " \"expected_state_root\": \"{}\",", + hex32(&iteration.expected_state_root) + ); + let _ = writeln!( + out, + " \"actual_state_root\": \"{}\",", + hex32(&iteration.actual_state_root) + ); + let _ = writeln!(out, " \"match\": {}", iteration.matches()); + let _ = writeln!(out, " }}{comma}"); + } + let _ = writeln!(out, " ]"); + let _ = writeln!(out, "}}"); + out + } +} + +fn wt(raw: u64) -> WorldlineTick { + WorldlineTick::from_raw(raw) +} + +fn snapshot_worldline_id() -> WorldlineId { + WorldlineId::from_bytes([4u8; 32]) +} + +fn root_key(warp_id: WarpId) -> NodeKey { + NodeKey { + warp_id, + local_id: make_node_id("root"), + } +} + +fn snapshot_fuzz_patch(warp_id: 
WarpId, tick: u64) -> WorldlineTickPatchV1 { + let root = root_key(warp_id); + let child = make_node_id(&format!("snapshot-fuzz/node-{tick}")); + let edge = make_edge_id(&format!("snapshot-fuzz/edge-{tick}")); + let edge_ty = make_type_id(&format!("snapshot-fuzz/link-{}", tick % 7)); + let child_ty = make_type_id(&format!("snapshot-fuzz/child-{}", tick % 11)); + let root_ty = make_type_id(&format!("snapshot-fuzz/root-{tick}")); + let attachment_ty = make_type_id("snapshot-fuzz/root-marker"); + + let mut marker_bytes = Vec::with_capacity(16); + marker_bytes.extend_from_slice(&tick.to_le_bytes()); + marker_bytes.extend_from_slice(&tick.rotate_left(17).to_le_bytes()); + let marker = AtomPayload::new(attachment_ty, Bytes::from(marker_bytes)); + + let ops = vec![ + WarpOp::UpsertNode { + node: root, + record: NodeRecord { ty: root_ty }, + }, + WarpOp::UpsertNode { + node: NodeKey { + warp_id, + local_id: child, + }, + record: NodeRecord { ty: child_ty }, + }, + WarpOp::UpsertEdge { + warp_id, + record: EdgeRecord { + id: edge, + from: root.local_id, + to: child, + ty: edge_ty, + }, + }, + WarpOp::SetAttachment { + key: AttachmentKey::node_alpha(root), + value: Some(AttachmentValue::Atom(marker)), + }, + ]; + + let header = WorldlineTickHeaderV1 { + commit_global_tick: GlobalTick::from_raw(tick + 1), + policy_id: 0, + rule_pack_id: [0u8; 32], + plan_digest: [0u8; 32], + decision_digest: [0u8; 32], + rewrites_digest: [0u8; 32], + }; + let patch_digest = WarpTickPatchV1::new( + header.policy_id, + header.rule_pack_id, + TickCommitStatus::Committed, + Vec::new(), + Vec::new(), + ops.clone(), + ) + .digest(); + + WorldlineTickPatchV1 { + header, + warp_id, + ops, + in_slots: Vec::new(), + out_slots: Vec::new(), + patch_digest, + } +} + +fn build_simulation(total_ticks: u64) -> Simulation { + let warp_id = test_warp_id(); + let worldline_id = snapshot_worldline_id(); + let initial_state = common::create_initial_worldline_state(warp_id); + let mut provenance = 
LocalProvenanceStore::new(); + register_fixture_worldline(&mut provenance, worldline_id, &initial_state) + .expect("fixture worldline should register"); + + let mut expected_roots = vec![initial_state.state_root()]; + let mut states_by_tick = vec![initial_state.clone()]; + let mut current_state = initial_state.clone(); + let mut parents: Vec = Vec::new(); + + for tick in 0..total_ticks { + let patch = snapshot_fuzz_patch(warp_id, tick); + patch + .apply_to_worldline_state(&mut current_state) + .expect("generated fuzz patch should apply"); + + let state_root = current_state.state_root(); + let commit_hash = compute_commit_hash_v2( + &state_root, + &parents, + &patch.patch_digest, + patch.policy_id(), + ); + let triplet = HashTriplet { + state_root, + patch_digest: patch.patch_digest, + commit_hash, + }; + append_fixture_entry(&mut provenance, worldline_id, patch, triplet, Vec::new()) + .expect("fixture entry should append"); + + parents = vec![commit_hash]; + expected_roots.push(state_root); + states_by_tick.push(current_state.clone()); + } + + Simulation { + provenance, + warp_id, + worldline_id, + expected_roots, + states_by_tick, + } +} + +fn apply_suffix_from( + provenance: &LocalProvenanceStore, + worldline_id: WorldlineId, + mut state: WorldlineState, + start_tick: u64, + target_tick: u64, +) -> TestResult { + for raw_tick in start_tick..target_tick { + let tick = wt(raw_tick); + let entry = provenance + .entry(worldline_id, tick) + .map_err(|error| format!("missing provenance at tick {raw_tick}: {error}"))?; + let patch = entry + .patch + .as_ref() + .ok_or_else(|| format!("missing replay patch at tick {raw_tick}"))?; + + patch + .apply_to_worldline_state(&mut state) + .map_err(|error| format!("restore suffix apply failed at tick {raw_tick}: {error}"))?; + + let actual_state_root = state.state_root(); + if actual_state_root != entry.expected.state_root { + return Err(format!( + "state_root mismatch at tick {raw_tick}: expected {}, got {}", + 
hex32(&entry.expected.state_root), + hex32(&actual_state_root) + )); + } + + let parents = entry + .parents + .iter() + .map(|parent| parent.commit_hash) + .collect::>(); + let actual_commit_hash = compute_commit_hash_v2( + &actual_state_root, + &parents, + &entry.expected.patch_digest, + patch.policy_id(), + ); + if actual_commit_hash != entry.expected.commit_hash { + return Err(format!( + "commit_hash mismatch at tick {raw_tick}: expected {}, got {}", + hex32(&entry.expected.commit_hash), + hex32(&actual_commit_hash) + )); + } + } + Ok(state) +} + +fn materialize_state_at(simulation: &Simulation, tick: u64) -> TestResult { + simulation + .states_by_tick + .get(tick as usize) + .cloned() + .ok_or_else(|| format!("snapshot tick {tick} is outside materialized states")) +} + +fn encode_snapshot( + state: &WorldlineState, + warp_id: WarpId, + tick: u64, + format: SnapshotFormat, +) -> TestResult> { + match format { + SnapshotFormat::CanonicalWscV1 => { + let store = state + .store(&warp_id) + .ok_or_else(|| format!("snapshot state missing warp {warp_id:?}"))?; + let input = build_one_warp_input(store, state.root().local_id); + write_wsc_one_warp(&input, SNAPSHOT_SCHEMA_HASH, tick) + .map_err(|error| format!("WSC snapshot encode failed: {error}")) + } + } +} + +fn decode_attachment(view: &WarpView<'_>, row: &AttRow) -> TestResult { + if row.is_atom() { + let blob = view + .blob_for_attachment(row) + .ok_or_else(|| "atom attachment blob range is invalid".to_string())?; + return Ok(AttachmentValue::Atom(AtomPayload::new( + TypeId(row.type_or_warp), + Bytes::copy_from_slice(blob), + ))); + } + + if row.is_descend() { + return Ok(AttachmentValue::Descend(WarpId(row.type_or_warp))); + } + + Err(format!("unknown attachment tag {}", row.tag)) +} + +fn decode_single_attachment( + view: &WarpView<'_>, + owner: &'static str, + rows: &[AttRow], +) -> TestResult> { + match rows { + [] => Ok(None), + [row] => decode_attachment(view, row).map(Some), + _ => Err(format!("{owner} 
carried more than one attachment row")), + } +} + +fn restore_snapshot(bytes: &[u8], expected_tick: u64) -> TestResult { + let file = WscFile::from_bytes(bytes.to_vec()) + .map_err(|error| format!("WSC snapshot header restore failed: {error}"))?; + validate_wsc(&file).map_err(|error| format!("WSC snapshot validation failed: {error}"))?; + if file.tick() != expected_tick { + return Err(format!( + "WSC snapshot tick mismatch: expected {expected_tick}, got {}", + file.tick() + )); + } + if file.warp_count() != 1 { + return Err(format!( + "expected exactly one WARP in snapshot, got {}", + file.warp_count() + )); + } + + let view = file + .warp_view(0) + .map_err(|error| format!("WSC warp restore failed: {error}"))?; + let warp_id = WarpId(*view.warp_id()); + let root = NodeId(*view.root_node_id()); + let mut store = GraphStore::new(warp_id); + + for node in view.nodes() { + store.insert_node( + NodeId(node.node_id), + NodeRecord { + ty: TypeId(node.node_type), + }, + ); + } + for edge in view.edges() { + store.insert_edge( + NodeId(edge.from_node_id), + EdgeRecord { + id: EdgeId(edge.edge_id), + from: NodeId(edge.from_node_id), + to: NodeId(edge.to_node_id), + ty: TypeId(edge.edge_type), + }, + ); + } + + for (index, node) in view.nodes().iter().enumerate() { + let rows = view.node_attachments(index); + if let Some(value) = decode_single_attachment(&view, "node", rows)? { + store.set_node_attachment(NodeId(node.node_id), Some(value)); + } + } + for (index, edge) in view.edges().iter().enumerate() { + let rows = view.edge_attachments(index); + if let Some(value) = decode_single_attachment(&view, "edge", rows)? 
{ + store.set_edge_attachment(EdgeId(edge.edge_id), Some(value)); + } + } + + WorldlineState::from_root_store(store, root) + .map_err(|error| format!("restored WSC graph is not a worldline state: {error}")) +} + +fn run_iteration_from_snapshot_bytes( + simulation: &Simulation, + iteration: usize, + format: SnapshotFormat, + snapshot_tick: u64, + comparison_tick: u64, + bytes: &[u8], +) -> TestResult { + let restored = restore_snapshot(bytes, snapshot_tick)?; + let restored_state_root = restored.state_root(); + let continued = apply_suffix_from( + &simulation.provenance, + simulation.worldline_id, + restored, + snapshot_tick, + comparison_tick, + )?; + let actual_state_root = continued.state_root(); + let expected_state_root = simulation + .expected_roots + .get(comparison_tick as usize) + .copied() + .ok_or_else(|| format!("comparison tick {comparison_tick} is outside expected roots"))?; + + Ok(SnapshotRestoreIteration { + iteration, + format, + snapshot_tick, + restore_tick: snapshot_tick, + comparison_tick, + restored_state_root, + expected_state_root, + actual_state_root, + }) +} + +fn run_iteration( + simulation: &Simulation, + iteration: usize, + snapshot_tick: u64, + comparison_tick: u64, + format: SnapshotFormat, +) -> TestResult { + let snapshot_state = materialize_state_at(simulation, snapshot_tick)?; + let bytes = encode_snapshot(&snapshot_state, simulation.warp_id, snapshot_tick, format)?; + run_iteration_from_snapshot_bytes( + simulation, + iteration, + format, + snapshot_tick, + comparison_tick, + &bytes, + ) +} + +fn iteration_ticks(iteration: usize, rng: &mut XorShift64) -> (u64, u64) { + match iteration { + // Genesis snapshot; replay the whole suffix. + 0 => (0, TOTAL_TICKS), + // Last-tick snapshot; restore and compare immediately. 
+ 1 => (TOTAL_TICKS, TOTAL_TICKS), + _ => { + let snapshot_tick = rng.gen_range_usize((TOTAL_TICKS + 1) as usize) as u64; + let remaining = (TOTAL_TICKS - snapshot_tick) as usize; + let advance = rng.gen_range_usize(remaining + 1) as u64; + (snapshot_tick, snapshot_tick + advance) + } + } +} + +fn run_snapshot_restore_fuzz() -> TestResult { + let simulation = build_simulation(TOTAL_TICKS); + let mut rng = XorShift64::new(0xA11C_EC0F_FEE0_0004); + let mut iterations = Vec::with_capacity(FUZZ_ITERATIONS); + + for iteration in 0..FUZZ_ITERATIONS { + let (snapshot_tick, comparison_tick) = iteration_ticks(iteration, &mut rng); + iterations.push(run_iteration( + &simulation, + iteration, + snapshot_tick, + comparison_tick, + SnapshotFormat::CanonicalWscV1, + )?); + } + + Ok(SnapshotRestoreReport { + simulation_ticks: TOTAL_TICKS, + iterations, + }) +} + +fn corrupt_first_edge_id_byte(bytes: &mut [u8]) -> TestResult<()> { + let header_size = std::mem::size_of::(); + let dir_size = std::mem::size_of::(); + if bytes.len() < header_size { + return Err("WSC bytes are shorter than the header".to_string()); + } + let header = bytemuck::from_bytes::(&bytes[..header_size]); + let dir_start = header.warp_dir_off() as usize; + let dir_end = dir_start + dir_size; + if bytes.len() < dir_end { + return Err("WSC bytes are shorter than the WARP directory".to_string()); + } + let dir = bytemuck::from_bytes::(&bytes[dir_start..dir_end]); + if u64::from_le(dir.edges_len_le) == 0 { + return Err("WSC snapshot has no edge row to corrupt".to_string()); + } + let edge_start = u64::from_le(dir.edges_off_le) as usize; + let byte = bytes + .get_mut(edge_start) + .ok_or_else(|| "first edge row offset is outside WSC bytes".to_string())?; + *byte ^= 0x80; + Ok(()) +} + +#[test] +fn snapshot_restore_fuzz_matches_uninterrupted_run() { + let report = run_snapshot_restore_fuzz().expect("snapshot/restore fuzz should run"); + let json = report.to_json(); + + assert_eq!(report.iterations.len(), 
FUZZ_ITERATIONS, "{json}"); + assert_eq!(report.divergence_count(), 0, "{json}"); + assert!(json.contains("\"iteration_count\": 50")); + assert!(json.contains("\"format\": \"canonical_wsc_v1\"")); + assert!(json.contains("\"snapshot_tick\":")); + assert!(json.contains("\"restore_tick\":")); + assert!(json.contains("\"comparison_tick\":")); + assert!(json.contains("\"expected_state_root\":")); + assert!(json.contains("\"actual_state_root\":")); + assert!(json.contains("\"match\": true")); +} + +#[test] +fn corrupted_snapshot_byte_fails_restore_or_reports_divergence() { + let simulation = build_simulation(32); + let snapshot_tick = 12; + let comparison_tick = 32; + let snapshot_state = + materialize_state_at(&simulation, snapshot_tick).expect("snapshot materialization"); + let mut bytes = encode_snapshot( + &snapshot_state, + simulation.warp_id, + snapshot_tick, + SnapshotFormat::CanonicalWscV1, + ) + .expect("snapshot encode"); + corrupt_first_edge_id_byte(&mut bytes).expect("snapshot should contain an edge to corrupt"); + + match run_iteration_from_snapshot_bytes( + &simulation, + 0, + SnapshotFormat::CanonicalWscV1, + snapshot_tick, + comparison_tick, + &bytes, + ) { + Err(_) => {} + Ok(iteration) => { + assert!( + !iteration.matches(), + "corrupted snapshot unexpectedly matched uninterrupted root: {}", + hex32(&iteration.actual_state_root) + ); + } + } +} diff --git a/crates/warp-wasm/README.md b/crates/warp-wasm/README.md index a65e2d6a..d78dbbfc 100644 --- a/crates/warp-wasm/README.md +++ b/crates/warp-wasm/README.md @@ -13,7 +13,7 @@ See the repository root `README.md` for the full overview. Echo’s deterministic wire protocol can be used from JavaScript/TypeScript in web-based tools and playgrounds. 
- Exposes the current observation-first and intent-shaped control surface - (`ABI_VERSION` 8 in `echo-wasm-abi`): `observe(...)` is the only public + (`ABI_VERSION` 9 in `echo-wasm-abi`): `observe(...)` is the only public world-state read export, `scheduler_status()` is the read-only scheduler metadata export, and `dispatch_intent(...)` is the write/control ingress. The current ABI also publishes strand settlement comparison, planning, diff --git a/crates/warp-wasm/src/lib.rs b/crates/warp-wasm/src/lib.rs index 0f0855fe..98fee385 100644 --- a/crates/warp-wasm/src/lib.rs +++ b/crates/warp-wasm/src/lib.rs @@ -30,7 +30,8 @@ use wasm_bindgen::JsValue; #[cfg(feature = "engine")] use echo_wasm_abi::kernel_port::HeadInfo; use echo_wasm_abi::kernel_port::{ - self, AbiError, ErrEnvelope, KernelPort, ObservationRequest, OkEnvelope, SettlementRequest, + self, AbiError, DispatchOpticIntentRequest, ErrEnvelope, KernelPort, ObservationRequest, + ObserveOpticRequest, OkEnvelope, SettlementRequest, }; use std::cell::RefCell; @@ -57,6 +58,42 @@ pub fn install_kernel(kernel: Box) { }); } +/// Dispatch intent bytes through the installed kernel and return a CBOR +/// success/error envelope as raw bytes. +/// +/// This is the native Rust equivalent of the `dispatch_intent` WASM export. It +/// keeps generated-contract smoke tests off `js_sys::Uint8Array` while +/// exercising the same installed-kernel envelope contract. +pub fn dispatch_intent_cbor(intent_bytes: &[u8]) -> Vec { + encode_result_bytes(with_kernel(|k| k.dispatch_intent(intent_bytes))) +} + +/// Observe through the installed kernel and return a CBOR success/error +/// envelope as raw bytes. +/// +/// `request_bytes` must decode as canonical-CBOR [`ObservationRequest`]. This +/// mirrors the `observe` WASM export without requiring JavaScript bindings in +/// native tests. 
+pub fn observe_cbor(request_bytes: &[u8]) -> Vec { + let request = match echo_wasm_abi::decode_cbor::(request_bytes) { + Ok(request) => request, + Err(err) => { + return encode_err_bytes(&AbiError { + code: kernel_port::error_codes::INVALID_PAYLOAD, + message: format!("invalid observation request payload: {err}"), + }) + } + }; + encode_result_bytes(with_kernel_ref(|k| k.observe(request))) +} + +/// Return installed registry metadata as a CBOR success/error envelope. +/// +/// This is the native Rust equivalent of the `get_registry_info` WASM export. +pub fn get_registry_info_cbor() -> Vec { + encode_result_bytes(with_kernel_ref(|k| Ok(k.registry_info()))) +} + /// Remove any installed kernel from the WASM boundary. #[cfg(feature = "engine")] fn clear_kernel() { @@ -104,10 +141,14 @@ where /// Encode a successful result as a CBOR Uint8Array with `{ ok: true, ...data }`. fn encode_ok(value: &T) -> Uint8Array { + bytes_to_uint8array(&encode_ok_bytes(value)) +} + +fn encode_ok_bytes(value: &T) -> Vec { let envelope = OkEnvelope::new(value); match echo_wasm_abi::encode_cbor(&envelope) { - Ok(bytes) => bytes_to_uint8array(&bytes), - Err(_) => encode_err_raw( + Ok(bytes) => bytes, + Err(_) => encode_err_raw_bytes( kernel_port::error_codes::CODEC_ERROR, "failed to encode response", ), @@ -116,16 +157,21 @@ fn encode_ok(value: &T) -> Uint8Array { /// Encode an error as a CBOR Uint8Array with `{ ok: false, code, message }`. fn encode_err(err: &AbiError) -> Uint8Array { - encode_err_raw(err.code, &err.message) + bytes_to_uint8array(&encode_err_bytes(err)) +} + +fn encode_err_bytes(err: &AbiError) -> Vec { + encode_err_raw_bytes(err.code, &err.message) } /// Low-level error encoding that cannot itself fail (falls back to empty array). 
fn encode_err_raw(code: u32, message: &str) -> Uint8Array { + bytes_to_uint8array(&encode_err_raw_bytes(code, message)) +} + +fn encode_err_raw_bytes(code: u32, message: &str) -> Vec { let envelope = ErrEnvelope::new(code, message.into()); - match echo_wasm_abi::encode_cbor(&envelope) { - Ok(bytes) => bytes_to_uint8array(&bytes), - Err(_) => Uint8Array::new_with_length(0), - } + echo_wasm_abi::encode_cbor(&envelope).unwrap_or_default() } /// Encode a `Result` into a CBOR Uint8Array envelope. @@ -136,6 +182,13 @@ fn encode_result(result: Result) -> Uint8Array } } +fn encode_result_bytes(result: Result) -> Vec { + match result { + Ok(ref val) => encode_ok_bytes(val), + Err(ref err) => encode_err_bytes(err), + } +} + /// Helper to convert a byte slice into a JS `Uint8Array`. /// /// WASM linear memory is 32-bit, so `bytes.len()` is guaranteed to fit in u32 @@ -245,6 +298,40 @@ pub fn dispatch_intent(intent_bytes: &[u8]) -> Uint8Array { encode_result(with_kernel(|k| k.dispatch_intent(intent_bytes))) } +/// Propose an intent through an explicit optic dispatch request. +/// +/// The request bytes must decode as canonical-CBOR `DispatchOpticIntentRequest`. +#[wasm_bindgen] +pub fn dispatch_optic_intent(request_bytes: &[u8]) -> Uint8Array { + let request = match echo_wasm_abi::decode_cbor::(request_bytes) { + Ok(request) => request, + Err(err) => { + return encode_err(&AbiError { + code: kernel_port::error_codes::INVALID_PAYLOAD, + message: format!("invalid optic dispatch request payload: {err}"), + }) + } + }; + encode_result(with_kernel(|k| k.dispatch_optic_intent(request))) +} + +/// Observe through an explicit optic request. +/// +/// The request bytes must decode as canonical-CBOR `ObserveOpticRequest`. 
+#[wasm_bindgen] +pub fn observe_optic(request_bytes: &[u8]) -> Uint8Array { + let request = match echo_wasm_abi::decode_cbor::(request_bytes) { + Ok(request) => request, + Err(err) => { + return encode_err(&AbiError { + code: kernel_port::error_codes::INVALID_PAYLOAD, + message: format!("invalid optic observe request payload: {err}"), + }) + } + }; + encode_result(with_kernel_ref(|k| k.observe_optic(request))) +} + /// Observe a worldline at an explicit coordinate, frame, and projection. /// /// The request bytes must decode as canonical-CBOR `ObservationRequest`. @@ -654,6 +741,7 @@ mod init_tests { observer_plan: ReadingObserverPlan::Builtin { plan: BuiltinObserverPlan::CommitBoundaryHead, }, + observer_instance: None, observer_basis: ReadingObserverBasis::CommitBoundary, witness_refs: vec![ReadingWitnessRef::EmptyFrontier { worldline_id: WorldlineId::from_bytes([9; 32]), @@ -868,14 +956,15 @@ mod init_tests { fn neighborhood_observation_uses_installed_kernel() { clear_kernel(); install_kernel(Box::new(StubKernel)); - let request = ObservationRequest { - coordinate: kernel_port::ObservationCoordinate { + let request = ObservationRequest::builtin_one_shot( + kernel_port::ObservationCoordinate { worldline_id: WorldlineId::from_bytes([9; 32]), at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Head, - }; + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ) + .unwrap(); let site = with_kernel_ref(|k| k.observe_neighborhood_site(request)).unwrap(); assert_eq!(site.plurality, SitePlurality::Singleton); assert_eq!(site.participants.len(), 1); diff --git a/crates/warp-wasm/src/warp_kernel.rs b/crates/warp-wasm/src/warp_kernel.rs index e313be0a..761b76bd 100644 --- a/crates/warp-wasm/src/warp_kernel.rs +++ b/crates/warp-wasm/src/warp_kernel.rs @@ -10,26 +10,45 @@ use std::fmt; use echo_wasm_abi::kernel_port::{ - error_codes, AbiError, ControlIntentV1, DispatchResponse, GlobalTick as 
AbiGlobalTick, + error_codes, AbiError, AttachmentDescentPolicy as AbiAttachmentDescentPolicy, + AttachmentKey as AbiAttachmentKey, AttachmentOwnerRef as AbiAttachmentOwnerRef, + AttachmentPlane as AbiAttachmentPlane, AuthoredObserverPlan as AbiAuthoredObserverPlan, + BraidId as AbiBraidId, ControlIntentV1, CoordinateAt as AbiCoordinateAt, DispatchResponse, + EchoCoordinate as AbiEchoCoordinate, GlobalTick as AbiGlobalTick, HeadEligibility as AbiHeadEligibility, HeadId as AbiHeadId, HeadInfo, KernelPort, NeighborhoodSite as AbiNeighborhoodSite, ObservationArtifact as AbiObservationArtifact, ObservationFrame as AbiObservationFrame, ObservationProjection as AbiObservationProjection, - ObservationRequest as AbiObservationRequest, RegistryInfo, RunCompletion, RunId as AbiRunId, + ObservationReadBudget as AbiObservationReadBudget, ObservationRequest as AbiObservationRequest, + ObservationRights as AbiObservationRights, ObserveOpticRequest as AbiObserveOpticRequest, + ObserveOpticResult as AbiObserveOpticResult, ObserverInstanceRef as AbiObserverInstanceRef, + OpticAperture as AbiOpticAperture, OpticApertureShape as AbiOpticApertureShape, + OpticFocus as AbiOpticFocus, ProjectionVersion as AbiProjectionVersion, + ReadingObserverPlan as AbiReadingObserverPlan, ReducerVersion as AbiReducerVersion, + RegistryInfo, RetainedReadingKey as AbiRetainedReadingKey, RunCompletion, RunId as AbiRunId, SchedulerMode, SchedulerState, SchedulerStatus, SettlementDelta as AbiSettlementDelta, SettlementPlan as AbiSettlementPlan, SettlementRequest as AbiSettlementRequest, SettlementResult as AbiSettlementResult, WorkState, WorldlineId as AbiWorldlineId, WorldlineTick as AbiWorldlineTick, WriterHeadKey as AbiWriterHeadKey, ABI_VERSION, }; -use echo_wasm_abi::{unpack_control_intent_v1, unpack_intent_v1, CONTROL_INTENT_V1_OP_ID}; +use echo_wasm_abi::{ + unpack_control_intent_v1, unpack_import_suffix_intent_v1, unpack_intent_v1, + CONTROL_INTENT_V1_OP_ID, IMPORT_SUFFIX_INTENT_V1_OP_ID, +}; use 
warp_core::{ - make_head_id, make_intent_kind, make_node_id, make_type_id, Engine, EngineBuilder, GlobalTick, - GraphStore, HeadEligibility, HeadId, HistoryError, IngressDisposition, IngressEnvelope, - IngressTarget, NeighborhoodError, NeighborhoodSiteService, NodeRecord, ObservationAt, + make_head_id, make_intent_kind, make_node_id, make_type_id, AttachmentDescentPolicy, + AttachmentKey, AttachmentOwner, AttachmentPlane, AuthoredObserverPlan, BraidId, CoordinateAt, + EchoCoordinate, EdgeKey, Engine, EngineBuilder, EngineError, GlobalTick, GraphStore, + HeadEligibility, HeadId, HistoryError, IngressDisposition, IngressEnvelope, IngressTarget, + NeighborhoodError, NeighborhoodSiteService, NodeKey, NodeRecord, ObservationAt, ObservationCoordinate, ObservationError, ObservationFrame, ObservationPayload, - ObservationProjection, ObservationRequest, ObservationService, PlaybackMode, ProvenanceService, - RunId, RuntimeError, SchedulerCoordinator, SchedulerKind, SettlementError, SettlementService, - StrandId, WorldlineId, WorldlineRuntime, WorldlineState, WorldlineStateError, WorldlineTick, - WriterHead, WriterHeadKey, + ObservationProjection, ObservationReadBudget, ObservationRequest, ObservationRights, + ObservationService, ObserveOpticRequest, ObserverInstanceId, ObserverInstanceRef, + ObserverPlanId, OpticAperture, OpticApertureShape, OpticCapabilityId, OpticFocus, + OpticReadBudget, PlaybackMode, ProjectionVersion, ProvenanceRef, ProvenanceService, + ReadingObserverPlan, ReducerVersion, RetainedReadingKey, RunId, RuntimeError, + SchedulerCoordinator, SchedulerKind, SettlementError, SettlementService, StrandId, TypeId, + WorldlineId, WorldlineRuntime, WorldlineState, WorldlineStateError, WorldlineTick, WriterHead, + WriterHeadKey, }; /// Error returned when a [`WarpKernel`] cannot be initialized from a caller-supplied engine. 
@@ -43,6 +62,8 @@ pub enum KernelInitError { Provenance(HistoryError), /// Runtime registration failed while installing the default worldline/head. Runtime(RuntimeError), + /// Kernel-owned command rule registration failed. + Engine(EngineError), } impl fmt::Display for KernelInitError { @@ -52,6 +73,7 @@ impl fmt::Display for KernelInitError { Self::WorldlineState(err) => err.fmt(f), Self::Provenance(err) => err.fmt(f), Self::Runtime(err) => err.fmt(f), + Self::Engine(err) => err.fmt(f), } } } @@ -76,6 +98,12 @@ impl From for KernelInitError { } } +impl From for KernelInitError { + fn from(value: EngineError) -> Self { + Self::Engine(value) + } +} + /// App-agnostic kernel wrapping a `warp-core::Engine`. /// /// Constructed via [`WarpKernel::new`] (default empty engine) or @@ -94,8 +122,8 @@ pub struct WarpKernel { impl WarpKernel { /// Create a new kernel with a minimal empty engine. /// - /// The engine has a single root node and no rewrite rules. - /// Useful for testing the boundary or as a starting point. + /// The engine starts with a single root node; [`Self::with_engine`] + /// installs the generic Echo command rules used by the boundary. pub fn new() -> Result { let mut store = GraphStore::default(); let root = make_node_id("root"); @@ -128,10 +156,14 @@ impl WarpKernel { /// The engine must be fresh: `WarpKernel` can mirror graph state into the /// default worldline runtime, but it cannot reconstruct prior tick history /// or materialization state from an already-advanced engine. 
- pub fn with_engine(engine: Engine, registry: RegistryInfo) -> Result { + pub fn with_engine( + mut engine: Engine, + registry: RegistryInfo, + ) -> Result { if !engine.is_fresh_runtime_state() { return Err(KernelInitError::NonFreshEngine); } + engine.register_rule(warp_core::import_suffix_intent_rule())?; let root = engine.root_key(); let default_worldline = WorldlineId::from_bytes(root.warp_id.0); let mut runtime = WorldlineRuntime::new(); @@ -223,6 +255,29 @@ impl WarpKernel { code: error_codes::UNSUPPORTED_QUERY, message: "query observation is not supported by this kernel".into(), }, + ObservationError::UnsupportedObserverPlan(plan) => AbiError { + code: error_codes::UNSUPPORTED_OBSERVER_PLAN, + message: format!("unsupported observer plan: {plan:?}"), + }, + ObservationError::UnsupportedObserverInstance(instance) => AbiError { + code: error_codes::UNSUPPORTED_OBSERVER_INSTANCE, + message: format!("unsupported observer instance: {instance:?}"), + }, + ObservationError::UnsupportedRights(rights) => AbiError { + code: error_codes::UNSUPPORTED_OBSERVATION_RIGHTS, + message: format!("unsupported observation rights posture: {rights:?}"), + }, + ObservationError::BudgetExceeded { + max_payload_bytes, + payload_bytes, + max_witness_refs, + witness_refs, + } => AbiError { + code: error_codes::OBSERVATION_BUDGET_EXCEEDED, + message: format!( + "observation budget exceeded: payload {payload_bytes}/{max_payload_bytes} bytes, witness refs {witness_refs}/{max_witness_refs}" + ), + }, ObservationError::ObservationUnavailable { worldline_id, at } => AbiError { code: error_codes::OBSERVATION_UNAVAILABLE, message: format!( @@ -291,10 +346,282 @@ impl WarpKernel { vars_bytes, }, }; + let observer_plan = Self::to_core_observer_plan(request.observer_plan)?; + let observer_instance = request + .observer_instance + .map(Self::to_core_observer_instance) + .transpose()?; + let budget = Self::to_core_observation_budget(request.budget); + let rights = 
Self::to_core_observation_rights(request.rights); Ok(ObservationRequest { coordinate: ObservationCoordinate { worldline_id, at }, frame, projection, + observer_plan, + observer_instance, + budget, + rights, + }) + } + + fn to_core_observe_optic_request( + request: AbiObserveOpticRequest, + ) -> Result { + Ok(ObserveOpticRequest { + optic_id: warp_core::OpticId::from_bytes(*request.optic_id.as_bytes()), + focus: Self::to_core_optic_focus(request.focus)?, + coordinate: Self::to_core_echo_coordinate(request.coordinate)?, + aperture: Self::to_core_optic_aperture(request.aperture)?, + projection_version: Self::to_core_projection_version(request.projection_version), + reducer_version: request.reducer_version.map(Self::to_core_reducer_version), + capability: OpticCapabilityId::from_bytes(*request.capability.as_bytes()), + }) + } + + fn to_core_optic_focus(focus: AbiOpticFocus) -> Result { + Ok(match focus { + AbiOpticFocus::Worldline { worldline_id } => OpticFocus::Worldline { + worldline_id: Self::to_core_worldline_id(&worldline_id), + }, + AbiOpticFocus::Strand { strand_id } => OpticFocus::Strand { + strand_id: Self::to_core_strand_id(&strand_id), + }, + AbiOpticFocus::Braid { braid_id } => OpticFocus::Braid { + braid_id: Self::to_core_braid_id(&braid_id), + }, + AbiOpticFocus::RetainedReading { key } => OpticFocus::RetainedReading { + key: Self::to_core_retained_reading_key(&key), + }, + AbiOpticFocus::AttachmentBoundary { key } => OpticFocus::AttachmentBoundary { + key: Self::to_core_attachment_key(key)?, + }, + }) + } + + fn to_core_echo_coordinate(coordinate: AbiEchoCoordinate) -> Result { + Ok(match coordinate { + AbiEchoCoordinate::Worldline { worldline_id, at } => EchoCoordinate::Worldline { + worldline_id: Self::to_core_worldline_id(&worldline_id), + at: Self::to_core_coordinate_at(at)?, + }, + AbiEchoCoordinate::Strand { + strand_id, + at, + parent_basis, + } => EchoCoordinate::Strand { + strand_id: Self::to_core_strand_id(&strand_id), + at: 
Self::to_core_coordinate_at(at)?, + parent_basis: parent_basis.map(Self::to_core_provenance_ref).transpose()?, + }, + AbiEchoCoordinate::Braid { + braid_id, + projection_digest, + member_count, + } => EchoCoordinate::Braid { + braid_id: Self::to_core_braid_id(&braid_id), + projection_digest: Self::hash_from_vec( + "braid projection digest", + projection_digest, + )?, + member_count, + }, + AbiEchoCoordinate::RetainedReading { key } => EchoCoordinate::RetainedReading { + key: Self::to_core_retained_reading_key(&key), + }, + }) + } + + fn to_core_coordinate_at(at: AbiCoordinateAt) -> Result { + Ok(match at { + AbiCoordinateAt::Frontier => CoordinateAt::Frontier, + AbiCoordinateAt::Tick { worldline_tick } => { + CoordinateAt::Tick(WorldlineTick::from_raw(worldline_tick.0)) + } + AbiCoordinateAt::Provenance { reference } => { + CoordinateAt::Provenance(Self::to_core_provenance_ref(reference)?) + } + }) + } + + fn to_core_provenance_ref( + reference: echo_wasm_abi::kernel_port::ProvenanceRef, + ) -> Result { + Ok(ProvenanceRef { + worldline_id: Self::to_core_worldline_id(&reference.worldline_id), + worldline_tick: WorldlineTick::from_raw(reference.worldline_tick.0), + commit_hash: Self::hash_from_vec("provenance commit hash", reference.commit_hash)?, + }) + } + + fn to_core_optic_aperture(aperture: AbiOpticAperture) -> Result { + Ok(OpticAperture { + shape: Self::to_core_optic_aperture_shape(aperture.shape)?, + budget: OpticReadBudget { + max_bytes: aperture.budget.max_bytes, + max_nodes: aperture.budget.max_nodes, + max_ticks: aperture.budget.max_ticks, + max_attachments: aperture.budget.max_attachments, + }, + attachment_descent: match aperture.attachment_descent { + AbiAttachmentDescentPolicy::BoundaryOnly => AttachmentDescentPolicy::BoundaryOnly, + AbiAttachmentDescentPolicy::Explicit => AttachmentDescentPolicy::Explicit, + }, + }) + } + + fn to_core_optic_aperture_shape( + shape: AbiOpticApertureShape, + ) -> Result { + Ok(match shape { + 
AbiOpticApertureShape::Head => OpticApertureShape::Head, + AbiOpticApertureShape::SnapshotMetadata => OpticApertureShape::SnapshotMetadata, + AbiOpticApertureShape::TruthChannels { channels } => { + OpticApertureShape::TruthChannels { + channels: channels.map(|ids| { + ids.into_iter() + .map(|id| TypeId(*id.as_bytes())) + .collect::>() + }), + } + } + AbiOpticApertureShape::QueryBytes { + query_id, + vars_digest, + } => OpticApertureShape::QueryBytes { + query_id, + vars_digest: Self::hash_from_vec("optic query vars digest", vars_digest)?, + }, + AbiOpticApertureShape::ByteRange { start, len } => { + OpticApertureShape::ByteRange { start, len } + } + AbiOpticApertureShape::AttachmentBoundary => OpticApertureShape::AttachmentBoundary, + }) + } + + fn to_core_projection_version(version: AbiProjectionVersion) -> ProjectionVersion { + ProjectionVersion::from_raw(version.0) + } + + fn to_core_reducer_version(version: AbiReducerVersion) -> ReducerVersion { + ReducerVersion::from_raw(version.0) + } + + fn to_core_braid_id(id: &AbiBraidId) -> BraidId { + BraidId::from_bytes(*id.as_bytes()) + } + + fn to_core_retained_reading_key(id: &AbiRetainedReadingKey) -> RetainedReadingKey { + RetainedReadingKey::from_bytes(*id.as_bytes()) + } + + fn to_core_attachment_key(key: AbiAttachmentKey) -> Result { + let owner = match key.owner { + AbiAttachmentOwnerRef::Node { warp_id, node_id } => AttachmentOwner::Node(NodeKey { + warp_id: warp_core::WarpId(*warp_id.as_bytes()), + local_id: warp_core::NodeId(*node_id.as_bytes()), + }), + AbiAttachmentOwnerRef::Edge { warp_id, edge_id } => AttachmentOwner::Edge(EdgeKey { + warp_id: warp_core::WarpId(*warp_id.as_bytes()), + local_id: warp_core::EdgeId(*edge_id.as_bytes()), + }), + }; + let plane = match key.plane { + AbiAttachmentPlane::Alpha => AttachmentPlane::Alpha, + AbiAttachmentPlane::Beta => AttachmentPlane::Beta, + }; + let key = AttachmentKey { owner, plane }; + if !key.is_plane_valid() { + return Err(AbiError { + code: 
error_codes::INVALID_PAYLOAD, + message: "attachment key plane does not match owner kind".into(), + }); + } + Ok(key) + } + + fn to_core_observer_plan( + plan: AbiReadingObserverPlan, + ) -> Result { + match plan { + AbiReadingObserverPlan::Builtin { plan } => Ok(ReadingObserverPlan::Builtin { + plan: match plan { + echo_wasm_abi::kernel_port::BuiltinObserverPlan::CommitBoundaryHead => { + warp_core::BuiltinObserverPlan::CommitBoundaryHead + } + echo_wasm_abi::kernel_port::BuiltinObserverPlan::CommitBoundarySnapshot => { + warp_core::BuiltinObserverPlan::CommitBoundarySnapshot + } + echo_wasm_abi::kernel_port::BuiltinObserverPlan::RecordedTruthChannels => { + warp_core::BuiltinObserverPlan::RecordedTruthChannels + } + echo_wasm_abi::kernel_port::BuiltinObserverPlan::QueryBytes => { + warp_core::BuiltinObserverPlan::QueryBytes + } + }, + }), + AbiReadingObserverPlan::Authored { plan } => Ok(ReadingObserverPlan::Authored { + plan: Box::new(Self::to_core_authored_observer_plan(*plan)?), + }), + } + } + + fn to_core_authored_observer_plan( + plan: AbiAuthoredObserverPlan, + ) -> Result { + Ok(AuthoredObserverPlan { + plan_id: ObserverPlanId::from_bytes(*plan.plan_id.as_bytes()), + artifact_hash: Self::hash_from_vec("observer artifact hash", plan.artifact_hash)?, + schema_hash: Self::hash_from_vec("observer schema hash", plan.schema_hash)?, + state_schema_hash: Self::hash_from_vec( + "observer state schema hash", + plan.state_schema_hash, + )?, + update_law_hash: Self::hash_from_vec("observer update law hash", plan.update_law_hash)?, + emission_law_hash: Self::hash_from_vec( + "observer emission law hash", + plan.emission_law_hash, + )?, + }) + } + + fn to_core_observer_instance( + instance: AbiObserverInstanceRef, + ) -> Result { + Ok(ObserverInstanceRef { + instance_id: ObserverInstanceId::from_bytes(*instance.instance_id.as_bytes()), + plan_id: ObserverPlanId::from_bytes(*instance.plan_id.as_bytes()), + state_hash: Self::hash_from_vec("observer state hash", 
instance.state_hash)?, + }) + } + + fn to_core_observation_budget(budget: AbiObservationReadBudget) -> ObservationReadBudget { + match budget { + AbiObservationReadBudget::UnboundedOneShot => ObservationReadBudget::UnboundedOneShot, + AbiObservationReadBudget::Bounded { + max_payload_bytes, + max_witness_refs, + } => ObservationReadBudget::Bounded { + max_payload_bytes, + max_witness_refs, + }, + } + } + + fn to_core_observation_rights(rights: AbiObservationRights) -> ObservationRights { + match rights { + AbiObservationRights::KernelPublic => ObservationRights::KernelPublic, + AbiObservationRights::CapabilityScoped { capability } => { + ObservationRights::CapabilityScoped { + capability: OpticCapabilityId::from_bytes(*capability.as_bytes()), + } + } + } + } + + fn hash_from_vec(label: &str, bytes: Vec) -> Result { + bytes.try_into().map_err(|bytes: Vec| AbiError { + code: error_codes::INVALID_PAYLOAD, + message: format!("{label} must be exactly 32 bytes, got {}", bytes.len()), }) } @@ -307,14 +634,16 @@ impl WarpKernel { } pub(crate) fn current_head(&self) -> Result { - Self::head_info_from_observation(self.observe_core(ObservationRequest { - coordinate: ObservationCoordinate { + let request = ObservationRequest::builtin_one_shot( + ObservationCoordinate { worldline_id: self.default_worldline, at: ObservationAt::Frontier, }, - frame: ObservationFrame::CommitBoundary, - projection: ObservationProjection::Head, - })?) + ObservationFrame::CommitBoundary, + ObservationProjection::Head, + ) + .map_err(Self::map_observation_error)?; + Self::head_info_from_observation(self.observe_core(request)?) 
} fn head_info_from_observation( @@ -543,6 +872,13 @@ impl KernelPort for WarpKernel { }); } + if op_id == IMPORT_SUFFIX_INTENT_V1_OP_ID { + unpack_import_suffix_intent_v1(intent_bytes).map_err(|_| AbiError { + code: error_codes::INVALID_INTENT, + message: "invalid import suffix intent envelope".into(), + })?; + } + let envelope = IngressEnvelope::local_intent( IngressTarget::DefaultWriter { worldline_id: self.default_worldline, @@ -568,6 +904,42 @@ impl KernelPort for WarpKernel { } } + fn current_optic_coordinate( + &self, + focus: &AbiOpticFocus, + ) -> Result, AbiError> { + match focus { + AbiOpticFocus::Worldline { worldline_id } => { + if Self::to_core_worldline_id(worldline_id) != self.default_worldline { + return Ok(None); + } + + let head = self.current_head()?; + Ok(Some(AbiEchoCoordinate::Worldline { + worldline_id: *worldline_id, + at: AbiCoordinateAt::Tick { + worldline_tick: head.worldline_tick, + }, + })) + } + _ => Ok(None), + } + } + + fn observe_optic( + &self, + request: AbiObserveOpticRequest, + ) -> Result { + let request = Self::to_core_observe_optic_request(request)?; + Ok(ObservationService::observe_optic( + &self.runtime, + &self.provenance, + &self.engine, + request, + ) + .to_abi()) + } + fn observe(&self, request: AbiObservationRequest) -> Result { let request = Self::to_core_request(request)?; Ok(self.observe_core(request)?.to_abi()) @@ -627,15 +999,18 @@ impl KernelPort for WarpKernel { mod tests { use super::*; use echo_wasm_abi::{ + decode_cbor, kernel_port::{ - BuiltinObserverPlan as AbiBuiltinObserverPlan, ControlIntentV1, + BuiltinObserverPlan as AbiBuiltinObserverPlan, + CausalSuffixBundle as AbiCausalSuffixBundle, ControlIntentV1, GlobalTick as AbiGlobalTick, HeadEligibility as AbiHeadEligibility, - HeadId as AbiHeadId, ObservationAt as AbiObservationAt, + HeadId as AbiHeadId, ImportSuffixRequest as AbiImportSuffixRequest, + ImportSuffixResult as AbiImportSuffixResult, ObservationAt as AbiObservationAt, ObservationBasisPosture 
as AbiObservationBasisPosture, ObservationCoordinate as AbiObservationCoordinate, ObservationFrame as AbiObservationFrame, ObservationPayload as AbiObservationPayload, ObservationProjection as AbiObservationProjection, - ObservationRequest as AbiObservationRequest, + ObservationRequest as AbiObservationRequest, ProvenanceRef as AbiProvenanceRef, ReadingBudgetPosture as AbiReadingBudgetPosture, ReadingObserverBasis as AbiReadingObserverBasis, ReadingObserverPlan as AbiReadingObserverPlan, @@ -644,24 +1019,38 @@ mod tests { SchedulerState, SettlementDecision as AbiSettlementDecision, SettlementOverlapRevalidation as AbiSettlementOverlapRevalidation, SettlementParentRevalidation as AbiSettlementParentRevalidation, - SettlementRequest as AbiSettlementRequest, WorkState, WorldlineId as AbiWorldlineId, - WorldlineTick as AbiWorldlineTick, WriterHeadKey as AbiWriterHeadKey, + SettlementRequest as AbiSettlementRequest, + WitnessedSuffixAdmissionOutcome as AbiWitnessedSuffixAdmissionOutcome, + WitnessedSuffixShell as AbiWitnessedSuffixShell, WorkState, + WorldlineId as AbiWorldlineId, WorldlineTick as AbiWorldlineTick, + WriterHeadKey as AbiWriterHeadKey, }, - pack_control_intent_v1, pack_intent_v1, + pack_control_intent_v1, pack_import_suffix_intent_v1, pack_intent_v1, + IMPORT_SUFFIX_INTENT_V1_OP_ID, }; use warp_core::{ compute_commit_hash_v2, make_edge_id, make_head_id, make_node_id, make_strand_id, - make_type_id, make_warp_id, materialization::make_channel_id, BaseRef, EdgeRecord, - GlobalTick, GraphStore, HashTriplet, InboxPolicy, NodeKey, NodeRecord, PlaybackMode, - ProvenanceEntry, ProvenanceService, ProvenanceStore, SlotId, Strand, StrandId, - TickCommitStatus, WarpOp, WarpTickPatchV1, WorldlineRuntime, WorldlineState, WorldlineTick, - WorldlineTickHeaderV1, WorldlineTickPatchV1, WriterHead, WriterHeadKey, + make_type_id, make_warp_id, materialization::make_channel_id, AdmissionLawId, BaseRef, + CoordinateAt, EchoCoordinate, EdgeRecord, GlobalTick, GraphStore, 
HashTriplet, InboxPolicy, + IntentFamilyId, NodeId, NodeKey, NodeRecord, OpticActorId, OpticCapabilityId, OpticCause, + OpticReadBudget, PlaybackMode, ProvenanceEntry, ProvenanceService, ProvenanceStore, SlotId, + Strand, StrandId, TickCommitStatus, WarpOp, WarpTickPatchV1, WorldlineHeadOptic, + WorldlineRuntime, WorldlineState, WorldlineTick, WorldlineTickHeaderV1, + WorldlineTickPatchV1, WriterHead, WriterHeadKey, }; fn start_until_idle(kernel: &mut WarpKernel, cycle_limit: Option) -> DispatchResponse { start_until_idle_result(kernel, cycle_limit).unwrap() } + fn abi_builtin_one_shot( + coordinate: AbiObservationCoordinate, + frame: AbiObservationFrame, + projection: AbiObservationProjection, + ) -> AbiObservationRequest { + AbiObservationRequest::builtin_one_shot(coordinate, frame, projection).unwrap() + } + fn start_until_idle_result( kernel: &mut WarpKernel, cycle_limit: Option, @@ -681,6 +1070,41 @@ mod tests { AbiHeadId::from_bytes(*head_id.as_bytes()) } + fn abi_provenance_ref(worldline_id: WorldlineId, tick: u64, seed: u8) -> AbiProvenanceRef { + AbiProvenanceRef { + worldline_id: abi_worldline_id(worldline_id), + worldline_tick: AbiWorldlineTick(tick), + commit_hash: vec![seed; 32], + } + } + + fn sample_import_suffix_request(kernel: &WarpKernel) -> AbiImportSuffixRequest { + let worldline_id = kernel.default_worldline; + let base_frontier = abi_provenance_ref(worldline_id, 0, 1); + let target_frontier = abi_provenance_ref(worldline_id, 1, 2); + let source_suffix = AbiWitnessedSuffixShell { + source_worldline_id: abi_worldline_id(worldline_id), + source_suffix_start_tick: AbiWorldlineTick(1), + source_suffix_end_tick: Some(AbiWorldlineTick(1)), + source_entries: vec![target_frontier.clone()], + boundary_witness: Some(base_frontier.clone()), + witness_digest: vec![3; 32], + basis_report: None, + }; + + AbiImportSuffixRequest { + bundle: AbiCausalSuffixBundle { + base_frontier, + target_frontier, + source_suffix, + bundle_digest: vec![4; 32], + }, + 
target_worldline_id: abi_worldline_id(worldline_id), + target_basis: abi_provenance_ref(worldline_id, 0, 1), + basis_report: None, + } + } + fn wl(n: u8) -> WorldlineId { WorldlineId::from_bytes([n; 32]) } @@ -980,6 +1404,72 @@ mod tests { assert_eq!(head.commit_id.len(), 32); } + #[test] + fn worldline_head_optic_example_reads_and_dispatches_through_kernel() { + let mut kernel = WarpKernel::new().unwrap(); + let worldline_id = kernel.default_worldline; + let actor = OpticActorId::from_bytes([41; 32]); + let optic = WorldlineHeadOptic::open( + worldline_id, + CoordinateAt::Frontier, + actor, + OpticCapabilityId::from_bytes([42; 32]), + IntentFamilyId::from_bytes([43; 32]), + [44; 32], + ) + .unwrap(); + + let observe = optic + .observe_head_request(OpticReadBudget { + max_bytes: Some(1024), + max_nodes: Some(8), + max_ticks: Some(4), + max_attachments: Some(0), + }) + .to_abi(); + let reading = kernel.observe_optic(observe).unwrap(); + match reading { + AbiObserveOpticResult::Reading(reading) => { + assert_eq!( + reading.read_identity.optic_id, + echo_wasm_abi::kernel_port::OpticId::from_bytes( + *optic.optic.optic_id.as_bytes() + ) + ); + assert!(matches!( + reading.payload, + AbiObservationPayload::Head { .. 
} + )); + } + AbiObserveOpticResult::Obstructed(obstruction) => { + panic!("worldline head optic should read through kernel, got {obstruction:?}"); + } + } + + let base_coordinate = EchoCoordinate::Worldline { + worldline_id, + at: CoordinateAt::Tick(WorldlineTick::from_raw(0)), + }; + let dispatch = optic + .dispatch_eint_v1_request( + base_coordinate, + OpticCause { + actor, + cause_hash: [45; 32], + label: Some("kernel optic example".to_owned()), + }, + AdmissionLawId::from_bytes([46; 32]), + pack_intent_v1(88, b"kernel-optic-example").unwrap(), + ) + .to_abi(); + + let dispatch = kernel.dispatch_optic_intent(dispatch).unwrap(); + assert!(matches!( + dispatch, + echo_wasm_abi::kernel_port::IntentDispatchResult::Staged(_) + )); + } + /// Regression: init() must return real 32-byte hashes, not empty vecs. /// The init() WASM export reads the initial frontier head before boxing the /// kernel. This test verifies that the observation-backed head helper @@ -1114,6 +1604,173 @@ mod tests { assert_eq!(r1.intent_id, r2.intent_id); } + #[test] + fn import_suffix_intent_rejects_malformed_payload_without_ingress() { + let mut kernel = WarpKernel::new().unwrap(); + let head_before = kernel.current_head().unwrap(); + let provenance_len_before = kernel.provenance.len(kernel.default_worldline).unwrap(); + let intent = pack_intent_v1(IMPORT_SUFFIX_INTENT_V1_OP_ID, &[0xff]).unwrap(); + + let error = kernel.dispatch_intent(&intent).unwrap_err(); + + assert_eq!(error.code, error_codes::INVALID_INTENT); + assert!(error.message.contains("invalid import suffix intent")); + assert_eq!(kernel.current_head().unwrap(), head_before); + assert_eq!( + kernel.provenance.len(kernel.default_worldline).unwrap(), + provenance_len_before + ); + } + + #[test] + fn import_suffix_intent_enters_ingress_and_scheduler() { + let mut kernel = WarpKernel::new().unwrap(); + let request = sample_import_suffix_request(&kernel); + let intent = pack_import_suffix_intent_v1(&request).unwrap(); + + let dispatch 
= kernel.dispatch_intent(&intent).unwrap(); + assert!(dispatch.accepted); + assert_eq!(dispatch.intent_id.len(), 32); + + let response = start_until_idle(&mut kernel, Some(1)); + let head = kernel.current_head().unwrap(); + assert_eq!( + response.scheduler_status.last_run_completion, + Some(RunCompletion::Quiesced) + ); + assert_eq!(head.worldline_tick, AbiWorldlineTick(1)); + + let event_id = NodeId(dispatch.intent_id.as_slice().try_into().unwrap()); + let result_id = warp_core::import_suffix_result_node_id(&event_id); + let frontier = kernel + .runtime + .worldlines() + .get(&kernel.default_worldline) + .unwrap(); + let frontier_state = frontier.state(); + let root_warp = frontier_state.root().warp_id; + let store = frontier_state.store(&root_warp).unwrap(); + let result_node = store.node(&result_id); + assert!(result_node.is_some()); + + let result_attachment = store + .node_attachment(&result_id) + .expect("import suffix result attachment should be recorded"); + let warp_core::AttachmentValue::Atom(atom) = result_attachment else { + panic!("import suffix result must be a typed atom"); + }; + assert_eq!( + atom.type_id, + make_type_id(warp_core::IMPORT_SUFFIX_RESULT_ATTACHMENT_TYPE) + ); + + let result: AbiImportSuffixResult = decode_cbor(atom.bytes.as_ref()).unwrap(); + assert_eq!(result.bundle_digest, request.bundle.bundle_digest); + assert_eq!( + result.admission.source_shell_digest, + request.bundle.source_suffix.witness_digest + ); + assert_eq!(result.admission.target_basis, request.target_basis); + match result.admission.outcome { + AbiWitnessedSuffixAdmissionOutcome::Staged { staged_refs, .. 
} => { + assert_eq!(staged_refs, request.bundle.source_suffix.source_entries); + } + other => panic!("expected staged import result, got {other:?}"), + } + + let entry = kernel + .provenance + .entry(kernel.default_worldline, WorldlineTick::ZERO) + .unwrap(); + let patch = entry.patch.expect("import tick should record a patch"); + assert!(patch.ops.iter().any(|op| { + matches!( + op, + WarpOp::UpsertNode { node, .. } if node.local_id == result_id + ) + })); + assert!(patch.ops.iter().any(|op| { + matches!( + op, + WarpOp::SetAttachment { key, .. } + if *key == AttachmentKey::node_alpha(NodeKey { + warp_id: root_warp, + local_id: result_id, + }) + ) + })); + } + + #[test] + fn stale_optic_dispatch_obstructs_without_advancing_head_or_provenance() { + let mut kernel = WarpKernel::new().unwrap(); + let intent = pack_intent_v1(1, b"advance").unwrap(); + kernel.dispatch_intent(&intent).unwrap(); + start_until_idle(&mut kernel, Some(1)); + + let head_before = kernel.current_head().unwrap(); + let provenance_len_before = kernel.provenance.len(kernel.default_worldline).unwrap(); + assert_eq!(head_before.worldline_tick, AbiWorldlineTick(1)); + + let actor = echo_wasm_abi::kernel_port::OpticActorId::from_bytes([4; 32]); + let intent_family = echo_wasm_abi::kernel_port::IntentFamilyId::from_bytes([5; 32]); + let worldline_id = abi_worldline_id(kernel.default_worldline); + let focus = echo_wasm_abi::kernel_port::OpticFocus::Worldline { worldline_id }; + let stale_base = echo_wasm_abi::kernel_port::EchoCoordinate::Worldline { + worldline_id, + at: echo_wasm_abi::kernel_port::CoordinateAt::Tick { + worldline_tick: AbiWorldlineTick(0), + }, + }; + let request = echo_wasm_abi::kernel_port::DispatchOpticIntentRequest { + optic_id: echo_wasm_abi::kernel_port::OpticId::from_bytes([1; 32]), + base_coordinate: stale_base.clone(), + intent_family, + focus: focus.clone(), + cause: echo_wasm_abi::kernel_port::OpticCause { + actor, + cause_hash: vec![6; 32], + label: Some("stale optic 
dispatch".into()), + }, + capability: echo_wasm_abi::kernel_port::OpticCapability { + capability_id: echo_wasm_abi::kernel_port::OpticCapabilityId::from_bytes([7; 32]), + actor, + issuer_ref: None, + policy_hash: vec![8; 32], + allowed_focus: focus, + projection_version: echo_wasm_abi::kernel_port::ProjectionVersion(1), + reducer_version: None, + allowed_intent_family: intent_family, + max_budget: echo_wasm_abi::kernel_port::OpticReadBudget { + max_bytes: Some(4096), + max_nodes: Some(64), + max_ticks: Some(8), + max_attachments: Some(0), + }, + }, + admission_law: echo_wasm_abi::kernel_port::AdmissionLawId::from_bytes([9; 32]), + payload: echo_wasm_abi::kernel_port::OpticIntentPayload::EintV1 { + bytes: pack_intent_v1(2, b"stale-write").unwrap(), + }, + }; + + let result = kernel.dispatch_optic_intent(request).unwrap(); + assert!(matches!( + result, + echo_wasm_abi::kernel_port::IntentDispatchResult::Obstructed(obstruction) + if obstruction.kind == echo_wasm_abi::kernel_port::OpticObstructionKind::StaleBasis + && obstruction.coordinate == Some(stale_base) + )); + + let head_after = kernel.current_head().unwrap(); + assert_eq!(head_after.worldline_tick, head_before.worldline_tick); + assert_eq!(head_after.commit_id, head_before.commit_id); + assert_eq!( + kernel.provenance.len(kernel.default_worldline).unwrap(), + provenance_len_before + ); + } + #[test] fn dispatch_then_start_changes_state() { let mut kernel = WarpKernel::new().unwrap(); @@ -1226,16 +1883,16 @@ mod tests { fn observe_invalid_tick_returns_observation_error_code() { let kernel = WarpKernel::new().unwrap(); let err = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Tick { worldline_tick: AbiWorldlineTick(999), }, }, - frame: AbiObservationFrame::CommitBoundary, - projection: AbiObservationProjection::Snapshot, - }) + 
AbiObservationFrame::CommitBoundary, + AbiObservationProjection::Snapshot, + )) .unwrap_err(); assert_eq!(err.code, error_codes::INVALID_TICK); } @@ -1247,16 +1904,16 @@ mod tests { kernel.dispatch_intent(&intent).unwrap(); start_until_idle(&mut kernel, Some(1)); let artifact = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Tick { worldline_tick: AbiWorldlineTick(0), }, }, - frame: AbiObservationFrame::CommitBoundary, - projection: AbiObservationProjection::Snapshot, - }) + AbiObservationFrame::CommitBoundary, + AbiObservationProjection::Snapshot, + )) .unwrap(); let AbiObservationPayload::Snapshot { snapshot } = artifact.payload else { @@ -1272,14 +1929,14 @@ mod tests { fn observe_frontier_head_matches_current_head() { let kernel = WarpKernel::new().unwrap(); let artifact = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Frontier, }, - frame: AbiObservationFrame::CommitBoundary, - projection: AbiObservationProjection::Head, - }) + AbiObservationFrame::CommitBoundary, + AbiObservationProjection::Head, + )) .unwrap(); let head = kernel.current_head().unwrap(); @@ -1323,14 +1980,14 @@ mod tests { fn observe_neighborhood_site_returns_singleton_site_for_default_worldline() { let kernel = WarpKernel::new().unwrap(); let site = kernel - .observe_neighborhood_site(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe_neighborhood_site(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Frontier, }, - frame: AbiObservationFrame::CommitBoundary, - projection: AbiObservationProjection::Head, - }) + 
AbiObservationFrame::CommitBoundary, + AbiObservationProjection::Head, + )) .unwrap(); assert_eq!( @@ -1464,14 +2121,14 @@ mod tests { fn observe_frontier_snapshot_reports_u0_without_fake_sentinels() { let kernel = WarpKernel::new().unwrap(); let artifact = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Frontier, }, - frame: AbiObservationFrame::CommitBoundary, - projection: AbiObservationProjection::Snapshot, - }) + AbiObservationFrame::CommitBoundary, + AbiObservationProjection::Snapshot, + )) .unwrap(); let AbiObservationPayload::Snapshot { snapshot } = artifact.payload else { @@ -1494,14 +2151,14 @@ mod tests { let head_before = kernel.current_head().unwrap(); let _ = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Frontier, }, - frame: AbiObservationFrame::RecordedTruth, - projection: AbiObservationProjection::TruthChannels { channels: None }, - }) + AbiObservationFrame::RecordedTruth, + AbiObservationProjection::TruthChannels { channels: None }, + )) .unwrap(); let head_after = kernel.current_head().unwrap(); @@ -1544,16 +2201,16 @@ mod tests { kernel.provenance = provenance; let artifact = kernel - .observe(AbiObservationRequest { - coordinate: AbiObservationCoordinate { + .observe(abi_builtin_one_shot( + AbiObservationCoordinate { worldline_id: abi_worldline_id(kernel.default_worldline), at: AbiObservationAt::Tick { worldline_tick: AbiWorldlineTick(1), }, }, - frame: AbiObservationFrame::RecordedTruth, - projection: AbiObservationProjection::TruthChannels { channels: None }, - }) + AbiObservationFrame::RecordedTruth, + AbiObservationProjection::TruthChannels { channels: None }, + )) .unwrap(); let 
AbiObservationPayload::TruthChannels { channels } = artifact.payload else { panic!("expected recorded-truth payload"); diff --git a/det-policy.yaml b/det-policy.yaml index 82ee7244..db6e1a90 100644 --- a/det-policy.yaml +++ b/det-policy.yaml @@ -69,10 +69,6 @@ crates: class: DET_CRITICAL owner_role: "Architect" paths: ["crates/ttd-protocol-rs/**"] - ttd-manifest: - class: DET_CRITICAL - owner_role: "Architect" - paths: ["crates/ttd-manifest/**"] ci: class: DET_CRITICAL owner_role: "CI Engineer" diff --git a/docs/.vitepress/config.ts b/docs/.vitepress/config.ts index 654530af..a1315fb9 100644 --- a/docs/.vitepress/config.ts +++ b/docs/.vitepress/config.ts @@ -9,6 +9,11 @@ export default withMermaid( description: "Deterministic WARP runtime docs for Echo", cleanUrls: true, srcExclude: ["method/**", "design/**"], + vite: { + build: { + chunkSizeWarningLimit: 700, + }, + }, themeConfig: { search: { provider: "local" }, nav: [ @@ -34,10 +39,18 @@ export default withMermaid( items: [ { text: "Docs Map", link: "/" }, { text: "Runtime Model", link: "/architecture/outline" }, + { + text: "There Is No Graph", + link: "/architecture/there-is-no-graph", + }, { text: "Application Contract Hosting", link: "/architecture/application-contract-hosting", }, + { + text: "WSC + Verkle + IPA", + link: "/architecture/wsc-verkle-ipa-retained-readings", + }, { text: "Theory Map", link: "/theory/THEORY" }, { text: "Current Bearing", link: "/BEARING" }, ], diff --git a/docs/BEARING.md b/docs/BEARING.md index aa3f263e..0b2330fe 100644 --- a/docs/BEARING.md +++ b/docs/BEARING.md @@ -3,7 +3,7 @@ # BEARING -Last updated: 2026-04-26. +Last updated: 2026-05-09. This signpost summarizes current direction. It does not create commitments or replace backlog items, design docs, retros, or CLI status. 
If it disagrees with @@ -18,6 +18,9 @@ iterate without turning docs into a museum or a second codebase: - Echo exposes current browser-hostable substrate through the WASM ABI, not a pile of historical ABI versions. - Observer-relative reading metadata travels in `ReadingEnvelope`. +- Retained holograms should converge on WSC-backed bytes, CAS retention, and + future Verkle/IPA-style proof-carrying apertures without making any storage or + proof layer the ontology. - Method cycles and dated audit ledgers track planning decisions. - Local iteration speed is a first-class hill, because slow gates make every design/code/doc correction more expensive. @@ -32,8 +35,16 @@ The runtime-doctrine cutover is no longer just design text: `SettlementService`. - `crates/warp-wasm/src/warp_kernel.rs` exposes neighborhood and settlement surfaces through the WASM kernel boundary. -- `crates/echo-wasm-abi/src/kernel_port.rs` is currently ABI version 8 and - carries `ReadingEnvelope` inside observation artifacts. +- `crates/echo-wasm-abi/src/kernel_port.rs` is currently ABI version 9 and + makes observation requests name observer plan, optional instance, budget, and + rights while carrying `ReadingEnvelope` inside observation artifacts. +- `docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md` + names the shared read-side family boundary for authored observer plans, + installed artifacts, runtime reading values, and retained reading identity. +- `docs/architecture/wsc-verkle-ipa-retained-readings.md` locks the future + retained-reading stack: WSC as canonical columnar reading bytes, Verkle as + authenticated commitment/index, IPA as compact aperture proof, and `echo-cas` + as byte retention. - `docs/spec/SPEC-0009-wasm-abi.md` now documents the current ABI contract instead of pretending to preserve ABI v1-v5. 
@@ -46,8 +57,11 @@ The runtime-doctrine cutover is no longer just design text: anchored by `docs/design/0011-optic-observer-runtime-doctrine/design.md`. 3. Improve local iteration by separating quick doc/code lanes from full release gates while keeping full verification before publication. -4. Keep hardening the reading envelope and basis-posture surfaces until the - observer contract is boring, current, and test-backed. +4. Implement QueryView observers against the accepted reading-envelope family + boundary instead of adding a parallel read-result wrapper. +5. Keep the first `jedit` contract-hosting proof generic: `jedit` owns the hot + rope model, Echo retains WSC/proof-ready readings by generic CAS and reading + identity surfaces. ## What feels wrong? diff --git a/docs/architecture/WARP_DRIFT.md b/docs/architecture/WARP_DRIFT.md index 70a47349..b08dc50d 100644 --- a/docs/architecture/WARP_DRIFT.md +++ b/docs/architecture/WARP_DRIFT.md @@ -17,6 +17,22 @@ The relevant baseline is now: - **There is no canonical materialized graph.** The substrate is witnessed causal history. Graph-like structure is an observer-relative reading over that history. +- **All public WARP surfaces are optics producing holograms.** Admission, + observation, topology change, transport import, slicing, materialization, and + retention all choose a bounded causal basis/site, apply a law, and produce a + witnessed artifact with explicit posture. +- **Continuum is a protocol, not a graph model.** Echo and `git-warp` are + compatible because they can exchange witnessed causal-history artifacts + through shared Continuum families, not because either runtime owns "the + graph." +- **Runtimes and tools can themselves be WARP optics.** Echo is the real-time + simulation optic; `warp-ttd` is a debugger optic; `git-warp` is a Git + projection/retention optic; Wesley is a compiler rewrite optic from authored + schema to IR and artifacts. 
+- **Those labels are product roles, not ontological categories.** Graft is an + aperture/support-obligation optic, WARPDrive is a POSIX materialization + optic, and `jedit` is a human-facing console over readings and admission. + They remain WARP optics rather than privileged graph owners. - **A strand is a real speculative lane, not a frozen snapshot.** Its realized state is resolved against inherited parent history at a chosen basis, and bounded reads should materialize only the backward @@ -175,29 +191,37 @@ distance: - normalize to a comparable frontier - carry the transported local situation in a witness-bearing shell +- submit inbound transport admission as an Intent against an explicit basis - return explicit import outcomes - preserve the shell-equivalence story for independent imports +Implementation note: the runtime now treats Echo's `WitnessedSuffixShell`, +`CausalSuffixBundle`, and `ImportSuffixResult` shapes as the source model for +the Continuum runtime-boundary family. Continuum should change to match this +typed evidence model instead of requiring Echo to adapt to a weaker +`SuffixShell` placeholder. + +The same Intent-only rule applies to external forking, merging, braiding, +settlement, support mutation, and inverse operations. Existing internal services +may remain implementation details, but they are not public mutation authority. + ### Fourth: update the docs/invariants to match the corrected runtime The bootstrap strand contract and README language should be revised only after the runtime direction is pinned, not before.
-## ASAP backlog items - -These items now define the reconciliation path: - -- `docs/method/backlog/asap/KERNEL_live-holographic-strands.md` -- `docs/method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md` -- `docs/method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md` +## Relevant design context -Relevant existing design context: +These packets define the reconciliation path now that completed backlog cards +are pruned from `docs/method/backlog/**`: - `docs/design/0004-strand-contract/design.md` - `docs/design/0008-strand-settlement/design.md` - `docs/design/0009-witnessed-causal-suffix-sync/design.md` - `docs/design/0010-live-basis-settlement-plan/design.md` - `docs/design/0011-optic-observer-runtime-doctrine/design.md` +- `docs/design/0022-continuum-transport-identity/design.md` +- `docs/architecture/continuum-transport.md` - `docs/design/0006-echo-continuum-alignment/design.md` ## Practical rule diff --git a/docs/architecture/application-contract-hosting.md b/docs/architecture/application-contract-hosting.md index ca725612..115b236e 100644 --- a/docs/architecture/application-contract-hosting.md +++ b/docs/architecture/application-contract-hosting.md @@ -132,7 +132,8 @@ contract query operations onto this request shape when possible. **ReadingEnvelope** is the read-side evidence envelope. It carries observer plan, basis, witness refs, budget posture, rights posture, and residual, -plural, or obstructed posture. +plural, or obstructed posture. The current family boundary is named in +`docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md`. ## Write Path @@ -409,6 +410,12 @@ and cached materialized readings. CAS content hashes are not semantic truth by themselves.
Meaning lives in the typed coordinate or reference above the CAS blob. +For future retained readings, the preferred payload direction is documented in +[WSC, Verkle, IPA, And Retained Readings](wsc-verkle-ipa-retained-readings.md): +WSC supplies canonical columnar reading bytes, Verkle-style roots may +authenticate those bytes, IPA-style proofs may support bounded apertures, and +`echo-cas` remains content-addressed byte retention. + ```mermaid flowchart TB history["Witnessed causal history"] diff --git a/docs/architecture/continuum-transport.md b/docs/architecture/continuum-transport.md new file mode 100644 index 00000000..4c383742 --- /dev/null +++ b/docs/architecture/continuum-transport.md @@ -0,0 +1,163 @@ + + + +# Continuum Transport + +_Echo exports and imports witnessed causal suffix bundles through Continuum +families. It does not synchronize materialized state._ + +## Boundary Rule + +Continuum is the shared WARP protocol layer. Like HTTP, it lets independent +runtimes exchange lawful boundary artifacts without sharing implementation +internals. Unlike a graph database protocol, it does not claim there is one +canonical graph to synchronize. + +Continuum transport uses Echo's witnessed suffix evidence model: + +```text +export_suffix -> CausalSuffixBundle +import_suffix -> ImportSuffixResult +``` + +The shared Continuum runtime-boundary family must name Echo's actual transport +objects: + +- `WitnessedSuffixShell` +- `CausalSuffixBundle` +- `WitnessedSuffixAdmissionResponse` +- `WitnessedSuffixAdmissionOutcome` +- `ImportOutcome` + +The older generic `SuffixShell` wording is not the canonical boundary. It was a +placeholder for the shape now present in Echo. + +## Ownership + +Continuum owns the shared authored family. + +Echo owns the runtime evidence shape for this boundary because Echo is the first +runtime implementing and consuming it. If a Continuum schema or registry row +does not match Echo's witnessed suffix model, update Continuum. 
+ +Wesley compiles the authored family. It does not define transport semantics. + +Other Continuum-speaking tools may consume the family, but they do not get to +weaken the causal evidence contract into state snapshots, summary strings, or +host-time ordering. + +Echo and `git-warp` are compatible because they exchange witnessed causal +history through Continuum-shaped families. They are not compatible because they +both model "the graph." There is no substrate-owned graph. + +## Transport Object + +A `CausalSuffixBundle` is a compact witnessed history object: + +- source and target provenance frontiers +- ordered source provenance entries +- boundary witness when the suffix has no importable entries yet +- deterministic source shell identity +- deterministic bundle identity +- optional basis/overlap evidence reused from settlement + +It is not: + +- a materialized graph snapshot +- a reading cache +- a raw patch stream +- a peer mutation command +- a last-writer-wins delta + +## Import Law + +Import is ordinary admission at a distance. + +Inbound transport admission is Intent-driven: + +```text +transport adapter receives bytes +-> adapter forms a canonical import proposal +-> dispatch_intent(EINT import intent) +-> ingress / scheduler / admission +-> tick + receipt / witness +``` + +The runtime must: + +1. Verify the source shell and bundle identities. +2. Resolve the explicit target basis. +3. Reuse retained prior import outcomes for idempotence. +4. Classify the result as `Admitted`, `Staged`, `Plural`, `Conflict`, or + `Obstructed`. +5. Emit a receipt or witness for the local decision. + +The runtime must not: + +- silently mutate the current frontier when the base is stale +- dedupe by visible state hash alone +- dedupe by runtime-local tick, Lamport clock, or receipt hash alone +- hide self-history loops as new remote work +- collapse alternate support paths into no-op folklore + +The host adapter may receive, decode, retain, and cache transported bytes. 
It +must not mutate worldlines, strands, braids, settlement state, provenance, or +retained import outcomes directly. A transported suffix affects Echo history +only when an import Intent is admitted. + +## Causal Mutation Rule + +The same rule applies to every external topology-changing operation: + +- fork worldline / create strand +- append braid member +- collapse or settle braid +- merge / settlement import +- pin or unpin support when exposed to application flows +- admit transported causal suffix +- append inverse / compensating operation + +External callers propose these operations as Intents against explicit causal +bases. Echo admits, stages, pluralizes, conflicts, or obstructs them under a +named law and emits receipts. + +Internal services and evaluators may remain implementation details. They are not +public mutation authority. + +## Idempotence + +Exact bundle re-import is not new work. It is the same import question returning +again. Echo may return the retained result or emit a local receipt pointing at +the prior result, but it must preserve the evidence that the bundle was already +adjudicated. + +Same final state is not enough for idempotence. Two bundles can produce the +same visible reading while preserving different provenance, support, or intent +observer structure. + +## Relation To Optics + +Transport uses the same WARP shape as optics: + +```text +slice/project/normalise -> lower/admit -> pack/retain +``` + +Distribution changes the basis construction and transport path. It does not +create a second admission law. + +More generally, tick admission, transport import, fork, merge, braid, +settlement, support mutation, inverse admission, observation, materialization, +and hologram slicing are all WARP optic operations over witnessed causal +history. Their outputs are holograms with different effect postures: admitted +history, observer-relative reading, retained artifact, or obstruction. 
+ +## Current Design Packet + +The active decision packet is: + +- `docs/design/0022-continuum-transport-identity/design.md` + +The earlier suffix-sync packet remains the broad design ancestor: + +- `docs/design/0009-witnessed-causal-suffix-sync/design.md` diff --git a/docs/architecture/echo-optics-adapter-notes.md b/docs/architecture/echo-optics-adapter-notes.md new file mode 100644 index 00000000..28840598 --- /dev/null +++ b/docs/architecture/echo-optics-adapter-notes.md @@ -0,0 +1,144 @@ + + + +# Echo Optics Adapter Notes + +This note describes where future consumer adapters sit relative to the Echo +Optics API. + +The boundary rule is: + +```text +Optic observes. +Admission admits. +Retention retains. +Plumber maintains. +Debug explains. +``` + +Adapters may make the API ergonomic for a product, protocol, or tool. They must +not turn Echo into a global graph API, a mutable state API, a file handle API, a +sync daemon, or a host-bag abstraction. + +## Adapter Shape + +An adapter may: + +- open a typed optic descriptor for a consumer workflow; +- choose an aperture and budget for a read; +- call `observe_optic`; +- decode observer-relative reading payload bytes into consumer-owned types; +- construct an explicit-base `dispatch_optic_intent` request; +- stage or submit generated EINT bytes; +- retain or reveal reading bytes by `ReadIdentity`. + +An adapter must not: + +- mutate Echo state by holding a handle; +- call direct service mutation paths as the public write model; +- replace typed admission outcomes with booleans or string statuses; +- silently retry against the latest frontier when a base coordinate is stale; +- satisfy reads by falling back to full materialization; +- hide missing witness, rights, budget, or attachment evidence; +- treat CAS content hashes as semantic reading identity. 
+ +## Layering + +The intended layering is narrow: + +```text +Consumer UI / tool + -> consumer adapter + -> generated or handwritten optic request builder + -> Echo Optics API + -> observation / admission / retention services + -> witnessed causal history and receipts +``` + +Consumer adapters own workflow vocabulary. Echo owns causal coordinates, +capability checks, bounded observation, admission outcomes, receipts, witness +refs, and retained reading identity. + +## GraphQL And Wesley + +GraphQL is an authoring or adapter illustration, not the Echo runtime +substrate. + +Wesley-generated code may expose GraphQL-shaped helper names because those names +belong to the authored contract. Generated helpers should still lower into Echo +as generic Optics requests: + +- generated query helpers build `ObserveOpticRequest`; +- generated mutation helpers build EINT v1 payloads and + `DispatchOpticIntentRequest`; +- generated decoding helpers decode observer payload bytes after Echo has + emitted a reading. + +The helper may hide byte packing from application code. It must not hide the +fact that an intent was proposed against an explicit causal basis and then +admitted, staged, obstructed, pluralized, or conflicted by Echo. + +## Consumer Notes + +Editors may use optics for bounded visible-window readings and explicit-base +edit proposals. `jedit` is a useful ergonomic example because it stresses +bounded text reads, stale-basis handling, attachment boundaries, retained +fragments, and undo-as-inverse-intent. It is not an Echo core ontology. Echo +must not gain privileged jedit, editor, rope, buffer, or file setter APIs. + +Debuggers may use optics to inspect coordinates, frontiers, receipts, witness +sets, and replay slices. A debugger adapter may explain why a reading is +obstructed or budget-limited, but it must not bypass `observe_optic` with a +private materializer to make the UI look complete. 
+ +Inspectors may use optics to reveal structural metadata, head identity, +attachment refs, retained-reading refs, and obstruction posture. Inspector +adapters should prefer small apertures and explicit recursive descent. + +Replay tools may use optics to read checkpoint-plus-tail identities, compare +frontiers, and build bounded reveal requests. A replay adapter must not present +a checkpoint hash as the live result unless the read identity honestly names +the live tail witness basis. + +Import/export flows may combine optics with witnessed suffix export/import, but +the adapter remains a coordinator. Import is still admission. Export is still a +read of witnessed causal material. Neither path should become a sync daemon or +latest-writer-wins merge policy. + +Retained reading caches may store payload bytes for a reading, but the cache key +must include semantic `ReadIdentity`, codec identity, byte length, and content +hash. The CAS hash names bytes. The read identity names the question those +bytes answer. + +## Deterministic Boundary + +Adapters may use convenient host-language DTOs internally, including serde on +non-authoritative diagnostic or bridge shapes. Anything that affects intents, +graph-preserved facts, causal history, receipts, witness material, read +identity, retained-reading identity, or admission posture must cross into Echo +as canonical deterministic bytes. + +Boundary code must normalize nondeterministic value shapes before admission or +retention. In particular, floats and other host-sensitive representations must +be canonicalized before they can affect hashes, receipts, witnesses, or graph +history. 
+ +## Rejected Shapes + +These names and shapes are intentionally rejected: + +- `RuntimeFacade`; +- `ObservationManager`; +- `UniversalMaterializer`; +- `GraphLikeRuntimeAdapter`; +- global `getGraph` / `setGraph` APIs; +- mutable file or buffer handles; +- hidden materialization caches; +- GraphQL-first runtime dispatch; +- direct host-time ordering as admission law; +- adapter-owned causal history. + +If a future consumer needs one of those shapes for local ergonomics, it must +remain outside Echo and prove that the Echo-facing calls still go through +bounded observation, explicit-base intent dispatch, typed admission, and +retained witness identity. diff --git a/docs/architecture/outline.md b/docs/architecture/outline.md index 93fb3fd5..48977823 100644 --- a/docs/architecture/outline.md +++ b/docs/architecture/outline.md @@ -5,6 +5,21 @@ _Echo is a deterministic WARP runtime for witnessed causal history and bounded observation._ +Core doctrine: [there is no graph](there-is-no-graph.md). Graph-like structure +is an observer-relative holographic reading over witnessed causal history, not a +canonical substrate-owned object. + +Retained-reading direction: +[WSC, Verkle, IPA, And Retained Readings](wsc-verkle-ipa-retained-readings.md) +defines WSC as the canonical columnar byte layout for retained readings, +Verkle-style commitments as the future authenticated index over those bytes, +IPA-style openings as compact aperture support, and `echo-cas` as byte +retention. + +Echo itself is a WARP optic for real-time deterministic simulation. It admits, +observes, retains, and reveals witnessed causal history through its local +runtime law; it is not an implementation of a hidden global graph. + ## What Echo owns Echo owns the hot runtime path: @@ -23,6 +38,14 @@ Echo does not own every possible platform noun around WARP. This repo's live doc Carrier state: the WARP state held by `warp-core`. 
+WARP optic: a bounded, capability-scoped, law-named operation over causal +history. It may admit a transition, observe a projection, slice a hologram, or +retain/reveal an artifact. + +Hologram: the witnessed output of a WARP optic. A hologram carries enough basis, +law, aperture, evidence, identity, and posture to recreate the claimed object +up to the equivalence relation declared by the optic law. + Witness: the retained evidence that a transition or reading came from a specific state, policy, patch, or coordinate. Shell: a retained boundary artifact such as a tick patch, provenance payload, or boundary record. @@ -50,6 +73,9 @@ Worldline: the retained ordered history used for replay, slices, and coordinate- - [Merkle Commit](../spec/merkle-commit.md) - [Worldlines, Playback, and Observation](../spec/SPEC-0004-worldlines-playback-truthbus.md) - [WASM ABI Contract](../spec/SPEC-0009-wasm-abi.md) +- [There Is No Graph](there-is-no-graph.md) +- [WSC, Verkle, IPA, And Retained Readings](wsc-verkle-ipa-retained-readings.md) +- [Continuum Transport](continuum-transport.md) ## Design posture diff --git a/docs/architecture/there-is-no-graph.md b/docs/architecture/there-is-no-graph.md new file mode 100644 index 00000000..abfa5cf7 --- /dev/null +++ b/docs/architecture/there-is-no-graph.md @@ -0,0 +1,342 @@ + + + +# There Is No Graph + +_The graph is a coordinate chart over witnessed causal history. It is not +Echo's substrate ontology._ + +## Rule + +There is no privileged, substrate-owned, canonical materialized graph. + +The territory is witnessed causal history: + +- admitted transitions; +- frontiers; +- lane identities; +- payload hashes; +- receipts; +- witnesses; +- checkpoints; +- suffixes; +- boundary artifacts; +- retained readings. + +Graph-like structure exists as an observer-relative holographic reading over +that history. It may be retained, cached, transported, compared, revealed, or +debugged. It does not become substrate truth by being materialized. 
+ +The hard formulation is: + +```text +Computation is the construction, inspection, and admission of witnessed +readings over causal history. +``` + +That does not mean state-like values disappear. Runtime state, files, +databases, editor buffers, build artifacts, terminal screens, and generated +code all still exist. They are materialized readings. They are not the +territory. + +## WARP Optics + +The common WARP shape is: + +```text +bounded causal basis/site ++ law ++ observer aperture ++ support obligations ++ capability, budget, and evidence posture +-> witnessed hologram +``` + +Everything public should be understood through this shape: + +- tick admission; +- graph rewrite admission; +- transport import; +- fork, merge, braid, and settlement; +- support pinning; +- inverse or compensating operation admission; +- observation; +- hologram slicing; +- materialization; +- retention and reveal. + +The difference is effect posture, not ontology. + +| Surface | Optic posture | Resulting hologram | +| ------------------- | --------------------------- | ------------------------------------------ | +| Intent / admission | propose causal rewrite | receipt, tick, provenance, outcome | +| Transport import | propose suffix admission | import receipt, staged/plural/conflict law | +| Topology operation | propose lane/topology law | topology receipt and witness | +| Observation | project causal history | reading envelope | +| Materialization | lower a bounded projection | materialized reading artifact | +| Retention / reveal | persist or recover artifact | retained hologram bytes plus identity | +| Debug / explanation | inspect law and evidence | explanation over named basis | + +All of them are WARP optics producing holograms. + +A hologram is a witnessed, law-named artifact carrying enough basis, aperture, +support, evidence, identity, and posture to recreate the claimed object up to +the equivalence relation declared by the optic law. 
+ +The optic is stronger than a plain projection. It carries observer geometry: + +- who or what is observing; +- which aperture is lawful; +- why the reading is being requested; +- which support must travel with the claim; +- which support may be compressed, redacted, or blocked; +- which law admits or obstructs the result. + +## State Machines As A Special Case + +Traditional state machines are not abolished. They are demoted. + +A conventional mutable-state system is a narrow optic with: + +- one privileged observer; +- one privileged materialization; +- one local transition function; +- weak or implicit witness obligations. + +Echo may still implement state-like machinery internally. That machinery is an +implementation detail below the public WARP contract. It must not leak into API +language as a universal mutable state object. + +## Runtimes As Optics + +A WARP optic is not only a small API method. Whole runtimes and tools can be +understood as optics when they expose a law-governed way to admit, observe, +rewrite, retain, or project causal artifacts. + +These are product roles, not ontological categories: + +| Runtime or tool | WARP optic role | +| --------------- | ----------------------------------------------------------------- | +| Echo | live execution and deterministic admission optic | +| `git-warp` | Git-backed causal persistence optic | +| Wesley | semantic/compiler optic over authored contract history | +| `warp-ttd` | historical inspection and causal forensics optic | +| Graft | governed aperture and support-obligation optic | +| WARPDrive | POSIX/FUSE materialization and write-back optic for legacy tools | +| `jedit` | human-facing console that hosts readings, lanes, and admission UI | + +Echo, `git-warp`, Wesley, `warp-ttd`, Graft, WARPDrive, and `jedit` are not +separate kinds of machine at the WARP layer. They are WARP optics with +different apertures, substrates, admission laws, tick shapes, support +obligations, and hologram families. 
+ +Wesley is a useful example because it is not a simulator at all. It still has +the WARP shape: authored GraphQL/schema input is projected into semantic +readings, target readings, and witnessed materializations under compiler law. +Generated artifacts are holograms over a semantic coordinate, not magic files. + +`warp-ttd` is the same kind of thing from another aperture. It is not outside +the system looking in. It is an observer that asks how a reading became +possible, which suffixes contributed, which obligations moved, which rejected +branches nearly happened, and which support was compressed, redacted, or +blocked. + +## Graph-Shaped Readings + +A graph-shaped reading is legal and useful. Echo may expose graph-shaped views, +indexes, cached readings, and materialized projections. + +The safety rule is that every graph-shaped object must remain scoped to the +question it answers: + +- causal coordinate or frontier; +- optic or observer law; +- aperture or local site; +- witness basis; +- rights posture; +- budget posture; +- projection and reducer versions; +- residual, plurality, or obstruction posture. + +No graph-shaped reading may pretend to be the runtime itself. + +## What Travels + +The graph is not the transport payload. + +The wrong model is: + +```text +Echo graph -> serialize -> git-warp graph -> modify -> send back +``` + +That smuggles a canonical object model back into the architecture. + +The WARP model is: + +```text +causal suffix ++ coordinate ++ optic or rule identity ++ support obligations ++ witness refs ++ hologram boundary +-> compatible local reading +``` + +Each runtime projects the reading appropriate to its own substrate and law. +Echo may project one chart, `git-warp` another, Wesley another, and `warp-ttd` +another. The readings can be compatible without being identical internal +objects. + +## Continuum + +Continuum is the shared WARP protocol layer. 
+ +The useful analogy is HTTP: Continuum lets independent WARP runtimes exchange +lawful causal-history artifacts without sharing an implementation under the +hood. It is not a claim that every runtime stores the same graph. It is a claim +that runtimes can exchange, admit, retain, observe, and compare witnessed +causal history through shared boundary families. + +Echo and `git-warp` are compatible because they can speak this causal-history +protocol. They are not compatible because they both model a canonical graph. +There is no such graph. + +Continuum-speaking runtimes exchange things such as: + +- witnessed suffix bundles; +- coordinates; +- optic or rule identifiers; +- support obligations; +- receipts; +- witness refs; +- frontier identities; +- payload refs; +- admission outcomes; +- reading envelopes; +- retained hologram identities. + +They do not exchange: + +- runtime internals; +- scheduler state; +- private cache layout; +- materialized state as truth; +- graph database objects; +- host-time ordering folklore. + +Continuum is not another runtime and not another graph model. It is the +compatibility membrane that lets independent WARP optics exchange enough +witnessed causal evidence to produce mutually intelligible readings. + +## WARPDrive + +WARPDrive is the compatibility layer for normal tools. + +A mounted path is not primary storage. It is a POSIX-shaped aperture: + +```text +read path at coordinate C through optic O -> materialized bytes +``` + +A write is not an overwrite of substrate truth. It is a candidate suffix: + +```text +prior reading + new bytes -> delta/hunk -> Intent -> admission attempt +``` + +This lets ordinary editors, formatters, shells, and IDEs operate against a +normal-looking directory while Echo, `git-warp`, or another WARP runtime keeps +witnessed causal history as the authority. + +Files remain real as boundary readings. They stop being the source of truth. 
+
+## Observer Geometry
+
+Observer Geometry is the discipline that prevents "reading" from becoming a
+loose synonym for "query".
+
+A reading must name or imply:
+
+- observer and purpose;
+- aperture;
+- causal basis;
+- path-sensitive support obligations;
+- rights posture;
+- budget posture;
+- residual, redaction, plurality, or obstruction posture.
+
+Missing support is not a cache miss to paper over. Missing support is an
+obstruction, rehydration requirement, redaction, or explicit residual posture.
+
+## API Consequences
+
+Echo APIs must not expose mutable graph handles, global graph APIs, direct
+setters, or hidden materialization fallbacks.
+
+External callers either:
+
+- propose an Intent against an explicit causal basis; or
+- observe through a bounded optic and receive a reading/hologram; or
+- retain/reveal an artifact by semantic identity and evidence posture.
+
+Internal services may keep whatever data structures are practical. They do not
+become public mutation authority.
+
+Echo should not become the universal WARP runtime. Echo speaks Continuum and
+implements one WARP optic family. It must not absorb Wesley, Graft, `git-warp`,
+`warp-ttd`, or WARPDrive as privileged substrate concepts.
+
+## Operational Corrections
+
+WARP does not make hard problems disappear. It makes them typed and
+witnessable.
+
+- Reproducibility is not automatic. It becomes a support obligation over
+  clocks, randomness, network, filesystem reads, environment variables,
+  toolchain versions, policy state, model versions, and human approvals.
+- Conflict does not disappear. Text conflict is demoted into semantic,
+  support, policy, admission-law, or optic-compatibility conflict.
+- Caches do not become truth. A cached reading is valid only for the coordinate,
+  aperture, law, witness basis, rights posture, and budget posture it names.
+- Files do not disappear at the boundary. WARPDrive makes them materialized
+  readings and turns writes into candidate suffixes.
+ +## Mathematical Posture + +The useful category-theoretic intuition is: + +```text +causal history is the base territory; +optics are dependent/provenance-carrying projections over that territory; +readings are local charts; +holograms are witnessed boundary artifacts; +admission extends the territory with a lawful suffix. +``` + +A WARP optic is stronger than a plain functor. A functor captures +composition-preserving projection, but WARP also carries observer aperture, +support obligations, redaction/compression/blocking posture, admission law, and +witness production. + +Continuum is not itself the colimit. It is the protocol that lets runtimes +exchange diagram fragments, suffixes, coordinates, witnesses, and optic +contracts so they can form compatible readings and lawful admissions. + +## Sentence To Keep + +```text +There is witnessed causal history. +WARP optics chart it. +Holograms witness those charts. +Materialized graphs are optional readings. +Continuum is the protocol for lawful causal-history exchange. +``` + +Even shorter: + +```text +There is no state. +There are readings with obligations. +``` diff --git a/docs/architecture/wsc-verkle-ipa-retained-readings.md b/docs/architecture/wsc-verkle-ipa-retained-readings.md new file mode 100644 index 00000000..a50d04b2 --- /dev/null +++ b/docs/architecture/wsc-verkle-ipa-retained-readings.md @@ -0,0 +1,250 @@ + + + +# WSC, Verkle, IPA, And Retained Readings + +Status: future direction, architecture doctrine. + +Echo's retained-reading direction is: + +```text +WSC = canonical columnar bytes for a reading or checkpoint +Verkle = authenticated commitment/index over those bytes +IPA = compact proof mechanism for opening bounded apertures +echo-cas = content-addressed byte retention +``` + +Short version: + +```text +WSC gives us the table. +Verkle gives us the root. +IPA gives us the aperture proof. +echo-cas stores the bytes. 
+``` + +This direction does not replace witnessed causal history, `ReadIdentity`, or +Echo's WARP optic doctrine. It names the future storage/proof stack for +retained holograms and checkpoint-style readings. + +## Layer Roles + +### WSC + +WSC means **Write-Streaming Columnar**. + +WSC is the canonical physical layout for WARP-shaped readings: + +- header +- WARP directory +- node rows +- edge rows +- outbound edge indexes +- attachment indexes +- attachment rows +- blob bytes + +It is deterministic, columnar, aligned, and designed for low-copy or future +memory-mapped reads. WSC is a byte layout, not semantic truth. + +### Verkle + +Verkle is the future authenticated commitment/index layer over WSC sections, +rows, and chunks. + +WSC naturally supplies stable coordinates such as: + +```text +/wsc/v1/warp/0/nodes/123 +/wsc/v1/warp/0/edges/456 +/wsc/v1/warp/0/node_atts/789 +/wsc/v1/warp/0/blobs/chunk/42 +``` + +A Verkle root can commit to the WSC-backed reading while allowing compact +openings of selected cells, rows, chunks, or aperture-specific bundles. + +### IPA + +IPA means **Inner Product Argument**. + +In this direction, IPA is a proof backend for opening Verkle/vector commitment +claims. It lets an optic carry compact support for a bounded aperture without +materializing the full retained reading. + +The exact proof system is future work. The design requirement now is to avoid +closing off the shape: + +```text +aperture -> selected WSC coordinates -> opened values + compact proof +``` + +### echo-cas + +`echo-cas` stores opaque bytes by content hash: + +```text +BlobHash = BLAKE3(bytes) +``` + +CAS does not know WSC, Verkle, IPA, jedit, ropes, buffers, or schemas. It stores +bytes. Meaning lives in typed references, reading identities, witnesses, and +optic coordinates above the CAS blob. 
+ +## Identity Stack + +These identities must stay separate: + +```text +CAS hash + exact byte identity + +WSC payload hash + canonical retained reading/checkpoint bytes + +Verkle root + authenticated commitment to a WSC-backed reading + +IPA proof hash + retained support for selected openings or relations + +ReadIdentity + semantic question answered by the reading +``` + +The CAS hash says "these bytes." It does not say "this buffer reading." A +`ReadIdentity` names the semantic question and basis those bytes answer. + +## jedit And Rope Fit + +`jedit` keeps its rope model as the hot editor structure. + +The rope is optimized for: + +- range replacement +- cursor-relative editing +- line and character metrics +- incremental dirty tracking +- editor ergonomics + +WSC is optimized for: + +- retained readings +- checkpoints +- deterministic bytes +- low-copy inspection +- CAS retention +- future proof openings + +So the boundary is: + +```text +jedit rope + hot app-owned text structure + +WSC + cold canonical retained reading/checkpoint layout + +echo-cas + byte store for WSC, proof, witness, receipt, and payload blobs +``` + +Echo must not learn rope semantics. The external `jedit` repo owns text law, +buffer law, edit-group law, and rope reconstruction. Echo hosts generated +contract artifacts, admits intents, emits readings, and retains bytes through +generic surfaces. 
+ +## jedit Checkpoint Shape + +A future jedit checkpoint may look like: + +```text +jedit rope + -> WSC reading: + nodes: Buffer, RopeRoot, RopeInternal, RopeLeaf, EditGroup, Checkpoint + edges: parent/child/order/edit/checkpoint relationships + attachments: weights, line counts, encoding, newline policy, chunk refs + blobs: text chunk bytes + -> Verkle root over WSC sections and chunks + -> IPA proof for a requested aperture + -> echo-cas retention for WSC/proof/witness bytes +``` + +For a buffer range read, the response should not need the whole buffer: + +```text +request: + buffer B, range [a..b], checkpoint C + +response: + opened text chunks + relevant rope leaf metadata + Verkle root for checkpoint C + IPA or opening proof ref + reading envelope naming contract, schema, coordinate, and aperture +``` + +The editor still needs the text bytes it renders. It should not need the full +WSC, full rope, full Echo graph, or unrelated sibling chunks. + +## Materialization Levels + +This stack supports graded materialization: + +```text +full materialization: + CAS -> full WSC -> full jedit rope -> full buffer + +partial materialization: + CAS -> selected WSC chunks -> selected rope leaves -> visible viewport + +proof-backed aperture: + opened values + proof + commitment root -> verified bounded reading +``` + +Graft, warp-ttd, and other WARP optics can consume the same retained evidence at +their own aperture without pretending there is one canonical in-memory graph. + +## Current Reality + +Current implemented facts: + +- `warp-core` has WSC writing, validation, and borrowed view support. +- `echo-cas` stores opaque bytes by content hash. +- contract/read retention cards already require CAS hashes to stay separate + from semantic reading identity. 
+ +Future work: + +- multi-warp WSC completion and retention integration +- retained-reading keys that can name WSC payloads honestly +- Verkle or equivalent authenticated indexes over WSC coordinates +- IPA or equivalent compact opening proofs for proof-carrying apertures +- jedit-owned projection from rope checkpoints into WSC-backed readings + +## Non-Goals + +- Do not make WSC the ontology. +- Do not make Verkle the ontology. +- Do not make IPA a storage substrate. +- Do not make `echo-cas` depend on WSC or proof systems. +- Do not make CAS hashes stand in for `ReadIdentity`. +- Do not add rope, buffer, or text APIs to Echo core. +- Do not require proof systems for the first jedit contract-hosting proof. +- Do not treat proof verification as admission without authority, policy, and + support-obligation checks. + +## Implementation Consequence + +Near-term Echo work should preserve slots for: + +- payload layout identifiers such as `wsc-v1` +- payload refs stored in `echo-cas` +- commitment family and commitment root +- proof family and proof ref +- opened WSC coordinates or aperture selectors +- verification posture +- residual or obstruction posture when support is unavailable + +The first implementation does not need Verkle or IPA. It needs the retained +reading identity to leave room for them. diff --git a/docs/assets/dags/deps-config.json b/docs/assets/dags/deps-config.json index ae5aeec3..0c7b3dc8 100644 --- a/docs/assets/dags/deps-config.json +++ b/docs/assets/dags/deps-config.json @@ -6,64 +6,94 @@ "This is a planning sketch, not canonical GitHub dependency truth." 
], "issue_edges": [ - { "from": 166, "to": 170, "confidence": "strong", "note": "TT0 -> TT1" }, - { "from": 170, "to": 171, "confidence": "strong", "note": "TT1 -> TT2 (explicit)" }, - { "from": 166, "to": 172, "confidence": "strong", "note": "TT0 -> TT3" }, - - { "from": 88, "to": 173, "confidence": "strong", "note": "capability tokens for deterministic surface" }, - { "from": 21, "to": 173, "confidence": "medium", "note": "security contexts likely required" }, - { "from": 26, "to": 173, "confidence": "medium", "note": "plugin ABI likely required" }, - { "from": 166, "to": 173, "confidence": "medium", "note": "time model spec lock likely required" }, - { "from": 174, "to": 173, "confidence": "weak", "note": "Wesley grammar maybe required" }, - - { "from": 37, "to": 21, "confidence": "strong", "note": "draft spec feeds epic" }, - { "from": 38, "to": 21, "confidence": "strong", "note": "ffi limits feed epic" }, - { "from": 39, "to": 21, "confidence": "strong", "note": "wasm validation feed epic" }, - { "from": 40, "to": 21, "confidence": "strong", "note": "denial tests feed epic" }, - - { "from": 32, "to": 20, "confidence": "strong", "note": "draft spec feeds epic" }, - { "from": 33, "to": 20, "confidence": "strong", "note": "ci sign dry run feeds epic" }, - { "from": 34, "to": 20, "confidence": "strong", "note": "cli verify path feeds epic" }, - { "from": 35, "to": 20, "confidence": "strong", "note": "key mgmt doc feeds epic" }, - { "from": 36, "to": 20, "confidence": "strong", "note": "ci verify feeds epic" }, - - { "from": 47, "to": 23, "confidence": "strong", "note": "scaffold feeds epic" }, - { "from": 48, "to": 23, "confidence": "strong", "note": "verify subcommand feeds epic" }, - { "from": 49, "to": 23, "confidence": "strong", "note": "bench subcommand feeds epic" }, - { "from": 50, "to": 23, "confidence": "strong", "note": "inspect subcommand feeds epic" }, - { "from": 51, "to": 23, "confidence": "strong", "note": "docs/man pages feed epic" }, + { + "from": 
21, + "to": 173, + "confidence": "medium", + "note": "security contexts likely required" + }, + { + "from": 174, + "to": 173, + "confidence": "weak", + "note": "Wesley grammar maybe required" + }, + { + "from": 33, + "to": 20, + "confidence": "strong", + "note": "ci sign dry run feeds epic" + }, + { + "from": 34, + "to": 20, + "confidence": "strong", + "note": "cli verify path feeds epic" + }, + { + "from": 35, + "to": 20, + "confidence": "strong", + "note": "key mgmt doc feeds epic" + }, + { + "from": 36, + "to": 20, + "confidence": "strong", + "note": "ci verify feeds epic" + }, { "from": 75, "to": 24, "confidence": "strong", "note": "spec feeds epic" }, - { "from": 76, "to": 24, "confidence": "strong", "note": "watcher feeds epic" }, - { "from": 77, "to": 24, "confidence": "strong", "note": "snapshot swap feeds epic" }, - { "from": 78, "to": 24, "confidence": "strong", "note": "gate+tests feed epic" }, - { "from": 79, "to": 24, "confidence": "strong", "note": "docs/logging feed epic" }, - - { "from": 80, "to": 25, "confidence": "strong", "note": "spec feeds epic" }, - { "from": 81, "to": 80, "confidence": "strong", "note": "reader depends on spec" }, - { "from": 82, "to": 80, "confidence": "strong", "note": "loader depends on spec" }, - { "from": 83, "to": 80, "confidence": "strong", "note": "integrity depends on spec" }, - { "from": 84, "to": 80, "confidence": "strong", "note": "sample+tests depends on spec" }, - { "from": 81, "to": 25, "confidence": "medium", "note": "reader likely part of epic" }, - { "from": 82, "to": 25, "confidence": "medium", "note": "loader likely part of epic" }, - { "from": 83, "to": 25, "confidence": "medium", "note": "integrity likely part of epic" }, - { "from": 84, "to": 25, "confidence": "medium", "note": "sample+tests likely part of epic" }, - - { "from": 85, "to": 26, "confidence": "strong", "note": "draft spec feeds epic" }, - { "from": 86, "to": 26, "confidence": "strong", "note": "header+loader feeds epic" }, - { "from": 87, 
"to": 26, "confidence": "strong", "note": "negotiation feeds epic" }, - { "from": 88, "to": 26, "confidence": "strong", "note": "capability tokens feed epic" }, - { "from": 89, "to": 26, "confidence": "strong", "note": "example plugin feeds epic" } + { + "from": 76, + "to": 24, + "confidence": "strong", + "note": "watcher feeds epic" + }, + { + "from": 79, + "to": 24, + "confidence": "strong", + "note": "docs/logging feed epic" + } ], "milestone_edges": [ - { "from": "TT0", "to": "TT1", "confidence": "strong", "note": "time model unlocks inspector frame" }, - { "from": "TT0", "to": "TT2", "confidence": "strong", "note": "time model unlocks time travel MVP" }, - { "from": "TT1", "to": "TT2", "confidence": "strong", "note": "inspector frame supports time travel UX" }, - { "from": "TT0", "to": "TT3", "confidence": "strong", "note": "time model unlocks worldline compare" }, + { + "from": "TT0", + "to": "TT2", + "confidence": "strong", + "note": "time model unlocks time travel MVP" + }, + { + "from": "TT0", + "to": "TT3", + "confidence": "strong", + "note": "time model unlocks worldline compare" + }, - { "from": "1C", "to": "S1", "confidence": "strong", "note": "bindings needed for deterministic Rhai" }, - { "from": "1E", "to": "S1", "confidence": "strong", "note": "security contexts constrain sandbox" }, - { "from": "TT0", "to": "S1", "confidence": "medium", "note": "deterministic time model likely needed" }, - { "from": "W1", "to": "S1", "confidence": "weak", "note": "boundary grammar maybe needed" } + { + "from": "1C", + "to": "S1", + "confidence": "strong", + "note": "bindings needed for deterministic Rhai" + }, + { + "from": "1E", + "to": "S1", + "confidence": "strong", + "note": "security contexts constrain sandbox" + }, + { + "from": "TT0", + "to": "S1", + "confidence": "medium", + "note": "deterministic time model likely needed" + }, + { + "from": "W1", + "to": "S1", + "confidence": "weak", + "note": "boundary grammar maybe needed" + } ] } diff --git 
a/docs/assets/dags/issue-deps.dot b/docs/assets/dags/issue-deps.dot index 8c93ec21..bcef37bb 100644 --- a/docs/assets/dags/issue-deps.dot +++ b/docs/assets/dags/issue-deps.dot @@ -4,7 +4,7 @@ digraph echo_issue_dependencies { graph [rankdir=LR, labelloc="t", fontsize=18, fontname="Helvetica", newrank=true, splines=true]; node [shape=box, style="rounded,filled", fontname="Helvetica", fontsize=10, margin="0.10,0.06"]; edge [fontname="Helvetica", fontsize=9, arrowsize=0.8]; - label="Echo — Issue Dependency Sketch\nEdge direction: prerequisite → dependent (do tail before head)\nEdge styles encode confidence (solid=strong, dashed=medium, dotted=weak).\nGreen = Confirmed in Issue Body; Red = In Issue Body but missing from Plan."; + label="Echo — Issue Dependency Sketch\nEdge direction: prerequisite → dependent (do tail before head)\nEdge styles encode confidence (solid=strong, dashed=medium, dotted=weak)."; subgraph cluster_legend { label="Legend"; @@ -14,34 +14,16 @@ digraph echo_issue_dependencies { L1 [label="strong", fillcolor="#ffffff"]; L2 [label="medium", fillcolor="#ffffff"]; L3 [label="weak", fillcolor="#ffffff"]; - LG [label="confirmed (reality)", color="green", fontcolor="green"]; - LR [label="missing from plan", color="red", fontcolor="red"]; L1 -> L2 [arrowhead=none, color="black", penwidth=1.4, style="solid"]; L2 -> L3 [arrowhead=none, color="gray40", penwidth=1.2, style="dashed"]; } - subgraph cluster__no_milestone_ { - label="(no milestone)"; - style="rounded"; - color="gray70"; - node [fillcolor="#ffffff"]; - i243 [label="#243\\nTT1: dt policy (fixed timestep vs admitted dt stream)", tooltip="TT1: dt policy (fixed timestep vs admitted dt stream)", URL="https://github.com/flyingrobots/echo/issues/243"]; - i244 [label="#244\\nTT1: TimeStream retention + spool compaction + wormhole density", tooltip="TT1: TimeStream retention + spool compaction + wormhole density", URL="https://github.com/flyingrobots/echo/issues/244"]; - i245 [label="#245\\nTT1: Merge 
semantics for admitted stream facts across worldlines", tooltip="TT1: Merge semantics for admitted stream facts across worldlines", URL="https://github.com/flyingrobots/echo/issues/245"]; - i246 [label="#246\\nTT1: Security/capabilities for fork/rewind/merge in multiplayer", tooltip="TT1: Security/capabilities for fork/rewind/merge in multiplayer", URL="https://github.com/flyingrobots/echo/issues/246"]; - } - subgraph cluster_1C___Rhai_TS_Bindings { label="1C – Rhai/TS Bindings"; style="rounded"; color="gray70"; node [fillcolor="#dcfce7"]; - i26 [label="#26\\nPlugin ABI (C) v0", tooltip="Plugin ABI (C) v0", URL="https://github.com/flyingrobots/echo/issues/26"]; - i85 [label="#85\\nDraft C ABI spec", tooltip="Draft C ABI spec", URL="https://github.com/flyingrobots/echo/issues/85"]; - i86 [label="#86\\nC header + host loader", tooltip="C header + host loader", URL="https://github.com/flyingrobots/echo/issues/86"]; - i87 [label="#87\\nVersion negotiation", tooltip="Version negotiation", URL="https://github.com/flyingrobots/echo/issues/87"]; - i88 [label="#88\\nCapability tokens", tooltip="Capability tokens", URL="https://github.com/flyingrobots/echo/issues/88"]; - i89 [label="#89\\nExample plugin + tests", tooltip="Example plugin + tests", URL="https://github.com/flyingrobots/echo/issues/89"]; + i173 [label="#173\\nS1: Deterministic Rhai surface (sandbox + claims/effects)", tooltip="S1: Deterministic Rhai surface (sandbox + claims/effects)", URL="https://github.com/flyingrobots/echo/issues/173"]; } subgraph cluster_1E___Networking___Confluence_MVP { @@ -50,10 +32,6 @@ digraph echo_issue_dependencies { color="gray70"; node [fillcolor="#ffedd5"]; i21 [label="#21\\nSpec: Security Contexts (FFI/WASM/CLI)", tooltip="Spec: Security Contexts (FFI/WASM/CLI)", URL="https://github.com/flyingrobots/echo/issues/21"]; - i37 [label="#37\\nDraft security contexts spec", tooltip="Draft security contexts spec", URL="https://github.com/flyingrobots/echo/issues/37"]; - i38 
[label="#38\\nFFI limits and validation", tooltip="FFI limits and validation", URL="https://github.com/flyingrobots/echo/issues/38"]; - i39 [label="#39\\nWASM input validation", tooltip="WASM input validation", URL="https://github.com/flyingrobots/echo/issues/39"]; - i40 [label="#40\\nUnit tests for denials", tooltip="Unit tests for denials", URL="https://github.com/flyingrobots/echo/issues/40"]; } subgraph cluster_1F___Tooling_Integration { @@ -61,101 +39,15 @@ digraph echo_issue_dependencies { style="rounded"; color="gray70"; node [fillcolor="#f3f4f6"]; - i19 [label="#19\\nSpec: Persistent Store (on-disk)", tooltip="Spec: Persistent Store (on-disk)", URL="https://github.com/flyingrobots/echo/issues/19"]; i20 [label="#20\\nSpec: Commit/Manifest Signing", tooltip="Spec: Commit/Manifest Signing", URL="https://github.com/flyingrobots/echo/issues/20"]; i24 [label="#24\\nEditor Hot-Reload (spec + impl)", tooltip="Editor Hot-Reload (spec + impl)", URL="https://github.com/flyingrobots/echo/issues/24"]; - i25 [label="#25\\nImporter: TurtlGraph → Echo store", tooltip="Importer: TurtlGraph → Echo store", URL="https://github.com/flyingrobots/echo/issues/25"]; - i28 [label="#28\\nDraft spec document (header/ULEB128/property/string-pool)", tooltip="Draft spec document (header/ULEB128/property/string-pool)", URL="https://github.com/flyingrobots/echo/issues/28"]; - i32 [label="#32\\nDraft signing spec", tooltip="Draft signing spec", URL="https://github.com/flyingrobots/echo/issues/32"]; i33 [label="#33\\nCI: sign release artifacts (dry run)", tooltip="CI: sign release artifacts (dry run)", URL="https://github.com/flyingrobots/echo/issues/33"]; i34 [label="#34\\nCLI verify path", tooltip="CLI verify path", URL="https://github.com/flyingrobots/echo/issues/34"]; i35 [label="#35\\nKey management doc", tooltip="Key management doc", URL="https://github.com/flyingrobots/echo/issues/35"]; i36 [label="#36\\nCI: verify signatures", tooltip="CI: verify signatures", 
URL="https://github.com/flyingrobots/echo/issues/36"]; i75 [label="#75\\nDraft hot-reload spec", tooltip="Draft hot-reload spec", URL="https://github.com/flyingrobots/echo/issues/75"]; i76 [label="#76\\nFile watcher/debounce", tooltip="File watcher/debounce", URL="https://github.com/flyingrobots/echo/issues/76"]; - i77 [label="#77\\nAtomic snapshot swap", tooltip="Atomic snapshot swap", URL="https://github.com/flyingrobots/echo/issues/77"]; - i78 [label="#78\\nEditor gate + tests", tooltip="Editor gate + tests", URL="https://github.com/flyingrobots/echo/issues/78"]; i79 [label="#79\\nDocs/logging", tooltip="Docs/logging", URL="https://github.com/flyingrobots/echo/issues/79"]; - i80 [label="#80\\nDraft importer spec", tooltip="Draft importer spec", URL="https://github.com/flyingrobots/echo/issues/80"]; - i81 [label="#81\\nMinimal reader", tooltip="Minimal reader", URL="https://github.com/flyingrobots/echo/issues/81"]; - i82 [label="#82\\nEcho store loader", tooltip="Echo store loader", URL="https://github.com/flyingrobots/echo/issues/82"]; - i83 [label="#83\\nIntegrity verification", tooltip="Integrity verification", URL="https://github.com/flyingrobots/echo/issues/83"]; - i84 [label="#84\\nSample + tests", tooltip="Sample + tests", URL="https://github.com/flyingrobots/echo/issues/84"]; - } - - subgraph cluster_Demo_2___Splash_Guy__Deterministic_Lockstep_ { - label="Demo 2 — Splash Guy (Deterministic Lockstep)"; - style="rounded"; - color="gray70"; - node [fillcolor="#ffffff"]; - i222 [label="#222\\nDemo 2: Splash Guy — deterministic rules + state model", tooltip="Demo 2: Splash Guy — deterministic rules + state model", URL="https://github.com/flyingrobots/echo/issues/222"]; - i223 [label="#223\\nDemo 2: Splash Guy — lockstep input protocol + two-peer harness", tooltip="Demo 2: Splash Guy — lockstep input protocol + two-peer harness", URL="https://github.com/flyingrobots/echo/issues/223"]; - i224 [label="#224\\nDemo 2: Splash Guy — controlled desync lessons (make it 
fail on purpose)", tooltip="Demo 2: Splash Guy — controlled desync lessons (make it fail on purpose)", URL="https://github.com/flyingrobots/echo/issues/224"]; - i225 [label="#225\\nDemo 2: Splash Guy — minimal rendering / visualization path", tooltip="Demo 2: Splash Guy — minimal rendering / visualization path", URL="https://github.com/flyingrobots/echo/issues/225"]; - i226 [label="#226\\nDemo 2: Splash Guy — docs: networking-first course modules", tooltip="Demo 2: Splash Guy — docs: networking-first course modules", URL="https://github.com/flyingrobots/echo/issues/226"]; - } - - subgraph cluster_Demo_3___Tumble_Tower__Deterministic_Physics_ { - label="Demo 3 — Tumble Tower (Deterministic Physics)"; - style="rounded"; - color="gray70"; - node [fillcolor="#ffffff"]; - i231 [label="#231\\nDemo 3: Tumble Tower — Stage 0 physics (2D AABB stacking)", tooltip="Demo 3: Tumble Tower — Stage 0 physics (2D AABB stacking)", URL="https://github.com/flyingrobots/echo/issues/231"]; - i232 [label="#232\\nDemo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts)", tooltip="Demo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts)", URL="https://github.com/flyingrobots/echo/issues/232"]; - i233 [label="#233\\nDemo 3: Tumble Tower — Stage 2 physics (friction + restitution)", tooltip="Demo 3: Tumble Tower — Stage 2 physics (friction + restitution)", URL="https://github.com/flyingrobots/echo/issues/233"]; - i234 [label="#234\\nDemo 3: Tumble Tower — Stage 3 physics (sleeping + stack stability)", tooltip="Demo 3: Tumble Tower — Stage 3 physics (sleeping + stack stability)", URL="https://github.com/flyingrobots/echo/issues/234"]; - i235 [label="#235\\nDemo 3: Tumble Tower — lockstep harness + per-tick fingerprinting", tooltip="Demo 3: Tumble Tower — lockstep harness + per-tick fingerprinting", URL="https://github.com/flyingrobots/echo/issues/235"]; - i236 [label="#236\\nDemo 3: Tumble Tower — controlled desync breakers (physics edition)", tooltip="Demo 3: 
Tumble Tower — controlled desync breakers (physics edition)", URL="https://github.com/flyingrobots/echo/issues/236"]; - i237 [label="#237\\nDemo 3: Tumble Tower — visualization (2D view + debug overlays)", tooltip="Demo 3: Tumble Tower — visualization (2D view + debug overlays)", URL="https://github.com/flyingrobots/echo/issues/237"]; - i238 [label="#238\\nDemo 3: Tumble Tower — docs course (physics ladder)", tooltip="Demo 3: Tumble Tower — docs course (physics ladder)", URL="https://github.com/flyingrobots/echo/issues/238"]; - } - - subgraph cluster_M2_2___Playground_Slice { - label="M2.2 – Playground Slice"; - style="rounded"; - color="gray70"; - node [fillcolor="#fef9c3"]; - i23 [label="#23\\nCLI: verify/bench/inspect", tooltip="CLI: verify/bench/inspect", URL="https://github.com/flyingrobots/echo/issues/23"]; - i47 [label="#47\\nScaffold CLI subcommands", tooltip="Scaffold CLI subcommands", URL="https://github.com/flyingrobots/echo/issues/47"]; - i48 [label="#48\\nImplement 'verify'", tooltip="Implement 'verify'", URL="https://github.com/flyingrobots/echo/issues/48"]; - i49 [label="#49\\nImplement 'bench'", tooltip="Implement 'bench'", URL="https://github.com/flyingrobots/echo/issues/49"]; - i50 [label="#50\\nImplement 'inspect'", tooltip="Implement 'inspect'", URL="https://github.com/flyingrobots/echo/issues/50"]; - i51 [label="#51\\nDocs/man pages", tooltip="Docs/man pages", URL="https://github.com/flyingrobots/echo/issues/51"]; - } - - subgraph cluster_S1___Deterministic_Rhai_Surface { - label="S1 – Deterministic Rhai Surface"; - style="rounded"; - color="gray70"; - node [fillcolor="#ede9fe"]; - i173 [label="#173\\nS1: Deterministic Rhai surface (sandbox + claims/effects)", tooltip="S1: Deterministic Rhai surface (sandbox + claims/effects)", URL="https://github.com/flyingrobots/echo/issues/173"]; - } - - subgraph cluster_TT1___Streams_Inspector_Frame { - label="TT1 – Streams Inspector Frame"; - style="rounded"; - color="gray70"; - node [fillcolor="#dbeafe"]; 
- i170 [label="#170\\nTT1: StreamsFrame inspector support (backlog + cursors + admission decisions)", tooltip="TT1: StreamsFrame inspector support (backlog + cursors + admission decisions)", URL="https://github.com/flyingrobots/echo/issues/170"]; - } - - subgraph cluster_TT2___Time_Travel_MVP { - label="TT2 – Time Travel MVP"; - style="rounded"; - color="gray70"; - node [fillcolor="#dbeafe"]; - i171 [label="#171\\nTT2: Time Travel MVP (pause/rewind/buffer/catch-up)", tooltip="TT2: Time Travel MVP (pause/rewind/buffer/catch-up)", URL="https://github.com/flyingrobots/echo/issues/171"]; - i205 [label="#205\\nTT2: Reliving debugger MVP (scrub timeline + causal slice + fork branch)", tooltip="TT2: Reliving debugger MVP (scrub timeline + causal slice + fork branch)", URL="https://github.com/flyingrobots/echo/issues/205"]; - } - - subgraph cluster_TT3___Rulial_Diff___Worldline_Compare { - label="TT3 – Rulial Diff / Worldline Compare"; - style="rounded"; - color="gray70"; - node [fillcolor="#dbeafe"]; - i172 [label="#172\\nTT3: Rulial diff / worldline compare MVP", tooltip="TT3: Rulial diff / worldline compare MVP", URL="https://github.com/flyingrobots/echo/issues/172"]; - i199 [label="#199\\nTT3: Wesley worldline diff (compare query outputs/proofs across ticks)", tooltip="TT3: Wesley worldline diff (compare query outputs/proofs across ticks)", URL="https://github.com/flyingrobots/echo/issues/199"]; - i204 [label="#204\\nTT3: Provenance heatmap (blast radius / cohesion over time)", tooltip="TT3: Provenance heatmap (blast radius / cohesion over time)", URL="https://github.com/flyingrobots/echo/issues/204"]; } subgraph cluster_W1___Wesley_as_a_Boundary_Grammar { @@ -166,68 +58,13 @@ digraph echo_issue_dependencies { i174 [label="#174\\nW1: Wesley as a boundary grammar (hashable view artifacts)", tooltip="W1: Wesley as a boundary grammar (hashable view artifacts)", URL="https://github.com/flyingrobots/echo/issues/174"]; } - i166 -> i170 [color="black", penwidth=1.4, 
style="solid", tooltip="TT0 -> TT1"]; - i170 -> i171 [color="green3", penwidth=2.0, style="solid", tooltip="TT1 -> TT2 (explicit)"]; - i166 -> i172 [color="black", penwidth=1.4, style="solid", tooltip="TT0 -> TT3"]; - i88 -> i173 [color="black", penwidth=1.4, style="solid", tooltip="capability tokens for deterministic surface"]; i21 -> i173 [color="gray40", penwidth=1.2, style="dashed", tooltip="security contexts likely required"]; - i26 -> i173 [color="gray40", penwidth=1.2, style="dashed", tooltip="plugin ABI likely required"]; - i166 -> i173 [color="gray40", penwidth=1.2, style="dashed", tooltip="time model spec lock likely required"]; i174 -> i173 [color="gray70", penwidth=1.2, style="dotted", tooltip="Wesley grammar maybe required"]; - i37 -> i21 [color="green3", penwidth=2.0, style="solid", tooltip="draft spec feeds epic"]; - i38 -> i21 [color="green3", penwidth=2.0, style="solid", tooltip="ffi limits feed epic"]; - i39 -> i21 [color="green3", penwidth=2.0, style="solid", tooltip="wasm validation feed epic"]; - i40 -> i21 [color="green3", penwidth=2.0, style="solid", tooltip="denial tests feed epic"]; - i32 -> i20 [color="green3", penwidth=2.0, style="solid", tooltip="draft spec feeds epic"]; - i33 -> i20 [color="green3", penwidth=2.0, style="solid", tooltip="ci sign dry run feeds epic"]; - i34 -> i20 [color="green3", penwidth=2.0, style="solid", tooltip="cli verify path feeds epic"]; - i35 -> i20 [color="green3", penwidth=2.0, style="solid", tooltip="key mgmt doc feeds epic"]; - i36 -> i20 [color="green3", penwidth=2.0, style="solid", tooltip="ci verify feeds epic"]; - i47 -> i23 [color="black", penwidth=1.4, style="solid", tooltip="scaffold feeds epic"]; - i48 -> i23 [color="black", penwidth=1.4, style="solid", tooltip="verify subcommand feeds epic"]; - i49 -> i23 [color="black", penwidth=1.4, style="solid", tooltip="bench subcommand feeds epic"]; - i50 -> i23 [color="black", penwidth=1.4, style="solid", tooltip="inspect subcommand feeds epic"]; - i51 -> 
i23 [color="black", penwidth=1.4, style="solid", tooltip="docs/man pages feed epic"]; + i33 -> i20 [color="black", penwidth=1.4, style="solid", tooltip="ci sign dry run feeds epic"]; + i34 -> i20 [color="black", penwidth=1.4, style="solid", tooltip="cli verify path feeds epic"]; + i35 -> i20 [color="black", penwidth=1.4, style="solid", tooltip="key mgmt doc feeds epic"]; + i36 -> i20 [color="black", penwidth=1.4, style="solid", tooltip="ci verify feeds epic"]; i75 -> i24 [color="black", penwidth=1.4, style="solid", tooltip="spec feeds epic"]; i76 -> i24 [color="black", penwidth=1.4, style="solid", tooltip="watcher feeds epic"]; - i77 -> i24 [color="black", penwidth=1.4, style="solid", tooltip="snapshot swap feeds epic"]; - i78 -> i24 [color="black", penwidth=1.4, style="solid", tooltip="gate+tests feed epic"]; i79 -> i24 [color="black", penwidth=1.4, style="solid", tooltip="docs/logging feed epic"]; - i80 -> i25 [color="black", penwidth=1.4, style="solid", tooltip="spec feeds epic"]; - i81 -> i80 [color="black", penwidth=1.4, style="solid", tooltip="reader depends on spec"]; - i82 -> i80 [color="black", penwidth=1.4, style="solid", tooltip="loader depends on spec"]; - i83 -> i80 [color="black", penwidth=1.4, style="solid", tooltip="integrity depends on spec"]; - i84 -> i80 [color="black", penwidth=1.4, style="solid", tooltip="sample+tests depends on spec"]; - i81 -> i25 [color="gray40", penwidth=1.2, style="dashed", tooltip="reader likely part of epic"]; - i82 -> i25 [color="gray40", penwidth=1.2, style="dashed", tooltip="loader likely part of epic"]; - i83 -> i25 [color="gray40", penwidth=1.2, style="dashed", tooltip="integrity likely part of epic"]; - i84 -> i25 [color="gray40", penwidth=1.2, style="dashed", tooltip="sample+tests likely part of epic"]; - i85 -> i26 [color="black", penwidth=1.4, style="solid", tooltip="draft spec feeds epic"]; - i86 -> i26 [color="black", penwidth=1.4, style="solid", tooltip="header+loader feeds epic"]; - i87 -> i26 
[color="black", penwidth=1.4, style="solid", tooltip="negotiation feeds epic"]; - i88 -> i26 [color="black", penwidth=1.4, style="solid", tooltip="capability tokens feed epic"]; - i89 -> i26 [color="black", penwidth=1.4, style="solid", tooltip="example plugin feeds epic"]; - i28 -> i19 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i170 -> i205 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i246 -> i170 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i245 -> i170 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i244 -> i170 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i243 -> i170 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i171 -> i172 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i171 -> i204 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i171 -> i199 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i222 -> i226 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i223 -> i226 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i224 -> i226 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i225 -> i226 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i231 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i231 -> i232 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing 
from Plan)"]; - i232 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i232 -> i233 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i233 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i233 -> i234 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i234 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i235 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i236 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; - i237 -> i238 [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from Issue Body (missing from Plan)"]; } diff --git a/docs/assets/dags/issue-deps.svg b/docs/assets/dags/issue-deps.svg index 09c1df03..daa6a9d9 100644 --- a/docs/assets/dags/issue-deps.svg +++ b/docs/assets/dags/issue-deps.svg @@ -1,1260 +1,256 @@ - - - + + echo_issue_dependencies - -Echo — Issue Dependency Sketch -Edge direction: prerequisite → dependent (do tail before head) -Edge styles encode confidence (solid=strong, dashed=medium, dotted=weak). -Green = Confirmed in Issue Body; Red = In Issue Body but missing from Plan. + +Echo — Issue Dependency Sketch +Edge direction: prerequisite → dependent (do tail before head) +Edge styles encode confidence (solid=strong, dashed=medium, dotted=weak). 
cluster_legend - -Legend + +Legend -cluster__no_milestone_ - -(no milestone) - - cluster_1C___Rhai_TS_Bindings - -1C – Rhai/TS Bindings + +1C – Rhai/TS Bindings - + cluster_1E___Networking___Confluence_MVP - -1E – Networking & Confluence MVP + +1E – Networking & Confluence MVP - + cluster_1F___Tooling_Integration - -1F – Tooling Integration - - -cluster_Demo_2___Splash_Guy__Deterministic_Lockstep_ - -Demo 2 — Splash Guy (Deterministic Lockstep) - - -cluster_Demo_3___Tumble_Tower__Deterministic_Physics_ - -Demo 3 — Tumble Tower (Deterministic Physics) - - -cluster_M2_2___Playground_Slice - -M2.2 – Playground Slice - - -cluster_S1___Deterministic_Rhai_Surface - -S1 – Deterministic Rhai Surface - - -cluster_TT1___Streams_Inspector_Frame - -TT1 – Streams Inspector Frame - - -cluster_TT2___Time_Travel_MVP - -TT2 – Time Travel MVP + +1F – Tooling Integration - -cluster_TT3___Rulial_Diff___Worldline_Compare - -TT3 – Rulial Diff / Worldline Compare - - + cluster_W1___Wesley_as_a_Boundary_Grammar - -W1 – Wesley as a Boundary Grammar + +W1 – Wesley as a Boundary Grammar L1 - -strong + +strong L2 - -medium + +medium L1->L2 - + L3 - -weak + +weak L2->L3 - - - - -LG - -confirmed (reality) - - - -LR - -missing from plan - - - -i243 - - -#243\nTT1: dt policy (fixed timestep vs admitted dt stream) - - - - - -i170 - - -#170\nTT1: StreamsFrame inspector support (backlog + cursors + admission decisions) - - - - - -i243->i170 - - - - - - - - -i244 - - -#244\nTT1: TimeStream retention + spool compaction + wormhole density - - - - - -i244->i170 - - - - - - - - -i245 - - -#245\nTT1: Merge semantics for admitted stream facts across worldlines - - - - - -i245->i170 - - - - - - - - -i246 - - -#246\nTT1: Security/capabilities for fork/rewind/merge in multiplayer - - - - - -i246->i170 - - - - - - - - -i26 - - -#26\nPlugin ABI (C) v0 - - + - + i173 - - -#173\nS1: Deterministic Rhai surface (sandbox + claims/effects) - - - - - -i26->i173 - - - - - - - - -i85 - - -#85\nDraft C ABI spec - - - - - 
-i85->i26 - - - - - - - - -i86 - - -#86\nC header + host loader - - - - - -i86->i26 - - - - - - - - -i87 - - -#87\nVersion negotiation - - - - - -i87->i26 - - - - - - - - -i88 - - -#88\nCapability tokens - - - - - -i88->i26 - - - - - - - - -i88->i173 - - - - - - - - -i89 - - -#89\nExample plugin + tests - - - - - -i89->i26 - - - + + +#173\nS1: Deterministic Rhai surface (sandbox + claims/effects) - + i21 - - -#21\nSpec: Security Contexts (FFI/WASM/CLI) + + +#21\nSpec: Security Contexts (FFI/WASM/CLI) - + i21->i173 - - - - - - - - -i37 - - -#37\nDraft security contexts spec - - - - - -i37->i21 - - - - - - - - -i38 - - -#38\nFFI limits and validation - - - - - -i38->i21 - - - - - - - - -i39 - - -#39\nWASM input validation - - - - - -i39->i21 - - - - - - - - -i40 - - -#40\nUnit tests for denials - - - - - -i40->i21 - - - - - - - - -i19 - - -#19\nSpec: Persistent Store (on-disk) + + + - + i20 - - -#20\nSpec: Commit/Manifest Signing + + +#20\nSpec: Commit/Manifest Signing - + i24 - - -#24\nEditor Hot-Reload (spec + impl) - - - - - -i25 - - -#25\nImporter: TurtlGraph → Echo store - - - - - -i28 - - -#28\nDraft spec document (header/ULEB128/property/string-pool) - - - - - -i28->i19 - - - - - - - - -i32 - - -#32\nDraft signing spec - - - - - -i32->i20 - - - + + +#24\nEditor Hot-Reload (spec + impl) - + i33 - - -#33\nCI: sign release artifacts (dry run) + + +#33\nCI: sign release artifacts (dry run) - + i33->i20 - - - + + + - + i34 - - -#34\nCLI verify path + + +#34\nCLI verify path - + i34->i20 - - - + + + - + i35 - - -#35\nKey management doc + + +#35\nKey management doc - + i35->i20 - - - + + + - + i36 - - -#36\nCI: verify signatures + + +#36\nCI: verify signatures - + i36->i20 - - - + + + - + i75 - - -#75\nDraft hot-reload spec + + +#75\nDraft hot-reload spec - + i75->i24 - - - + + + - + i76 - - -#76\nFile watcher/debounce + + +#76\nFile watcher/debounce - + i76->i24 - - - - - - - - -i77 - - -#77\nAtomic snapshot swap - - - - - -i77->i24 - - - - - - - - -i78 - - 
-#78\nEditor gate + tests - - - - - -i78->i24 - - - + + + - + i79 - - -#79\nDocs/logging + + +#79\nDocs/logging - + i79->i24 - - - - - - - - -i80 - - -#80\nDraft importer spec - - - - - -i80->i25 - - - - - - - - -i81 - - -#81\nMinimal reader - - - - - -i81->i25 - - - - - - - - -i81->i80 - - - - - - - - -i82 - - -#82\nEcho store loader - - - - - -i82->i25 - - - - - - - - -i82->i80 - - - - - - - - -i83 - - -#83\nIntegrity verification - - - - - -i83->i25 - - - - - - - - -i83->i80 - - - - - - - - -i84 - - -#84\nSample + tests - - - - - -i84->i25 - - - - - - - - -i84->i80 - - - - - - - - -i222 - - -#222\nDemo 2: Splash Guy — deterministic rules + state model - - - - - -i226 - - -#226\nDemo 2: Splash Guy — docs: networking-first course modules - - - - - -i222->i226 - - - - - - - - -i223 - - -#223\nDemo 2: Splash Guy — lockstep input protocol + two-peer harness - - - - - -i223->i226 - - - - - - - - -i224 - - -#224\nDemo 2: Splash Guy — controlled desync lessons (make it fail on purpose) - - - - - -i224->i226 - - - - - - - - -i225 - - -#225\nDemo 2: Splash Guy — minimal rendering / visualization path - - - - - -i225->i226 - - - - - - - - -i231 - - -#231\nDemo 3: Tumble Tower — Stage 0 physics (2D AABB stacking) - - - - - -i232 - - -#232\nDemo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts) - - - - - -i231->i232 - - - - - - - - -i238 - - -#238\nDemo 3: Tumble Tower — docs course (physics ladder) - - - - - -i231->i238 - - - - - - - - -i233 - - -#233\nDemo 3: Tumble Tower — Stage 2 physics (friction + restitution) - - - - - -i232->i233 - - - - - - - - -i232->i238 - - - - - - - - -i234 - - -#234\nDemo 3: Tumble Tower — Stage 3 physics (sleeping + stack stability) - - - - - -i233->i234 - - - - - - - - -i233->i238 - - - - - - - - -i234->i238 - - - - - - - - -i235 - - -#235\nDemo 3: Tumble Tower — lockstep harness + per-tick fingerprinting - - - - - -i235->i238 - - - - - - - - -i236 - - -#236\nDemo 3: Tumble Tower — controlled desync breakers (physics 
edition) - - - - - -i236->i238 - - - - - - - - -i237 - - -#237\nDemo 3: Tumble Tower — visualization (2D view + debug overlays) - - - - - -i237->i238 - - - - - - - - -i23 - - -#23\nCLI: verify/bench/inspect - - - - - -i47 - - -#47\nScaffold CLI subcommands - - - - - -i47->i23 - - - - - - - - -i48 - - -#48\nImplement 'verify' - - - - - -i48->i23 - - - - - - - - -i49 - - -#49\nImplement 'bench' - - - - - -i49->i23 - - - - - - - - -i50 - - -#50\nImplement 'inspect' - - - - - -i50->i23 - - - - - - - - -i51 - - -#51\nDocs/man pages - - - - - -i51->i23 - - - - - - - - -i171 - - -#171\nTT2: Time Travel MVP (pause/rewind/buffer/catch-up) - - - - - -i170->i171 - - - - - - - - -i205 - - -#205\nTT2: Reliving debugger MVP (scrub timeline + causal slice + fork branch) - - - - - -i170->i205 - - - - - - - - -i172 - - -#172\nTT3: Rulial diff / worldline compare MVP - - - - - -i171->i172 - - - - - - - - -i199 - - -#199\nTT3: Wesley worldline diff (compare query outputs/proofs across ticks) - - - - - -i171->i199 - - - - - - - - -i204 - - -#204\nTT3: Provenance heatmap (blast radius / cohesion over time) - - - - - -i171->i204 - - - + + + - + i174 - - -#174\nW1: Wesley as a boundary grammar (hashable view artifacts) + + +#174\nW1: Wesley as a boundary grammar (hashable view artifacts) - + i174->i173 - - - - - - - - -i166 - -i166 - - - -i166->i173 - - - - - - - - -i166->i170 - - - - - - - - -i166->i172 - - - + + + diff --git a/docs/assets/dags/milestone-deps.dot b/docs/assets/dags/milestone-deps.dot index a1f63fbb..0093c6a1 100644 --- a/docs/assets/dags/milestone-deps.dot +++ b/docs/assets/dags/milestone-deps.dot @@ -22,14 +22,11 @@ digraph echo_milestone_dependencies { m1E [label="1E – Networking & Confluence MVP", fillcolor="#ffedd5", tooltip="1E – Networking & Confluence MVP", URL="https://github.com/flyingrobots/echo/milestone/5"]; mS1 [label="S1 – Deterministic Rhai Surface", fillcolor="#ede9fe", tooltip="S1 – Deterministic Rhai Surface", 
URL="https://github.com/flyingrobots/echo/milestone/18"]; mTT0 [label="TT0 – Time Model Spec Lock", fillcolor="#dbeafe", tooltip="TT0 – Time Model Spec Lock", URL="https://github.com/flyingrobots/echo/milestone/21"]; - mTT1 [label="TT1 – Streams Inspector Frame", fillcolor="#dbeafe", tooltip="TT1 – Streams Inspector Frame", URL="https://github.com/flyingrobots/echo/milestone/15"]; mTT2 [label="TT2 – Time Travel MVP", fillcolor="#dbeafe", tooltip="TT2 – Time Travel MVP", URL="https://github.com/flyingrobots/echo/milestone/16"]; mTT3 [label="TT3 – Rulial Diff / Worldline Compare", fillcolor="#dbeafe", tooltip="TT3 – Rulial Diff / Worldline Compare", URL="https://github.com/flyingrobots/echo/milestone/17"]; mW1 [label="W1 – Wesley as a Boundary Grammar", fillcolor="#ccfbf1", tooltip="W1 – Wesley as a Boundary Grammar", URL="https://github.com/flyingrobots/echo/milestone/19"]; - mTT0 -> mTT1 [color="black", penwidth=1.4, style="solid", tooltip="time model unlocks inspector frame"]; mTT0 -> mTT2 [color="black", penwidth=1.4, style="solid", tooltip="time model unlocks time travel MVP"]; - mTT1 -> mTT2 [color="black", penwidth=1.4, style="solid", tooltip="inspector frame supports time travel UX"]; mTT0 -> mTT3 [color="black", penwidth=1.4, style="solid", tooltip="time model unlocks worldline compare"]; m1C -> mS1 [color="black", penwidth=1.4, style="solid", tooltip="bindings needed for deterministic Rhai"]; m1E -> mS1 [color="black", penwidth=1.4, style="solid", tooltip="security contexts constrain sandbox"]; diff --git a/docs/assets/dags/milestone-deps.svg b/docs/assets/dags/milestone-deps.svg index fa067977..83645eab 100644 --- a/docs/assets/dags/milestone-deps.svg +++ b/docs/assets/dags/milestone-deps.svg @@ -1,56 +1,56 @@ - - - + + echo_milestone_dependencies - -Echo — Milestone Dependency Sketch -Edge direction: prerequisite → dependent (do tail before head) -Edge styles encode confidence (solid=strong, dashed=medium, dotted=weak). 
+ +Echo — Milestone Dependency Sketch +Edge direction: prerequisite → dependent (do tail before head) +Edge styles encode confidence (solid=strong, dashed=medium, dotted=weak). cluster_legend - -Legend + +Legend L1 - -strong + +strong L2 - -medium + +medium L1->L2 - + L3 - -weak + +weak L2->L3 - + m1C - -1C – Rhai/TS Bindings + +1C – Rhai/TS Bindings @@ -58,17 +58,17 @@ mS1 - -S1 – Deterministic Rhai Surface + +S1 – Deterministic Rhai Surface - + m1C->mS1 - - - + + + @@ -76,17 +76,17 @@ m1E - -1E – Networking & Confluence MVP + +1E – Networking & Confluence MVP - + m1E->mS1 - - - + + + @@ -94,98 +94,71 @@ mTT0 - -TT0 – Time Model Spec Lock + +TT0 – Time Model Spec Lock - + mTT0->mS1 - - - - - - - - -mTT1 - - -TT1 – Streams Inspector Frame - - - - - -mTT0->mTT1 - - - + + + - + mTT2 - - -TT2 – Time Travel MVP + + +TT2 – Time Travel MVP - + mTT0->mTT2 - - - + + + - + mTT3 - - -TT3 – Rulial Diff / Worldline Compare + + +TT3 – Rulial Diff / Worldline Compare - + mTT0->mTT3 - - - - - - - - -mTT1->mTT2 - - - + + + - + mW1 - - -W1 – Wesley as a Boundary Grammar + + +W1 – Wesley as a Boundary Grammar - + mW1->mS1 - - - + + + diff --git a/docs/assets/dags/tasks-dag.dot b/docs/assets/dags/tasks-dag.dot deleted file mode 100644 index 62311521..00000000 --- a/docs/assets/dags/tasks-dag.dot +++ /dev/null @@ -1,164 +0,0 @@ -digraph tasks_dag { - graph [rankdir=LR, labelloc="t", fontsize=18, fontname="Helvetica", newrank=true, splines=true]; - node [shape=box, style="rounded,filled", fontname="Helvetica", fontsize=10, margin="0.10,0.06"]; - edge [fontname="Helvetica", fontsize=9, arrowsize=0.8]; - label="Echo — Tasks DAG (from docs/assets/dags/tasks-dag-source.md)\nGenerated by scripts/generate-tasks-dag.js"; - - subgraph cluster_legend { - label="Legend"; - color="gray70"; - fontcolor="gray30"; - style="rounded"; - LG [label="confirmed in docs/assets/dags/tasks-dag-source.md", color="green", fontcolor="green"]; - } - - subgraph cluster_Spec { - label="Spec"; - style="rounded"; 
color="gray70"; - node [fillcolor="#dbeafe"]; - i19 [label="#19 Spec: Persistent Store\\n(on-disk)", URL="https://github.com/flyingrobots/echo/issues/19", tooltip="Spec: Persistent Store (on-disk)"]; - i20 [label="#20 Spec: Commit/Manifest\\nSigning", URL="https://github.com/flyingrobots/echo/issues/20", tooltip="Spec: Commit/Manifest Signing"]; - i21 [label="#21 Spec: Security Contexts\\n(WASM/CLI)", URL="https://github.com/flyingrobots/echo/issues/21", tooltip="Spec: Security Contexts (WASM/CLI)"]; - } - subgraph cluster_Draft { - label="Draft"; - style="rounded"; color="gray70"; - node [fillcolor="#dbeafe"]; - i28 [label="#28 Draft spec document\\n(header/ULEB128/property/strin\\ng-pool)", URL="https://github.com/flyingrobots/echo/issues/28", tooltip="Draft spec document (header/ULEB128/property/string-pool)"]; - i32 [label="#32 Draft signing spec", URL="https://github.com/flyingrobots/echo/issues/32", tooltip="Draft signing spec"]; - i37 [label="#37 Draft security contexts\\nspec", URL="https://github.com/flyingrobots/echo/issues/37", tooltip="Draft security contexts spec"]; - } - subgraph cluster_Misc { - label="Misc"; - style="rounded"; color="gray70"; - node [fillcolor="#dcfce7"]; - i33 [label="#33 CI: sign release artifacts\\n(dry run)", URL="https://github.com/flyingrobots/echo/issues/33", tooltip="CI: sign release artifacts (dry run)"]; - i34 [label="#34 CLI verify path", URL="https://github.com/flyingrobots/echo/issues/34", tooltip="CLI verify path"]; - i35 [label="#35 Key management doc", URL="https://github.com/flyingrobots/echo/issues/35", tooltip="Key management doc"]; - i36 [label="#36 CI: verify signatures", URL="https://github.com/flyingrobots/echo/issues/36", tooltip="CI: verify signatures"]; - i39 [label="#39 WASM input validation", URL="https://github.com/flyingrobots/echo/issues/39", tooltip="WASM input validation"]; - i40 [label="#40 Unit tests for denials", URL="https://github.com/flyingrobots/echo/issues/40", tooltip="Unit tests for 
denials"]; - i38 [label="#38 FFI limits and validation", URL="https://github.com/flyingrobots/echo/issues/38", tooltip="FFI limits and validation"]; - i202 [label="#202 Provenance Payload (PP)\\nv1 — spec + implementation", URL="https://github.com/flyingrobots/echo/issues/202", tooltip="Provenance Payload (PP) v1 — spec + implementation"]; - i270 [label="#270 Hardening: Fuzz the\\nScenePort boundary (proptest)", URL="https://github.com/flyingrobots/echo/issues/270", tooltip="Hardening: Fuzz the ScenePort boundary (proptest)"]; - i286 [label="#286 CI: Add unit tests for\\nclassify_changes.cjs and\\nmatches()", URL="https://github.com/flyingrobots/echo/issues/286", tooltip="CI: Add unit tests for classify_changes.cjs and matches()"]; - i287 [label="#287 Docs: Document\\nban-nondeterminism.sh\\nallowlist process in\\nRELEASE_POLICY.md", URL="https://github.com/flyingrobots/echo/issues/287", tooltip="Docs: Document ban-nondeterminism.sh allowlist process in RELEASE_POLICY.md"]; - } - subgraph cluster_TT1 { - label="TT1"; - style="rounded"; color="gray70"; - node [fillcolor="#fef9c3"]; - i170 [label="#170 TT1: StreamsFrame\\ninspector support (backlog +\\ncursors + admission decisions)", URL="https://github.com/flyingrobots/echo/issues/170", tooltip="TT1: StreamsFrame inspector support (backlog + cursors + admission decisions)"]; - i246 [label="#246 TT1:\\nSecurity/capabilities for\\nfork/rewind/merge in\\nmultiplayer", URL="https://github.com/flyingrobots/echo/issues/246", tooltip="TT1: Security/capabilities for fork/rewind/merge in multiplayer"]; - i245 [label="#245 TT1: Merge semantics for\\nadmitted stream facts across\\nworldlines", URL="https://github.com/flyingrobots/echo/issues/245", tooltip="TT1: Merge semantics for admitted stream facts across worldlines"]; - i244 [label="#244 TT1: TimeStream retention\\n+ spool compaction + wormhole\\ndensity", URL="https://github.com/flyingrobots/echo/issues/244", tooltip="TT1: TimeStream retention + spool compaction + 
wormhole density"]; - i243 [label="#243 TT1: dt policy (fixed\\ntimestep vs admitted dt\\nstream)", URL="https://github.com/flyingrobots/echo/issues/243", tooltip="TT1: dt policy (fixed timestep vs admitted dt stream)"]; - } - subgraph cluster_TT2 { - label="TT2"; - style="rounded"; color="gray70"; - node [fillcolor="#fee2e2"]; - i171 [label="#171 TT2: Time Travel MVP\\n(pause/rewind/buffer/catch-up)", URL="https://github.com/flyingrobots/echo/issues/171", tooltip="TT2: Time Travel MVP (pause/rewind/buffer/catch-up)"]; - i205 [label="#205 TT2: Reliving debugger\\nMVP (scrub timeline + causal\\nslice + fork branch)", URL="https://github.com/flyingrobots/echo/issues/205", tooltip="TT2: Reliving debugger MVP (scrub timeline + causal slice + fork branch)"]; - } - subgraph cluster_TT3 { - label="TT3"; - style="rounded"; color="gray70"; - node [fillcolor="#ccfbf1"]; - i172 [label="#172 TT3: Rulial diff /\\nworldline compare MVP", URL="https://github.com/flyingrobots/echo/issues/172", tooltip="TT3: Rulial diff / worldline compare MVP"]; - i204 [label="#204 TT3: Provenance heatmap\\n(blast radius / cohesion over\\ntime)", URL="https://github.com/flyingrobots/echo/issues/204", tooltip="TT3: Provenance heatmap (blast radius / cohesion over time)"]; - i199 [label="#199 TT3: Wesley worldline\\ndiff (compare query\\noutputs/proofs across ticks)", URL="https://github.com/flyingrobots/echo/issues/199", tooltip="TT3: Wesley worldline diff (compare query outputs/proofs across ticks)"]; - } - subgraph cluster_Demo_2 { - label="Demo 2"; - style="rounded"; color="gray70"; - node [fillcolor="#fef9c3"]; - i222 [label="#222 Demo 2: Splash Guy —\\ndeterministic rules + state\\nmodel", URL="https://github.com/flyingrobots/echo/issues/222", tooltip="Demo 2: Splash Guy — deterministic rules + state model"]; - i226 [label="#226 Demo 2: Splash Guy —\\ndocs: networking-first course\\nmodules", URL="https://github.com/flyingrobots/echo/issues/226", tooltip="Demo 2: Splash Guy — docs: 
networking-first course modules"]; - i223 [label="#223 Demo 2: Splash Guy —\\nlockstep input protocol +\\ntwo-peer harness", URL="https://github.com/flyingrobots/echo/issues/223", tooltip="Demo 2: Splash Guy — lockstep input protocol + two-peer harness"]; - i224 [label="#224 Demo 2: Splash Guy —\\ncontrolled desync lessons\\n(make it fail on purpose)", URL="https://github.com/flyingrobots/echo/issues/224", tooltip="Demo 2: Splash Guy — controlled desync lessons (make it fail on purpose)"]; - i225 [label="#225 Demo 2: Splash Guy —\\nminimal rendering /\\nvisualization path", URL="https://github.com/flyingrobots/echo/issues/225", tooltip="Demo 2: Splash Guy — minimal rendering / visualization path"]; - } - subgraph cluster_Demo_3 { - label="Demo 3"; - style="rounded"; color="gray70"; - node [fillcolor="#f3f4f6"]; - i231 [label="#231 Demo 3: Tumble Tower —\\nStage 0 physics (2D AABB\\nstacking)", URL="https://github.com/flyingrobots/echo/issues/231", tooltip="Demo 3: Tumble Tower — Stage 0 physics (2D AABB stacking)"]; - i238 [label="#238 Demo 3: Tumble Tower —\\ndocs course (physics ladder)", URL="https://github.com/flyingrobots/echo/issues/238", tooltip="Demo 3: Tumble Tower — docs course (physics ladder)"]; - i232 [label="#232 Demo 3: Tumble Tower —\\nStage 1 physics (rotation +\\nangular, OBB contacts)", URL="https://github.com/flyingrobots/echo/issues/232", tooltip="Demo 3: Tumble Tower — Stage 1 physics (rotation + angular, OBB contacts)"]; - i233 [label="#233 Demo 3: Tumble Tower —\\nStage 2 physics (friction +\\nrestitution)", URL="https://github.com/flyingrobots/echo/issues/233", tooltip="Demo 3: Tumble Tower — Stage 2 physics (friction + restitution)"]; - i234 [label="#234 Demo 3: Tumble Tower —\\nStage 3 physics (sleeping +\\nstack stability)", URL="https://github.com/flyingrobots/echo/issues/234", tooltip="Demo 3: Tumble Tower — Stage 3 physics (sleeping + stack stability)"]; - i235 [label="#235 Demo 3: Tumble Tower —\\nlockstep harness + 
per-tick\\nfingerprinting", URL="https://github.com/flyingrobots/echo/issues/235", tooltip="Demo 3: Tumble Tower — lockstep harness + per-tick fingerprinting"]; - i236 [label="#236 Demo 3: Tumble Tower —\\ncontrolled desync breakers\\n(physics edition)", URL="https://github.com/flyingrobots/echo/issues/236", tooltip="Demo 3: Tumble Tower — controlled desync breakers (physics edition)"]; - i237 [label="#237 Demo 3: Tumble Tower —\\nvisualization (2D view + debug\\noverlays)", URL="https://github.com/flyingrobots/echo/issues/237", tooltip="Demo 3: Tumble Tower — visualization (2D view + debug overlays)"]; - } - - i28 -> i19 [color="green3", penwidth=2.5, style="solid", tooltip="`crates/echo-config-fs` exists for tool preferences, but no dedicated graph store crate (e.g. `echo-store`) exists yet."]; - i32 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on Draft Spec task"]; - i33 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i34 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i35 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i36 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i37 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on Draft Spec task"]; - i39 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="`crates/warp-wasm/src/lib.rs` implements `validate_object_against_args` with 4 test cases."]; - i40 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task (scoped to WASM/CLI denials)"]; - i28 -> i19 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on Draft Spec task"]; - 
i32 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on Draft Spec task"]; - i33 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i34 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i35 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i36 -> i20 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i37 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on Draft Spec task"]; - i38 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip=""]; - i39 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="`crates/warp-wasm/src/lib.rs` implements `validate_object_against_args` with full schema validation + 4 test cases. 
GitHub issue closed."]; - i40 -> i21 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Epic completion depends on constituent task"]; - i170 -> i171 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT2 task depends on TT1 Inspector scaffolding"]; - i170 -> i205 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT2 task depends on TT1 Inspector scaffolding"]; - i246 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i245 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i244 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i243 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i171 -> i172 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i171 -> i204 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i171 -> i199 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i170 -> i171 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT2 task depends on TT1 Inspector scaffolding"]; - i171 -> i172 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i171 -> i199 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i202 -> i170 [color="green3", penwidth=2.5, style="solid", tooltip="Time travel debugging requires provenance payloads for replay, slicing, and causal cone analysis."]; - i171 -> i204 [color="red", penwidth=1.0, style="dashed", tooltip="Inferred: TT3 task depends on TT2 MVP"]; - i170 -> i205 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT2 task 
depends on TT1 Inspector scaffolding"]; - i222 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i223 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i224 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i225 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i222 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i223 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i224 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i225 -> i226 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i231 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i231 -> i232 [color="green3", penwidth=2.5, style="solid", tooltip="`crates/warp-geom` implements geometric primitives (AABB, Transform, broad-phase detection) but no physics simulation code exists: zero gravity, zero solver, zero contact resolution. 
Status corrected from \"In Progress\" to \"Open\" (2026-03-03)."]; - i232 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i232 -> i233 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Stage 2 physics depends on Stage 1"]; - i231 -> i232 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Stage 1 physics depends on Stage 0"]; - i233 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i233 -> i234 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Stage 3 physics depends on Stage 2"]; - i232 -> i233 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Stage 2 physics depends on Stage 1"]; - i234 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i233 -> i234 [color="green3", penwidth=2.5, style="solid", tooltip="Inferred: Stage 3 physics depends on Stage 2"]; - i235 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i236 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i237 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i231 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i232 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i233 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i234 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i235 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i236 -> i238 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i237 -> i238 
[color="orange", penwidth=2.0, style="solid", tooltip="Inferred: Docs follow Implementation"]; - i243 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i244 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i245 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i246 -> i170 [color="orange", penwidth=2.0, style="solid", tooltip="Inferred: TT1 Implementation blocks on TT1 Spec clarifications"]; - i270 -> i21 [color="orange", penwidth=2.0, style="solid", tooltip="Hardening the port boundary provides evidence for security context enforcement."]; - i286 -> i287 [color="orange", penwidth=2.0, style="solid", tooltip="CodeRabbit's ASSERTIVE review mode ran `grep` and `git log` scripts on the current codebase to verify CHANGELOG claims, but the verification ran AFTER the fix deleted the evidence (replaced `DIND_STATE_HASH_V2` strings, rewrote 3,185-line file). This produced a Critical false positive claiming \"fabricated\" line counts and reference counts. 
Consider adding a `.coderabbitignore` pattern or CHANGELOG annotation convention that prevents post-hoc verification of claims about deleted/replaced content."]; -} \ No newline at end of file diff --git a/docs/assets/dags/tasks-dag.svg b/docs/assets/dags/tasks-dag.svg deleted file mode 100644 index a15b647a..00000000 --- a/docs/assets/dags/tasks-dag.svg +++ /dev/null @@ -1,1035 +0,0 @@ - - - - - - -tasks_dag - -Echo — Tasks DAG (from docs/assets/dags/tasks-dag-source.md) -Generated by scripts/generate-tasks-dag.js - -cluster_legend - -Legend - - -cluster_Spec - -Spec - - -cluster_Draft - -Draft - - -cluster_Misc - -Misc - - -cluster_TT1 - -TT1 - - -cluster_TT2 - -TT2 - - -cluster_TT3 - -TT3 - - -cluster_Demo_2 - -Demo 2 - - -cluster_Demo_3 - -Demo 3 - - - -LG - -confirmed in docs/assets/dags/tasks-dag-source.md - - - -i19 - - -#19 Spec: Persistent Store\n(on-disk) - - - - - -i20 - - -#20 Spec: Commit/Manifest\nSigning - - - - - -i21 - - -#21 Spec: Security Contexts\n(WASM/CLI) - - - - - -i28 - - -#28 Draft spec document\n(header/ULEB128/property/strin\ng-pool) - - - - - -i28->i19 - - - - - - - - -i28->i19 - - - - - - - - -i32 - - -#32 Draft signing spec - - - - - -i32->i20 - - - - - - - - -i32->i20 - - - - - - - - -i37 - - -#37 Draft security contexts\nspec - - - - - -i37->i21 - - - - - - - - -i37->i21 - - - - - - - - -i33 - - -#33 CI: sign release artifacts\n(dry run) - - - - - -i33->i20 - - - - - - - - -i33->i20 - - - - - - - - -i34 - - -#34 CLI verify path - - - - - -i34->i20 - - - - - - - - -i34->i20 - - - - - - - - -i35 - - -#35 Key management doc - - - - - -i35->i20 - - - - - - - - -i35->i20 - - - - - - - - -i36 - - -#36 CI: verify signatures - - - - - -i36->i20 - - - - - - - - -i36->i20 - - - - - - - - -i39 - - -#39 WASM input validation - - - - - -i39->i21 - - - - - - - - -i39->i21 - - - - - - - - -i40 - - -#40 Unit tests for denials - - - - - -i40->i21 - - - - - - - - -i40->i21 - - - - - - - - -i38 - - -#38 FFI limits and validation - - - - - -i38->i21 - 
- - - - -i202 - - -#202 Provenance Payload (PP)\nv1 — spec + implementation - - - - - -i170 - - -#170 TT1: StreamsFrame\ninspector support (backlog +\ncursors + admission decisions) - - - - - -i202->i170 - - - - - - - - -i270 - - -#270 Hardening: Fuzz the\nScenePort boundary (proptest) - - - - - -i270->i21 - - - - - - - - -i286 - - -#286 CI: Add unit tests for\nclassify_changes.cjs and\nmatches() - - - - - -i287 - - -#287 Docs: Document\nban-nondeterminism.sh\nallowlist process in\nRELEASE_POLICY.md - - - - - -i286->i287 - - - - - - - - -i171 - - -#171 TT2: Time Travel MVP\n(pause/rewind/buffer/catch-up) - - - - - -i170->i171 - - - - - - - - -i170->i171 - - - - - - - - -i205 - - -#205 TT2: Reliving debugger\nMVP (scrub timeline + causal\nslice + fork branch) - - - - - -i170->i205 - - - - - - - - -i170->i205 - - - - - - - - -i246 - - -#246 TT1:\nSecurity/capabilities for\nfork/rewind/merge in\nmultiplayer - - - - - -i246->i170 - - - - - - - - -i246->i170 - - - - - - - - -i245 - - -#245 TT1: Merge semantics for\nadmitted stream facts across\nworldlines - - - - - -i245->i170 - - - - - - - - -i245->i170 - - - - - - - - -i244 - - -#244 TT1: TimeStream retention\n+ spool compaction + wormhole\ndensity - - - - - -i244->i170 - - - - - - - - -i244->i170 - - - - - - - - -i243 - - -#243 TT1: dt policy (fixed\ntimestep vs admitted dt\nstream) - - - - - -i243->i170 - - - - - - - - -i243->i170 - - - - - - - - -i172 - - -#172 TT3: Rulial diff /\nworldline compare MVP - - - - - -i171->i172 - - - - - - - - -i171->i172 - - - - - - - - -i204 - - -#204 TT3: Provenance heatmap\n(blast radius / cohesion over\ntime) - - - - - -i171->i204 - - - - - - - - -i171->i204 - - - - - - - - -i199 - - -#199 TT3: Wesley worldline\ndiff (compare query\noutputs/proofs across ticks) - - - - - -i171->i199 - - - - - - - - -i171->i199 - - - - - - - - -i222 - - -#222 Demo 2: Splash Guy —\ndeterministic rules + state\nmodel - - - - - -i226 - - -#226 Demo 2: Splash Guy —\ndocs: networking-first 
course\nmodules - - - - - -i222->i226 - - - - - - - - -i222->i226 - - - - - - - - -i223 - - -#223 Demo 2: Splash Guy —\nlockstep input protocol +\ntwo-peer harness - - - - - -i223->i226 - - - - - - - - -i223->i226 - - - - - - - - -i224 - - -#224 Demo 2: Splash Guy —\ncontrolled desync lessons\n(make it fail on purpose) - - - - - -i224->i226 - - - - - - - - -i224->i226 - - - - - - - - -i225 - - -#225 Demo 2: Splash Guy —\nminimal rendering /\nvisualization path - - - - - -i225->i226 - - - - - - - - -i225->i226 - - - - - - - - -i231 - - -#231 Demo 3: Tumble Tower —\nStage 0 physics (2D AABB\nstacking) - - - - - -i238 - - -#238 Demo 3: Tumble Tower —\ndocs course (physics ladder) - - - - - -i231->i238 - - - - - - - - -i231->i238 - - - - - - - - -i232 - - -#232 Demo 3: Tumble Tower —\nStage 1 physics (rotation +\nangular, OBB contacts) - - - - - -i231->i232 - - - - - - - - -i231->i232 - - - - - - - - -i232->i238 - - - - - - - - -i232->i238 - - - - - - - - -i233 - - -#233 Demo 3: Tumble Tower —\nStage 2 physics (friction +\nrestitution) - - - - - -i232->i233 - - - - - - - - -i232->i233 - - - - - - - - -i233->i238 - - - - - - - - -i233->i238 - - - - - - - - -i234 - - -#234 Demo 3: Tumble Tower —\nStage 3 physics (sleeping +\nstack stability) - - - - - -i233->i234 - - - - - - - - -i233->i234 - - - - - - - - -i234->i238 - - - - - - - - -i234->i238 - - - - - - - - -i235 - - -#235 Demo 3: Tumble Tower —\nlockstep harness + per-tick\nfingerprinting - - - - - -i235->i238 - - - - - - - - -i235->i238 - - - - - - - - -i236 - - -#236 Demo 3: Tumble Tower —\ncontrolled desync breakers\n(physics edition) - - - - - -i236->i238 - - - - - - - - -i236->i238 - - - - - - - - -i237 - - -#237 Demo 3: Tumble Tower —\nvisualization (2D view + debug\noverlays) - - - - - -i237->i238 - - - - - - - - -i237->i238 - - - - - - - - diff --git a/docs/audits/backlog-staleness-audit.md b/docs/audits/backlog-staleness-audit.md new file mode 100644 index 00000000..f2a58fd9 --- /dev/null +++ 
b/docs/audits/backlog-staleness-audit.md @@ -0,0 +1,282 @@ + + + +# Backlog Staleness Audit + +This is a human triage layer over the generated METHOD DAG. Task truth still +belongs in `docs/method/backlog/**`, GitHub issues, design packets, and retros. +Use this audit to decide which unresolved cards to pull, rewrite, merge, or +close. + +This snapshot was taken after completed backlog items were removed from +`docs/method/backlog/**`. The generated DAG now reports zero completed backlog +tasks. Generated `M###` IDs are not durable across backlog pruning; the source +path and task title are the durable handles. + +Source snapshot: + +- `docs/method/task-matrix.md` +- `docs/method/task-dag.dot` +- `docs/audits/suspicious-stuff.md` + +## Staleness Labels + +- `Current`: actionable as written or close enough to pull directly. +- `Current-after-tightening`: the concern is valid, but update wording before or + during execution so it matches current doctrine. +- `Review-before-pull`: do a short stale-task audit before implementing. +- `Merge-or-close`: likely overlaps newer doctrine/cards; merge into the owning + card or close with evidence. +- `Stale-close`: does not align with recent work; close/delete if no hidden + dependency remains. +- `Future-park`: coherent enough to keep, but not useful as current frontier. + +## High-Signal Cuts + +- The stale inspector stream card was removed from the live backlog. The useful + capability concern now lives in `M016`; the useful merge/settlement concern + now lives in `M013`; debugger UI/protocol work belongs in `warp-ttd`. +- Rewrite or close Echo-core cards that still name Graft, direct editor + hot-reload, or Shadow REALM as substrate work: `M028`, `M057`, `M058`, `M059`, + and `M094`. +- Review Wesley/browser GraphQL, QIR, and typegen cards before pulling: + `M040`, `M041`, `M043`, `M044`, and `M045`. Echo should own canonical + Intent/observation boundaries, not a GraphQL-first runtime substrate. 
+- Treat `M016` and `M083` as consolidation candidates around one capability + doctrine rather than separate drifting cards. + +## Current Pull Bias + +The least-stale open work is the deterministic/release-gate lane, the Echo +optics/reading envelope lane, the Wesley-to-Echo contract proof lane, and the +Continuum witnessed suffix lane. + +Good current pulls include `M001`, `M002`, `M003`, `M004`, `M005`, `M006`, +`M007`, `M009`, `M010`, `M021`, `M024`, `M032`, `M034`, and `M042`. + +## Inventory By Feature + +### METHOD, Docs, And Process + +- `M001` `Current` - Docs cleanup. +- `M031` `Review-before-pull` - Triage METHOD drift against `~/git/method`. + Useful only if the external METHOD source is still intended to govern this + repo. +- `M046` `Review-before-pull` - Wesley information architecture consolidation. + May belong in Wesley once Wesley is the Rust library owner. +- `M047` `Future-park` - Wesley tutorial series and API reference. Keep behind + ownership decisions. +- `M050` `Current` - First-class invariant documents. +- `M062` `Current` - Legend progress in `method status`. +- `M068` `Current-after-tightening` - Docs/logging improvements. Keep scoped to + concrete defects. +- `M069` `Current` - Naming consistency audit. +- `M071` `Current` - Local rustdoc warning gate. +- `M073` `Current` - Current-head PR review / merge summary tool. +- `M074` `Current` - CI trigger rationalization. +- `M075` `Current` - Background Cargo lock isolation. +- `M076` `Current` - Small-commit pre-commit latency reduction. +- `M078` `Current` - PR review thread reply / resolution helper. +- `M079` `Current` - Shell script style / format lane. +- `M080` `Current` - Review-fix fast path for staged verification. +- `M081` `Current` - Pre-PR preflight gate. +- `M082` `Current` - Self-review command. +- `M083` `Current-after-tightening` - Pre-PR checklist and boundary-change + policy. Tie it to current Echo doctrine rather than generic process prose. 
+- `M084` `Current` - Docs validation beyond Markdown. +- `M085` `Current` - Implementation-backed docs claims policy. +- `M086` `Review-before-pull` - Remove committed generated DAG artifacts. This + conflicts with current use of committed generated METHOD artifacts. +- `M096` `Current` - Enforce Echo design vocabulary. +- `M117` `Future-park` - Cross-repo METHOD dashboard. +- `M127` `Future-park` - Extract METHOD crate to its own repo. +- `M128` `Current-after-tightening` - METHOD drift check as pre-push hook. Keep + opt-in or clearly bounded. +- `M132` `Review-before-pull` - RED/GREEN cannot be separate commits. Reconcile + with RED-first practice and the never-amend git rule. +- `M133` `Current` - `xtask main.rs` is a god file. + +### CLI, Inspect, Verify, And Agent Surface + +- `M005` `Current` - Config file support and shell completions. +- `M006` `Current` - Make decoder control coverage auditable. +- `M023` `Current-after-tightening` - Explicit Echo CLI and MCP agent surface. + Keep it narrow; do not create a global mutable graph API. + +### Determinism, Time, Hashing, And Release Gates + +- `M003` `Current` - Deterministic trig oracle release gate. +- `M004` `Current` - CI determinism policy hardening. +- `M015` `Current-after-tightening` - SHA-256 to BLAKE3 migration spec. Frame it + around canonical identity migration, not storage convenience. +- `M072` `Current` - Deterministic test engine helper. +- `M088` `Current` - SIMD canonicalization. + +### Echo Optics, Observations, And Reading Envelopes + +- `M012` `Current` - Contract-aware receipts and readings. +- `M014` `Current` - Parent drift and owned-footprint revalidation. +- `M032` `Current` - Reading envelope family boundary. +- `M090` `Current-after-tightening` - Hashable view artifacts. Reframe around + `ReadIdentity`, witness basis, aperture, and projection/reducer versions. +- `M093` `Current-after-tightening` - Provenance as query semantics. 
Keep if + rewritten as observer-relative reading/provenance query semantics. +- `M129` `Current-after-tightening` - Reading envelope inspector. Pull only + after envelope families are clear enough to inspect. + +### Wesley And Contract Hosting + +- `M007` `Current` - Echo contract-hosting roadmap. +- `M010` `Current` - Wesley compiled contract-hosting doctrine. +- `M017` `Current` - Authenticated Wesley Intent admission posture. +- `M030` `Current-after-tightening` - jedit text contract MVP. Keep only as an + example contract fixture, not Echo core ontology. +- `M037` `Review-before-pull` - Wesley go-public docs/CI. Confirm Echo versus + Wesley ownership. +- `M038` `Review-before-pull` - Migration backfill script generation. Likely + Wesley-owned unless Echo needs a host-side migration proof. +- `M039` `Review-before-pull` - Migration switch-over and contract validation. +- `M040` `Review-before-pull` - GraphQL operation parser for QIR. Likely stale + in Echo if Wesley owns GraphQL/QIR parsing. +- `M041` `Review-before-pull` - SQL query plan generation from QIR. Very likely + Wesley-owned or out of scope for Echo core. +- `M042` `Current` - Wesley to Echo toy contract proof. +- `M043` `Review-before-pull` - TypeScript type generation from Wesley IR. +- `M044` `Review-before-pull` - Zod validators from Wesley IR. +- `M045` `Review-before-pull` - CBOR bridge from TS types to WASM Rust. Keep + only if it is a canonical adapter boundary, not causal ontology. +- `M063` `Review-before-pull` - Reconcile relocated Wesley Echo schemas. +- `M091` `Current-after-tightening` - Schema hash chain pinning. Align with + artifact identity and read/receipt identity. +- `M092` `Review-before-pull` - SchemaDelta vocabulary. May be Wesley-owned. +- `M094` `Stale-close` - Shadow REALM investigation. +- `M095` `Future-park` - Multi-language generator survey. Probably + Wesley-owned and not current Echo execution. 
+ +### Continuum, Suffix Admission, Import, And Interchange + +- `M002` `Current` - Echo and git-warp compatibility sanity check. +- `M011` `Review-before-pull` - Compliance reporting as a TTD protocol + extension. Check whether TTD is still the right host name/path. +- `M021` `Current` - Continuum proof family runtime cutover. +- `M026` `Current` - Echo / git-warp witnessed suffix sync. +- `M027` `Current-after-tightening` - Split `echo-session-proto` into retained + bridge contracts vs legacy transport residue. Avoid broad host-bag + abstractions. +- `M029` `Current` - Import outcome idempotence and loop law. +- `M060` `Review-before-pull` - git-mind NEXUS. Need evidence that this is + still part of Echo's current integration map. +- `M061` `Review-before-pull` - Importer umbrella audit and close. +- `M116` `Future-park` - Continuum contract artifact interchange. + +### Strands, Braids, Settlement, And Capability-Scoped Forking + +- `M013` `Current` - Contract strands and counterfactuals. +- `M016` `Current-after-tightening` - Security/capabilities for + fork/rewind/merge. Keep as the canonical Echo capability-law card; align it + with the Optics capability model. +- `M028` `Stale-close` - Graft live frontier structural readings. Rewrite + generically or close. +- `M099` `Future-park` - Parallel execution counterfactuals. + +### Retention, CAS, Deep Storage, And Cached Readings + +- `M022` `Current` - Contract artifact retention in `echo-cas`. +- `M024` `Current` - MemoryTier WASM compilation gate. +- `M025` `Current` - JS bindings for CAS store/retrieve. +- `M118` `Review-before-pull` - `Arc<[u8]>` to `bytes::Bytes` migration. + Justify with measured storage/API friction. +- `M119` `Future-park` - `AsyncBlobStore` trait. +- `M120` `Future-park` - Enumeration and metadata API. +- `M121` `Current-after-tightening` - File-per-blob DiskTier implementation. + Keep CAS bytes separate from ontology. +- `M122` `Current-after-tightening` - Tiered promotion/demotion. 
Must not + affect causal identity. +- `M123` `Current-after-tightening` - Mark-sweep reachability analysis. Respect + retained reading identity and witness needs. +- `M124` `Current-after-tightening` - Eviction policy and background sweep. + Missing evidence must fail closed with obstruction. +- `M125` `Future-park` - Deep-storage wire protocol messages and binary + encoding. +- `M126` `Future-park` - Deep-storage request/response protocol and + backpressure. + +### Browser, WASM, TTD, And Visualization Hosts + +- `M008` `Review-before-pull` - Commit-ordered rollback playbooks for TTD + integration. +- `M009` `Current` - Reconcile TTD protocol schemas with `warp-ttd`. +- `M018` `Current-after-tightening` - Canvas graph renderer. Update wording + away from "static materialized reading" if it implies full hidden + materialization. +- `M019` `Current-after-tightening` - Live tick playback and rewrite animation. + Keep as observation/replay, not mutable runtime truth. +- `M020` `Current` - Node inspection panel. +- `M033` `Current` - Narrow `ttd-browser` into an Echo browser host bridge. +- `M034` `Current` - Wire Engine lifecycle behind `wasm-bindgen` exports. +- `M035` `Current` - Snapshot and `ViewOp` drain exports. +- `M036` `Current-after-tightening` - JS/WASM memory bridge and error protocol. + Keep deterministic/canonical boundary constraints explicit. +- `M087` `Current` - Fuzzing the port. +- `M089` `Future-park` - Causal visualizer. + +### Plugin, ABI, Sandbox, And Signing Security + +- `M048` `Review-before-pull` - Rhai sandbox configuration. Confirm Rhai + remains a live execution path. +- `M049` `Review-before-pull` - ViewClaim / EffectClaim receipts. Reframe + through current receipt/reading doctrine if Rhai remains. +- `M051` `Review-before-pull` - Commit/manifest signing spec. Check old issue + lineage. +- `M052` `Current-after-tightening` - Security contexts. Align with Optic + capability, actor/cause, and admission law. 
+- `M053` `Review-before-pull` - FFI limits and validation. +- `M054` `Review-before-pull` - JS-ABI packet checksum v2. Confirm this is + still the active JS/WASM boundary. +- `M055` `Current-after-tightening` - Provenance payload v1. Align with + receipts, witness basis, and causal identity. +- `M056` `Current` - ABI nested evidence strictness. +- `M064` `Review-before-pull` - Key management doc. +- `M065` `Future-park` - CI sign release artifacts dry run. +- `M066` `Future-park` - CLI verify path for signatures. +- `M067` `Future-park` - CI verify signatures. +- `M077` `Current-after-tightening` - Feature-gate contract verification. + +### Editor Hot Reload And Consumer-Specific Work + +- `M057` `Stale-close` - Draft hot-reload spec. Do not make Echo core a + file-handle/editor-hot-reload substrate; rewrite as adapter-only if needed. +- `M058` `Stale-close` - File watcher / debounce. Host-adapter concern, not + Echo core. +- `M059` `Stale-close` - Hot-reload implementation. Host-adapter concern, not + Echo core. +- `M070` `Future-park` - Reliving debugger UX design. Keep only as a consumer + of optics/replay. + +### Time Travel, Admission Inspector, And Rulial Diff + +- `M100` `Future-park` - Rulial diff / worldline compare MVP. +- `M101` `Future-park` - Wesley worldline diff. Wait for contract query/read + proof work. +- `M102` `Future-park` - Provenance heatmap. +- `M106` `Current-after-tightening` - Time travel core. Reframe around fixed + ticks, playback coordinates, bounded reveal, and admitted timer history. +- `M107` `Future-park` - Reliving debugger MVP. + +### Example Apps, Game Fixtures, And Course Material + +- `M097` `Future-park` - Splash Guy course material. +- `M098` `Future-park` - Tumble Tower course material. +- `M103` `Future-park` - Splash Guy controlled desync. +- `M104` `Future-park` - Splash Guy lockstep protocol. +- `M105` `Future-park` - Splash Guy rules and state model. +- `M108` `Future-park` - Tumble Tower desync breakers. 
+- `M109` `Future-park` - Tumble Tower lockstep harness. +- `M110` `Current` - Replay-from-checkpoint convergence tests. +- `M111` `Current` - Replay-from-patches convergence property tests. +- `M112` `Future-park` - Tumble Tower stage 0 AABB. +- `M113` `Future-park` - Tumble Tower stage 1 rotation. +- `M114` `Future-park` - Tumble Tower stage 2 friction. +- `M115` `Future-park` - Tumble Tower stage 3 sleeping. +- `M130` `Future-park` - Splash Guy visualization. +- `M131` `Future-park` - Tumble Tower visualization. diff --git a/docs/audits/suspicious-stuff.md b/docs/audits/suspicious-stuff.md new file mode 100644 index 00000000..77bc750a --- /dev/null +++ b/docs/audits/suspicious-stuff.md @@ -0,0 +1,80 @@ + + + +# Suspicious Repository Surfaces + +For each of the following, please investigate: + +1. What uses it? +2. Why is it here? +3. Is it needed? +4. Can it be removed? +5. Are there alternatives? +6. Are there any security concerns? +7. Are there any performance concerns? +8. Are there any maintainability concerns? +9. How would removing it affect the project's invariants? +10. How does it interact with other parts of the codebase? +11. 
Recommendation: keep, remove, or refactor + +The following are probably trash: + +.dx-debug/ +.venv/ +apps/ttd-app/ +blog/ + +The following are suspicious: + +crates/echo-config-fs/ +crates/echo-dry-tests/ +crates/echo-graph/ +crates/echo-runtime-schema/ +crates/echo-scene-codec/ +crates/echo-scene-port/ +crates/echo-session-proto/ +crates/echo-session-ws-gateway/assets/vendor/ + +docs/.vitepress/ +docs/archive/ +docs/book/ +docs/man/ +docs/theory/ + +docs/macros.tex +docs/ref.bib + +docs/workflows.md + +node_modules/ +packages/ +schemas/runtime/\* + +scripts/hooks/ +scripts/tests/ +scirpts/ in general, i suppose + +specs/spec-000-rewrite/ +tests/hooks/ + +xtask/src/main.rs + +.ban-globals-allowlist +.ban-nondeterminism-allowlist +.ban-unordered-abi-allowlist + +audit.toml +CLAUDE.md +deny.toml +Makefile +plawright.config.ts + +The following should probably be factored out into a different repo: + +crates/method/ + +crates/echo-ttd/ +crates/echo-wesley-gen/ +crates/ttd-browser/ +crates/ttd-manifest/ +crates/ttd-protocol-rs/ diff --git a/docs/book/.gitignore b/docs/book/.gitignore deleted file mode 100644 index ec8aad64..00000000 --- a/docs/book/.gitignore +++ /dev/null @@ -1,31 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# © James Ross Ω FLYING•ROBOTS -# LaTeX build artifacts -*.aux -*.log -*.toc -*.lof -*.lot -*.fls -*.synctex.gz -*.xdv -*.ps -*.dvi -*.out -*.nav -*.snm -*.vrb -*.fdb_latexmk -*.fls -_minted* -*.run.xml -*.bbl -*.blg -*.idx -*.ind -*.ilg -*.gz -*.lbx - -# Output PDF -main.pdf diff --git a/docs/design/0003-dt-policy/KERNEL_dt-policy.md b/docs/design/0003-dt-policy/KERNEL_dt-policy.md index 1656245f..38067ac9 100644 --- a/docs/design/0003-dt-policy/KERNEL_dt-policy.md +++ b/docs/design/0003-dt-policy/KERNEL_dt-policy.md @@ -1,17 +1,16 @@ -# dt policy: fixed timestep vs admitted dt stream +# dt Policy: Fixed Timestep Ref: #243 -Decide whether Echo's simulation loop uses a fixed timestep (every -tick is the same duration) or an admitted dt stream 
(ticks carry -variable time deltas as stream facts). +Status: superseded by `docs/invariants/FIXED-TIMESTEP.md`. -Fixed timestep is simpler and more deterministic. Variable dt is -more flexible for real-time applications but introduces a new class -of divergence (two clients with different dt streams produce -different states). +Decision: Echo uses fixed deterministic ticks. `dt` is not admitted as a +variable causal fact. Host-observed elapsed time may wake an adapter and cause +that adapter to propose an Intent, but only admitted ticks and receipts affect +replay, rewind, read identity, and causal ordering. -This is a fundamental time model decision that gates TT1 work. +This file remains only as a historical pointer for #243. The normative doctrine +is now the fixed-timestep invariant. diff --git a/docs/design/0003-dt-policy/design.md b/docs/design/0003-dt-policy/design.md index 5fc78c26..8277b3f5 100644 --- a/docs/design/0003-dt-policy/design.md +++ b/docs/design/0003-dt-policy/design.md @@ -19,8 +19,8 @@ Echo's hardest open problem is canonical cross-worldline settlement. The settlement backlog says Echo needs "one deterministic result, not eventual convergence." If ticks can carry different durations, then equal tick counts stop meaning equal simulated time, and -compare/braid/settle gets uglier fast. Issue #243 blocks TT1 -StreamsFrame work, so this is exactly the kind of foundational +compare/braid/settle gets uglier fast. Issue #243 blocks older +time-travel inspector planning, so this is exactly the kind of foundational invariant worth locking early instead of letting it leak everywhere later. @@ -125,8 +125,8 @@ individual ticks. rulings, rationale, and consequences. 3. Cross-reference from SPEC-0004 (worldlines) — add a one-line normative reference to the invariant. -4. Cross-reference from `CONTINUUM.md` — note that the hot runtime's - time model is fixed-quantum. +4. 
Cross-reference from the current runtime/invariant docs — note that the hot + runtime's time model is fixed-quantum. 5. Verify that `warp_geom::Tick` doc comment is consistent with the invariant (it already says "fixed `dt` per branch"). 6. Update the strand-contract and strand-settlement backlog items diff --git a/docs/design/0009-witnessed-causal-suffix-sync/design.md b/docs/design/0009-witnessed-causal-suffix-sync/design.md index a8cc8adf..0f52a7e4 100644 --- a/docs/design/0009-witnessed-causal-suffix-sync/design.md +++ b/docs/design/0009-witnessed-causal-suffix-sync/design.md @@ -131,13 +131,16 @@ to the transitions themselves. ### 3. `ImportSuffixResult` Echo should surface the same honest outcome categories it already uses for -admission and settlement-style plurality: +admission and settlement-style plurality. Older sketches used `Braided` as a +top-level result word, but the current law is stricter: braid membership is a +possible local realization of preserved plurality, not a transport kind and not +the incoming bundle itself. ```text ImportSuffixResult = Admitted { frontier, receipt } | Staged { lane_id, reason, receipt } - | Braided { braid_id, cells, receipt } + | Plural { candidate_refs, residual_posture, receipt } | Conflict { artifact, receipt } | Obstructed { witness, receipt } ``` @@ -202,6 +205,38 @@ export_suffix(request) -> CausalSuffixBundle import_suffix(bundle) -> ImportSuffixResult ``` +Implementation note: Echo's first Rust/ABI slice exposes this shape as suffix +shell construction plus local admission classification. `export_suffix` builds a +`CausalSuffixBundle` from read-only suffix evidence and derives the source shell +/ bundle identities. `import_suffix` normalizes through the existing witnessed +suffix admission evaluator and returns `ImportSuffixResult`. + +For external mutation, that evaluator is not the public mutation path. 
Inbound +transport admission must be submitted as an Intent: + +```text +CausalSuffixBundle +-> canonical import Intent +-> dispatch_intent +-> ingress / scheduler / admission +-> tick + receipt / witness +``` + +The same rule applies to external forking, merging, braiding, settlement, +support mutation, and inverse operations. Internal services may remain +implementation details, but no public direct mutation API is the authority for +these topology-changing operations. + +This is deliberately not import execution by side effect; it does not append +target provenance or apply patches directly outside admission. + +The active transport-identity decision for M027 is stricter: Continuum's shared +runtime-boundary family should now promote Echo's witnessed suffix model +directly. The canonical shared names are `WitnessedSuffixShell`, +`CausalSuffixBundle`, `WitnessedSuffixAdmissionResponse`, and `ImportOutcome`; +the older `SuffixShell` wording is only a historical placeholder. See +`docs/design/0022-continuum-transport-identity/design.md`. + One supporting read surface is also useful: ```text diff --git a/docs/design/0010-live-basis-settlement-plan/design.md b/docs/design/0010-live-basis-settlement-plan/design.md index 3cb2c4ff..6045bd57 100644 --- a/docs/design/0010-live-basis-settlement-plan/design.md +++ b/docs/design/0010-live-basis-settlement-plan/design.md @@ -288,8 +288,7 @@ Likely code surfaces: ### Step 4: Observer plans and bounded reading artifacts Status: partially implemented by `ObservationArtifact::reading`; authored -observer plans remain planned by -[PLATFORM_observer-plan-reading-artifacts](../../method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md). +observer plans remain follow-up work. Doctrine: [0011 — Optic and observer runtime doctrine](../0011-optic-observer-runtime-doctrine/design.md). 
Required behavior: @@ -309,9 +308,7 @@ Current implementation evidence: ### Step 5: Witnessed suffix admission shells -Status: planned by -[PLATFORM_witnessed-suffix-admission-shells](../../method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md) -and [0009](../0009-witnessed-causal-suffix-sync/design.md). +Status: planned by [0009](../0009-witnessed-causal-suffix-sync/design.md). Required behavior: diff --git a/docs/design/0011-optic-observer-runtime-doctrine/design.md b/docs/design/0011-optic-observer-runtime-doctrine/design.md index e93b0bd0..95cd131d 100644 --- a/docs/design/0011-optic-observer-runtime-doctrine/design.md +++ b/docs/design/0011-optic-observer-runtime-doctrine/design.md @@ -17,7 +17,6 @@ Depends on: - [0009 — Witnessed causal suffix export and import](../0009-witnessed-causal-suffix-sync/design.md) - [0010 — Live-basis settlement correction plan](../0010-live-basis-settlement-plan/design.md) - [Continuum Foundations](../../architecture/continuum-foundations.md) -- [Observer plans and reading artifacts](../../method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md) Source theory inputs: @@ -221,8 +220,10 @@ ReadingArtifact { The first implementation slice exposed parent-basis posture. The second slice wraps observation artifacts in `ReadingEnvelope`, includes it in -`ObservationHashInput`, and makes built-in observer plan, observer basis, -witness refs, budget posture, rights posture, and residual posture ABI-visible. +`ObservationHashInput`, and makes observer plan, optional hosted observer +instance, observer basis, witness refs, budget posture, rights posture, and +residual posture ABI-visible. `ObservationRequest` also names observer plan, +optional instance, read budget, and rights posture explicitly. The kernel keeps full overlap slots internally; the ABI carries overlap count plus a deterministic slot digest until a stable public slot representation exists. 
@@ -348,7 +349,7 @@ Current implementation evidence: ### Step 4: Witnessed suffix shells Status: planned by -[Witnessed suffix admission shells](../../method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md). +[0009 — Witnessed causal suffix export and import](../0009-witnessed-causal-suffix-sync/design.md). Scope: diff --git a/docs/design/0014-eint-registry-observation-boundary-inventory/design.md b/docs/design/0014-eint-registry-observation-boundary-inventory/design.md index 42fd1a6e..5dd52977 100644 --- a/docs/design/0014-eint-registry-observation-boundary-inventory/design.md +++ b/docs/design/0014-eint-registry-observation-boundary-inventory/design.md @@ -11,8 +11,6 @@ Legend: [PLATFORM](../../method/legends/PLATFORM.md) Depends on: - [0013 - Wesley Compiled Contract Hosting Doctrine](../0013-wesley-compiled-contract-hosting-doctrine/design.md) -- [Existing EINT, Registry, And Observation Boundary Inventory](../../method/backlog/asap/PLATFORM_contract-aware-intent-observation-envelope.md) -- [Registry Provider Wiring And Host Boundary Decision](../../method/backlog/asap/PLATFORM_static-contract-registry-and-host-boundary.md) ## Why this packet exists diff --git a/docs/design/0015-registry-provider-host-boundary-decision/design.md b/docs/design/0015-registry-provider-host-boundary-decision/design.md index fefcbb24..87983e65 100644 --- a/docs/design/0015-registry-provider-host-boundary-decision/design.md +++ b/docs/design/0015-registry-provider-host-boundary-decision/design.md @@ -12,7 +12,6 @@ Depends on: - [0013 - Wesley Compiled Contract Hosting Doctrine](../0013-wesley-compiled-contract-hosting-doctrine/design.md) - [0014 - EINT, Registry, And Observation Boundary Inventory](../0014-eint-registry-observation-boundary-inventory/design.md) -- [Registry Provider Wiring And Host Boundary Decision](../../method/backlog/asap/PLATFORM_static-contract-registry-and-host-boundary.md) ## Decision diff --git 
a/docs/design/0016-wesley-to-echo-toy-contract-proof/design.md b/docs/design/0016-wesley-to-echo-toy-contract-proof/design.md index 3de5cd7d..fc85bd4d 100644 --- a/docs/design/0016-wesley-to-echo-toy-contract-proof/design.md +++ b/docs/design/0016-wesley-to-echo-toy-contract-proof/design.md @@ -13,11 +13,11 @@ Depends on: - [0013 - Wesley Compiled Contract Hosting Doctrine](../0013-wesley-compiled-contract-hosting-doctrine/design.md) - [0014 - EINT, Registry, And Observation Boundary Inventory](../0014-eint-registry-observation-boundary-inventory/design.md) - [0015 - Registry Provider Host Boundary Decision](../0015-registry-provider-host-boundary-decision/design.md) -- [Wesley To Echo Toy Contract Proof](../../method/backlog/up-next/PLATFORM_wesley-to-echo-toy-contract-proof.md) +- [Retro: 0016 - Wesley To Echo Toy Contract Proof](../../method/retro/0016-wesley-to-echo-toy-contract-proof/retro.md) ## Status -GREEN 4. +Accepted. ## Hill @@ -205,6 +205,38 @@ cargo clippy -p echo-wesley-gen --all-targets -- -D warnings -D missing_docs Result: passed. +## GREEN 5 witness + +Implementation: + +- `warp-wasm` now exposes native Rust CBOR-envelope helpers matching the + installed-kernel WASM boundary: + - `dispatch_intent_cbor(...)`; + - `observe_cbor(...)`; + - `get_registry_info_cbor()`. +- These helpers do not change the `wasm_bindgen` exports. They make the + installed-kernel envelope path testable without `js_sys::Uint8Array`. +- The generated toy consumer smoke crate now depends on local `warp-wasm`. +- The smoke crate installs its application-owned `ToyKernel` with + `warp_wasm::install_kernel(...)`. 
+- The smoke crate then verifies: + - installed registry metadata matches generated `CODEC_ID`, + `REGISTRY_VERSION`, and `SCHEMA_SHA256`; + - generated `pack_increment_intent(...)` bytes dispatch through + `warp_wasm::dispatch_intent_cbor(...)`; + - generated `counter_value_observation_request(...)` bytes observe through + `warp_wasm::observe_cbor(...)`; + - the returned read is a `QueryBytes` `ObservationArtifact`. + +Focused witness: + +```sh +cargo test -p echo-wesley-gen \ + test_toy_contract_generated_output_compiles_in_consumer_crate +``` + +Result: passed. + ## GREEN direction GREEN stayed inside `echo-wesley-gen`. @@ -218,16 +250,20 @@ Implemented shape: canonical vars bytes; - generate read-helper shapes for query ops that map to `ObservationRequest`; - keep Echo core app-agnostic; -- keep host-side generated payload validation deferred. +- keep host-side generated payload validation deferred; +- prove installed-kernel dispatch, observation, and registry metadata through + `warp-wasm` native CBOR envelope helpers. -Still deferred: +Still deferred to follow-on cards: -- installed-kernel `dispatch_intent(...)` integration proof; -- installed-kernel `observe(...)` integration proof; -- registry metadata handshake proof against an installed kernel. +- contract-aware receipt and reading identity; +- contract artifact retention in `echo-cas`; +- real `jedit` generated fixture hosting; +- dynamic contract loading. The phrase "actual integration proof" now means an Echo-installed or -application-owned kernel path, not a generated-output compile proof. +application-owned kernel path. This cycle closes that proof for the toy counter +contract without adding app-specific Echo APIs. ## Non-goals @@ -242,7 +278,10 @@ application-owned kernel path, not a generated-output compile proof. ## Remaining design question -The RED deliberately includes a generated query/read helper. 
If the current -`ObservationRequest` shape cannot honestly express the toy query, the next -GREEN should stop at the precise missing observation bridge instead of -inventing a broad `query_contract(...)` ABI. +Resolved for the toy proof. `ObservationRequest` can honestly carry the toy +query as `ObservationFrame::QueryView` plus `ObservationProjection::Query`, and +the generated optic read helper can carry it as +`OpticApertureShape::QueryBytes`. + +Follow-on cards still need to harden the identity and retention semantics of +those readings before `jedit` uses the path as a serious consumer. diff --git a/docs/design/0018-echo-optics-api-design/design.md b/docs/design/0018-echo-optics-api-design/design.md new file mode 100644 index 00000000..0eca6ad8 --- /dev/null +++ b/docs/design/0018-echo-optics-api-design/design.md @@ -0,0 +1,1356 @@ + + + +# Echo Optics API Design + +Source request: [request.md](./request.md) + +Depends on: + +- [0011 - Optic and observer runtime doctrine](../0011-optic-observer-runtime-doctrine/design.md) +- [0013 - Wesley Compiled Contract Hosting Doctrine](../0013-wesley-compiled-contract-hosting-doctrine/design.md) +- [0014 - EINT, Registry, And Observation Boundary Inventory](../0014-eint-registry-observation-boundary-inventory/design.md) +- [Continuum Runtime And CAS Readings](../continuum-runtime-and-cas-readings.md) +- [Wesley-Compiled Optic Bindings For Echo](./wesley-compiled-optic-bindings.md) +- [Echo Optics Adapter Notes](../../architecture/echo-optics-adapter-notes.md) + +## Summary + +An Echo Optic is the first-class API noun for bounded, capability-scoped, +coordinate-anchored observation and intent dispatch over Echo causal history. +It is Echo's local API cut of the broader WARP optic: a bounded, law-named +operation over witnessed causal history that produces a hologram. + +An optic is: + +```text +Optic = capability + focus + coordinate + projection law + intent family +``` + +An optic is not a mutable handle. 
It is not a file handle, graph handle, object +handle, editor handle, or hidden materialization cursor. + +There is no substrate-owned graph behind the optic. A graph-shaped result is a +holographic reading emitted by an observer or optic over witnessed causal +history. + +This scales up. Echo itself is a WARP optic for real-time deterministic +simulation. `warp-ttd`, `git-warp`, and Wesley are other WARP optics with +different projection/admission targets. The Optics API is Echo's local public +cut of that broader pattern. + +An optic names two lawful things: + +1. a bounded way to observe a focused projection at a causal coordinate; +2. a family of intents that may be proposed against that focused projection + under an explicit causal basis. + +The ideal API has one disciplined read path and one disciplined write-side +proposal path: + +```text +observe_optic(request) -> reading | obstruction +dispatch_optic_intent(request) -> admission outcome +``` + +Everything else is support: + +```text +open_optic validates and names an optic descriptor +close_optic releases session-local optic resources only +retain_reading stores reading bytes plus their semantic read identity +reveal_reading retrieves retained reading bytes only when identity matches +``` + +This design is generic. `jedit` may validate ergonomics as a future consumer, +but it is not the design target and must not create privileged text APIs in +Echo core. + +Wesley-compiled output should target this model as generated optic bindings. +Generated bindings may hide byte-level EINT packing from application code, but +they must not hide intent dispatch from Echo. The request crossing into Echo +still names optic id, focus, base coordinate, capability, actor/cause, admission +law, intent family, and proposal payload. + +## Core Doctrine + +```text +Optic reads. +Intent proposes. +Echo admits. +Receipt witnesses. +``` + +The deeper WARP doctrine is: + +```text +There is witnessed causal history. 
+WARP optics chart it. +Holograms witness those charts. +Materialized graphs are optional readings. +Continuum is the protocol for lawful causal-history exchange. +``` + +The prohibitions are part of the API contract: + +- no direct setters; +- no global graph API; +- no global mutable state API; +- no file-handle API; +- no hidden full-materialization fallback; +- no latest-writer-wins fallback; +- no stringly status outcome; +- no GraphQL-first runtime substrate; +- no host-bag abstractions such as `RuntimeFacade`, `ObservationManager`, + `UniversalMaterializer`, or `GraphLikeRuntimeAdapter`. + +Optic read truth is observer-relative and witness-backed. Substrate truth remains +the witnessed causal history and admitted receipts. + +Every successful read, admission, materialization, retention, or obstruction +produces a hologram: a witnessed artifact that names basis, law, aperture, +identity, evidence, and posture. Some holograms are admitted history. Some are +readings. Some are retained materializations. None is a mutable graph handle. + +Optic intent dispatch is not mutation by handle. It is proposal against an +explicit causal basis. Echo may admit, stage, preserve plurality, conflict, or +obstruct. It must not silently mutate the current frontier when the caller named +a stale basis. + +Generated code may make this ergonomic: + +```rust +text_optic.dispatch_replace_range(port, base_coordinate, vars, actor, cause) +``` + +but the generated method must build and submit an explicit +`DispatchOpticIntentRequest`. It must not become a setter. + +## Optic Model + +An optic has five required components. 
+ +| Component | Meaning | +| -------------- | -------------------------------------------------------------- | +| capability | what actor/session/policy may reveal or propose | +| focus | what worldline, strand, braid, retained reading, or attachment | +| coordinate | which causal frontier or historical point is being named | +| projection law | how causal history lowers into a reading | +| intent family | which proposal family is lawful against this focus | + +The focus is not global state. The same worldline may be observed through +different optics with different apertures, projection versions, capabilities, +rights, budgets, and retained-reading policies. + +Optics range over: + +- worldlines; +- strands; +- braids; +- coordinates/frontiers; +- retained readings; +- cached readings; +- observer apertures; +- witness-backed projections; +- explicit attachment boundaries. + +The read path is: + +```text +choose aperture + -> slice causal history + -> lower under law + -> witness + -> retain if needed + -> emit observer-relative reading +``` + +The intent path is: + +```text +construct intent + -> validate capability + -> validate causal basis + -> apply/admit under contract law + -> emit tick/admission result + -> emit receipt/witness +``` + +## Public API Surface + +The smallest useful Rust-facing API surface is: + +```rust +pub trait EchoOptics { + fn open_optic(&mut self, request: OpenOpticRequest) + -> Result; + + fn close_optic(&mut self, request: CloseOpticRequest) + -> Result; + + fn observe_optic(&self, request: ObserveOpticRequest) + -> ObserveOpticResult; + + fn dispatch_optic_intent(&mut self, request: DispatchOpticIntentRequest) + -> IntentDispatchResult; +} + +pub trait EchoReadingRetention { + fn retain_reading(&mut self, request: RetainReadingRequest) + -> Result; + + fn reveal_reading(&self, request: RevealReadingRequest) + -> Result; +} +``` + +The ABI or application adapter may expose camelCase wrappers such as +`openOptic`, `observeOptic`, and 
`dispatchOpticIntent`. Core Rust should follow +the repository's snake_case style. + +The API deliberately separates responsibilities: + +```text +Optic observes. +Admission admits. +Retention retains. +Plumber maintains. +Debug explains. +``` + +Plumber/debug APIs may inspect, repair, prewarm, materialize, or explain, but +they must be named as operational APIs. A public optic read must never call a +plumber/debug fallback and pretend the result is a witnessed bounded reading. + +`close_optic` is intentionally weak. It releases session-local descriptor +resources. It does not mutate the subject, invalidate old readings, revoke +history, or close a file-like handle. + +## Wesley Compiler Extension + +Echo owns the Echo-facing Wesley compiler extension in `crates/echo-wesley-gen`. +That generator should compile Wesley contract operations into typed optic +bindings, not into Echo-core subclasses or app-specific runtime APIs. + +The generated output should provide: + +- contract family metadata; +- generated DTOs and canonical codecs; +- typed `OpenOpticRequest` builders; +- typed `ObserveOpticRequest` builders for query/read operations; +- typed `DispatchOpticIntentRequest` builders for mutation/proposal operations; +- optional convenience dispatch methods that still require explicit causal + basis and call `dispatch_optic_intent`. + +Current generated helpers remain useful during migration: + +```text +*_observation_request(...) +pack_*_intent(...) +``` + +The preferred Optics surface should add: + +```text +*_observe_optic_request(...) +*_dispatch_optic_intent_request(...) +``` + +The low-level EINT helper is allowed to be internal to the generated binding. +The Echo boundary remains explicit: + +```text +EINT bytes are a binding implementation detail. +Intent dispatch is not an optic implementation detail. +``` + +See [Wesley-Compiled Optic Bindings For Echo](./wesley-compiled-optic-bindings.md) +for the generated API contract. 
+ +## Types And Interfaces + +These sketches use Rust-style DTOs to show the intended public shape. They are +not committed wire formats. + +```rust +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct OpticId(pub [u8; 32]); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ProjectionVersion(pub u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ReducerVersion(pub u32); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct EchoOptic { + pub optic_id: OpticId, + pub focus: OpticFocus, + pub coordinate: EchoCoordinate, + pub projection_law: ProjectionLawRef, + pub reducer_law: Option, + pub intent_family: IntentFamilyRef, + pub capability: OpticCapabilityRef, +} +``` + +Focus names the lawful subject without exposing a global graph handle: + +```rust +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum OpticFocus { + Worldline { worldline_id: WorldlineId }, + Strand { strand_id: StrandId }, + Braid { braid_id: BraidId }, + RetainedReading { key: RetainedReadingKey }, + Attachment { owner: AttachmentOwnerRef, attachment_ref: AttachmentRef }, +} +``` + +Coordinate names the causal basis: + +```rust +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum EchoCoordinate { + Worldline { + worldline_id: WorldlineId, + at: CoordinateAt, + }, + Strand { + strand_id: StrandId, + at: CoordinateAt, + parent_basis: Option, + }, + Braid { + braid_id: BraidId, + projection_digest: Hash, + member_count: u64, + }, + RetainedReading { + key: RetainedReadingKey, + }, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CoordinateAt { + Frontier, + Tick(WorldlineTick), + Provenance(ProvenanceRef), +} +``` + +Aperture is the bound on observation: + +```rust +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct OpticAperture { + pub shape: OpticApertureShape, + pub budget: OpticReadBudget, + pub attachment_descent: AttachmentDescentPolicy, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum 
OpticApertureShape { + Head, + SnapshotMetadata, + TruthChannels { channels: Option> }, + QueryBytes { query_id: u32, vars_digest: Hash }, + EntityRange { entity_family: EntityFamilyRef, range: ApertureRange }, + AttachmentBoundary, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct OpticReadBudget { + pub max_bytes: Option, + pub max_nodes: Option, + pub max_ticks: Option, + pub max_attachments: Option, +} +``` + +The open request validates a descriptor. It does not open a mutable object: + +```rust +pub struct OpenOpticRequest { + pub focus: OpticFocus, + pub coordinate: EchoCoordinate, + pub projection_law: ProjectionLawRef, + pub reducer_law: Option, + pub intent_family: IntentFamilyRef, + pub capability: OpticCapability, + pub cause: OpticCause, +} + +pub struct OpenOpticResult { + pub optic: EchoOptic, + pub capability_posture: CapabilityPosture, +} + +pub struct CloseOpticRequest { + pub optic_id: OpticId, + pub cause: OpticCause, +} + +pub struct CloseOpticResult { + pub optic_id: OpticId, +} +``` + +Observation names optic, aperture, coordinate, and versions: + +```rust +pub struct ObserveOpticRequest { + pub optic_id: OpticId, + pub focus: OpticFocus, + pub coordinate: EchoCoordinate, + pub aperture: OpticAperture, + pub projection_version: ProjectionVersion, + pub reducer_version: Option, + pub capability: OpticCapabilityRef, +} + +pub enum ObserveOpticResult { + Reading(OpticReading), + Obstructed(OpticObstruction), +} + +pub struct OpticReading { + pub envelope: ReadingEnvelope, + pub read_identity: ReadIdentity, + pub payload: ObservationPayload, + pub retained: Option, +} +``` + +Reading identity names the question, not just the bytes: + +```rust +pub struct ReadIdentity { + pub optic_id: OpticId, + pub focus_digest: Hash, + pub coordinate: EchoCoordinate, + pub aperture_digest: Hash, + pub projection_version: ProjectionVersion, + pub reducer_version: Option, + pub witness_basis: WitnessBasis, + pub rights_posture: ReadingRightsPosture, + pub 
budget_posture: ReadingBudgetPosture, + pub residual_posture: ReadingResidualPosture, +} + +pub struct RetainedReadingKey { + pub read_identity: ReadIdentity, + pub content_hash: Hash, + pub codec_id: CodecId, + pub byte_len: u64, +} +``` + +Witness basis must be honest about checkpoint plus live tail: + +```rust +pub enum WitnessBasis { + ResolvedCommit { + reference: ProvenanceRef, + state_root: Hash, + commit_hash: Hash, + }, + CheckpointPlusTail { + checkpoint_ref: ProvenanceRef, + checkpoint_hash: Hash, + tail_witness_refs: Vec, + tail_digest: Hash, + }, + WitnessSet { + refs: Vec, + witness_set_hash: Hash, + }, + Missing { + reason: OpticObstructionKind, + }, +} +``` + +Intent dispatch names the proposal and its causal base: + +```rust +pub struct DispatchOpticIntentRequest { + pub optic_id: OpticId, + pub base_coordinate: EchoCoordinate, + pub intent_family: IntentFamilyRef, + pub focus: OpticFocus, + pub actor: OpticActor, + pub cause: OpticCause, + pub capability: OpticCapabilityRef, + pub admission_law: AdmissionLawRef, + pub intent: OpticIntentPayload, +} + +pub enum OpticIntentPayload { + EintV1 { bytes: Vec }, + ContractOp { + op_id: u32, + vars_bytes: Vec, + vars_digest: Hash, + }, +} + +pub enum IntentDispatchResult { + Admitted(AdmittedIntent), + Staged(StagedIntent), + Plural(PluralIntent), + Conflict(IntentConflict), + Obstructed(OpticObstruction), +} +``` + +Capability is explicit and auditable: + +```rust +pub struct OpticCapability { + pub capability_ref: OpticCapabilityRef, + pub subject: OpticActor, + pub allowed_focus: Vec, + pub allowed_apertures: Vec, + pub allowed_intent_families: Vec, + pub max_budget: OpticReadBudget, + pub rights: OpticRights, +} + +pub struct OpticCapabilityRef { + pub capability_id: Hash, + pub issuer_ref: Option, + pub policy_id: Hash, +} +``` + +Obstruction is typed: + +```rust +pub struct OpticObstruction { + pub kind: OpticObstructionKind, + pub optic_id: Option, + pub focus: Option, + pub coordinate: Option, + 
pub witness_basis: Option, + pub message: String, +} + +pub enum OpticObstructionKind { + MissingWitness, + MissingRetainedReading, + StaleBasis, + CapabilityDenied, + BudgetExceeded, + UnsupportedAperture, + UnsupportedProjectionLaw, + UnsupportedIntentFamily, + AttachmentDescentRequired, + AttachmentDescentDenied, + LiveTailRequiresReduction, + ConflictingFrontier, + PluralityRequiresExplicitPolicy, +} +``` + +## Read Semantics + +An optic read must be bounded. + +Every read request names: + +- optic id; +- focus; +- aperture; +- causal coordinate/frontier; +- projection law/version; +- reducer law/version where relevant; +- witness basis; +- read identity; +- residual or obstruction posture; +- bounds/budget posture; +- rights posture. + +Read execution must not fall back to full materialization. If the requested +aperture cannot be answered from available evidence, the result is +`OpticObstruction`, not a large scan disguised as success. + +`ObservationService::observe(...)` is the current read boundary. The first +Optics API should wrap and sharpen that boundary rather than replace it. Existing +`ObservationRequest`, `ObservationArtifact`, `ReadingEnvelope`, and +`ObservationPayload::QueryBytes` remain the nearest concrete implementation +surface. + +## Intent Dispatch Semantics + +The optic write-side surface is `dispatch_optic_intent`. The name intentionally +uses dispatch/propose language, not set/update/mutate. + +Every dispatch names: + +- optic id; +- base coordinate/frontier; +- intent family; +- subject/focus; +- actor/cause; +- capability basis; +- admission law; +- intent payload; +- resulting tick, receipt, or admission posture. + +If the base coordinate is stale, Echo must not silently mutate the current +frontier. It may: + +- reject; +- obstruct; +- stage; +- preserve plurality; +- require rebase; +- admit under an explicitly named law. + +`dispatch_optic_intent` should initially reuse EINT v1 where possible. 
A future +outer admission certificate may bind capability, contract identity, and causal +basis, but that certificate must remain explicit. It must not become hidden +host-side mutation authority. + +Implementation note: the first Rust/ABI slice now exposes +`DispatchOpticIntentRequest`, `OpticIntentPayload::EintV1`, and +`AdmissionLawId`. The shared `KernelPort` default validates focus, base +coordinate, capability, actor/cause, and intent family before routing EINT v1 +bytes through the existing `dispatch_intent` path. Because that existing path +ingests into the runtime inbox instead of proving a committed tick/receipt, the +optic result is `IntentDispatchResult::Staged` with an explicit stage ref and +reason, not a fabricated `Admitted` result. + +Stale-basis validation is explicit. Core proposals can be checked against a +known current coordinate, and engine-backed dispatch resolves the current +worldline coordinate before staging. If the proposal names an older concrete +base, dispatch returns `OpticObstructionKind::StaleBasis`; it does not enqueue +the EINT or advance current provenance. + +Generated-binding readiness is ABI-level, not a generator promise. The current +`echo-wasm-abi` DTO set exposes the optic ids, focus/coordinate/aperture +models, observe and dispatch requests, EINT payload wrapper, typed admission +result, and support refs needed by generated helper code. `echo-wesley-gen` +tests include a hand-written generated-helper-shaped smoke crate so ABI drift +breaks before the generator implementation task. + +The first concrete implementation is deliberately narrow. `WorldlineHeadOptic` +is a generic request-builder example over a worldline head, not a mutable handle +and not a universal optic engine. It builds bounded head-read requests, +QueryBytes-shaped requests that currently return typed projection-law +obstructions when no contract observer is installed, and EINT v1 dispatch +requests with explicit base coordinates. 
Engine-backed `warp-wasm` now exposes +`observe_optic` beside `dispatch_optic_intent`, so the example validates the +same ABI/kernel boundary future generated bindings will use. + +## Admission Outcomes + +Admission outcomes are not `Ok/Err`. + +The API family is: + +```rust +pub enum IntentDispatchResult { + Admitted(AdmittedIntent), + Staged(StagedIntent), + Plural(PluralIntent), + Conflict(IntentConflict), + Obstructed(OpticObstruction), +} +``` + +Meanings: + +- `Admitted`: Echo accepted the intent into witnessed history and can name the + resulting tick/admission receipt. +- `Staged`: Echo retained the proposal for explicit later admission, review, or + rebase. It did not mutate the named frontier. +- `Plural`: Echo preserved lawful plurality instead of forcing a single latest + result. +- `Conflict`: Echo found incompatible causal claims under the named admission + law. +- `Obstructed`: Echo cannot lawfully proceed because evidence, rights, basis, + budget, attachment, or projection law is missing. + +No outcome may be represented as boolean success, latest-writer-wins, hidden +host-time ordering, or string status. + +## Cached And Retained Readings + +`echo-cas` names bytes. A read identity names the question those bytes answer. + +A retained reading therefore needs both: + +```text +content_hash -> byte identity +read_identity -> semantic coordinate and law identity +``` + +A cached reading is valid only for exactly the identity it names: + +- coordinate/frontier; +- witness basis; +- projection version; +- reducer version; +- aperture; +- rights posture; +- budget posture; +- residual posture. + +New ticks create new frontiers. They do not mutate old readings. A retained +reading can be reused only when its `ReadIdentity` is exactly valid for the +request or when an explicit witness relation proves containment/equivalence and +the returned identity names that relation. + +`retain_reading` and `reveal_reading` belong to retention, not optic mutation. 
+They must not create or alter substrate truth. + +The initial core surface is intentionally small: `RetainedReadingCache` stores +encoded reading bytes under a `RetainedReadingDescriptor`, and +`reveal_reading` succeeds only when the retained key and exact `ReadIdentity` +match. A content hash can index candidate retained readings for diagnostics, but +it is not reveal authority and is not the cache key. + +## Live Tail Honesty + +An optic read must not return a stale checkpoint hash as if it identified the +live result. + +Honest options are: + +- reduce the live tail under a bounded witness basis; +- return a read identity that names checkpoint basis plus tail witness set; +- return a slice hash or witness-set hash with explicit meaning; +- fail closed with obstruction or missing-basis posture. + +The key law: + +```text +checkpoint_hash != live_read_identity +``` + +unless the read identity also proves there is no live tail or names the exact +tail witness set included in the reading. + +The initial `observe_optic` bridge uses the second honest option when a replay +checkpoint exists behind the live frontier: the `ReadIdentity` witness basis is +`CheckpointPlusTail`, with the checkpoint basis, post-checkpoint provenance refs, +and a tail digest. If that tail cannot be enumerated within the requested tick +budget, the read must fail closed instead of identifying the live result with +the checkpoint alone. + +## Attachments And Recursive Apertures + +Attachments are aperture boundaries. + +Default readings should expose attachment refs, attachment posture, or +obstruction. They must not recursively load attachment content. + +Recursive descent into attachments requires: + +- explicit aperture request; +- explicit capability rights; +- budget sufficient for the descent; +- projection law for the nested subject; +- witness basis for the boundary; +- residual/obstruction posture if descent is refused or incomplete. 
+ +This applies equally to content blobs, causal artifacts, retained readings, +receipt/witness refs, foreign suffix shells, and future nested WARP coordinates. + +The initial bridge implements the boundary as a typed fail-closed posture: +`AttachmentBoundary` focus with `BoundaryOnly` descent returns +`AttachmentDescentRequired`, carrying the attachment key in the obstruction +focus. `Explicit` descent requires a positive attachment budget and then returns +`AttachmentDescentDenied` until an attachment projection law and capability +checker are installed. No nested WARP or attachment payload is materialized by +default. + +## Capability Model + +Capabilities limit both reveal and proposal. + +Read capability controls: + +- focus scope; +- allowed apertures; +- attachment descent; +- max budget; +- rights posture; +- retained-reading reveal. + +Intent capability controls: + +- allowed intent families; +- base-coordinate policy; +- admission law; +- actor/cause binding; +- whether staging, plurality preservation, conflict artifact creation, or rebase + request is allowed. + +Capabilities must be explicit request fields or resolvable by explicit +capability refs. They must not be ambient host state hidden behind a runtime bag. + +## Relationship To Existing Echo Doctrine + +This design aligns with existing Echo doctrine: + +- **Witnessed causal history as substrate truth:** optics read and propose + against history; admitted ticks and receipts remain truth. +- **Observer-relative readings:** `ReadingEnvelope`, observer basis, projection + law, reducer law, residual posture, and rights posture describe the emitted + reading. +- **Bounded replay/reveal:** aperture and witness basis make bounded reads and + retained reveal explicit. +- **Suffix admission:** optic dispatch can feed existing admission/evaluator + paths; it does not invent a sync daemon. +- **Tick receipts as holographic witnesses:** admitted intent outcomes must name + ticks, receipts, and witness refs. 
+- **echo-cas as retention, not ontology:** CAS stores bytes; `ReadIdentity` + stores semantic meaning. +- **Deterministic boundaries:** serde may exist on non-authoritative adapter or + diagnostic shapes, but it must not be the authority for intents, + graph-preserved facts, receipts, witness material, read identity, retained + reading identity, or causal history. Those surfaces enter Echo as + domain-separated canonical bytes, and values with nondeterministic encodings + such as floats must be normalized before admission or retention. +- **Echo as peer Continuum runtime:** optics are Echo-local runtime/API law. + They do not make Echo subordinate to git-warp and do not require a git-warp + dependency. + +This design also sharpens earlier doctrine from `0011`: an optic is the runtime +boundary object, an observer is revelation-side, and reading artifacts are +observer-relative emissions with coordinate and witness posture. + +## Test Strategy + +Initial RED tests should prove: + +- optic reads name causal basis in the returned `ReadIdentity`; +- optic reads are bounded by `OpticAperture` and `OpticReadBudget`; +- missing evidence returns `OpticObstruction`, not full materialization; +- cached readings are keyed by `ReadIdentity`, not only content hash; +- live-tail reads do not reuse stale checkpoint hashes; +- intent dispatch requires explicit base coordinate; +- stale base coordinate does not silently mutate current frontier; +- admission outcomes remain typed as admitted, staged, plural, conflict, or + obstructed; +- attachment descent is explicit and capability/budget checked; +- plumber/debug APIs cannot satisfy public optic reads as hidden fallbacks. + +Use a fake/example optic implementation before broadening: + +- one worldline head optic; +- one QueryView-style contract optic; +- one retained reading reveal; +- one attachment-boundary placeholder. + +No test should call a global graph mutation or direct setter as the public path. 
+ +## Backlog + +The executable versions of these tasks were split into top-level METHOD backlog +cards. Completed backlog cards are pruned from `docs/method/backlog/**`; current +unresolved work is visible through `cargo xtask method status` and +`docs/audits/backlog-staleness-audit.md`. This section records the source task +detail for the design packet; it is not the scheduling surface. New executable +work should be added as a visible card with explicit `Depends on:` links. + +### TASK-001: Add Echo Optics doctrine packet + +Title: Add Echo Optics doctrine packet. + +Goal: Land the controlling design that defines optics as bounded, +capability-scoped, coordinate-anchored read/propose surfaces. + +Files likely touched: + +- `docs/design/0018-echo-optics-api-design/design.md` +- `docs/design/0018-echo-optics-api-design/request.md` + +Acceptance criteria: + +- Design uses the required output sections. +- Design rejects direct mutation, global graph/state APIs, and host-bag + abstractions. +- Backlog sequence is METHOD-friendly and small-sliced. + +Non-goals: + +- Do not implement runtime code. +- Do not design jedit as the primary subject. + +Test expectations: + +- `markdownlint-cli2` and Prettier pass. +- `pnpm docs:build` passes. + +### TASK-002: Define core optic nouns and IDs + +Title: Define core optic nouns and IDs. + +Goal: Add initial Rust DTOs for `EchoOptic`, `OpticId`, `OpticFocus`, +`OpticAperture`, `EchoCoordinate`, `ProjectionVersion`, and `ReducerVersion`. + +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/lib.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` + +Acceptance criteria: + +- DTOs are deterministic, canonical where ABI-facing, and domain-separated where + hashed. Serde is not the authoritative encoding for any causal or retained + identity surface. +- Focus covers worldline, strand, braid, retained reading, and attachment + boundary without exposing a global graph handle. 
+ +Non-goals: + +- Do not add a universal optic engine. +- Do not add jedit/editor/file types. + +Test expectations: + +- Unit tests for stable ID hashing and focus/coordinate encoding. +- ABI round-trip tests for public DTOs. + +### TASK-003: Define ReadingEnvelope and ReadIdentity extensions + +Title: Define ReadingEnvelope and ReadIdentity. + +Goal: Extend current reading metadata with first-class read identity fields +without breaking existing observation behavior. + +Files likely touched: + +- `crates/warp-core/src/observation.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-wasm/src/warp_kernel.rs` + +Acceptance criteria: + +- Read identity names optic id, focus digest, coordinate, aperture digest, + projection version, reducer version, witness basis, rights, budget, and + residual posture. +- Existing observations can produce compatible identity for built-in plans. + +Non-goals: + +- Do not make CAS hash the read identity. +- Do not require full materialization to compute identity. + +Test expectations: + +- Same read question yields same identity. +- Coordinate, aperture, projection version, or witness basis changes identity. + +### TASK-004: Define WitnessBasis and retained reading key + +Title: Define WitnessBasis and retained reading key. + +Goal: Represent commit, checkpoint-plus-tail, witness-set, and missing-basis +postures for honest retained readings. + +Files likely touched: + +- `crates/warp-core/src/observation.rs` +- `crates/echo-cas/src/lib.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` + +Acceptance criteria: + +- Retained reading key includes content hash and semantic read identity. +- Checkpoint-plus-tail identity cannot collapse to checkpoint hash alone. + +Non-goals: + +- Do not build storage GC policy. +- Do not implement proof systems. + +Test expectations: + +- Retained reading lookup by content hash alone fails. +- Checkpoint-plus-tail and checkpoint-only identities differ. 
+ +### TASK-005: Define obstruction and admission result families + +Title: Define optic obstruction and admission result families. + +Goal: Add typed `OpticObstruction` and `IntentDispatchResult` enums. + +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-wasm/src/warp_kernel.rs` + +Acceptance criteria: + +- Outcomes include Admitted, Staged, Plural, Conflict, and Obstructed. +- Stale basis, missing witness, budget exceeded, capability denied, and + attachment descent required are distinct obstruction kinds. + +Non-goals: + +- Do not model outcomes as `Ok/Err`, bool, or string status. +- Do not introduce latest-writer-wins fallback. + +Test expectations: + +- ABI serialization preserves outcome variants. +- Exhaustive matching tests fail if variants collapse. + +### TASK-006: Define open_optic and close_optic request models + +Title: Define open optic and close optic request models. + +Goal: Add descriptor-validation DTOs for opening and closing session-local optic +resources. + +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` + +Acceptance criteria: + +- `open_optic` validates focus, coordinate, projection law, intent family, and + capability. +- `close_optic` releases only session-local descriptor resources. +- Closing an optic does not mutate subject history or invalidate old readings. + +Non-goals: + +- Do not make optics file handles. +- Do not implement mutable object handles. + +Test expectations: + +- Opening denied capability returns typed obstruction/error. +- Closing does not change observed frontier or provenance length. + +### TASK-007: Define observe_optic model with bounds and aperture + +Title: Define observe optic model with bounds and aperture. + +Goal: Add the bounded read request/result model and adapt one existing +ObservationService path through it. 
+ +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/observation.rs` +- `crates/warp-wasm/src/warp_kernel.rs` + +Acceptance criteria: + +- Observe request includes optic id, focus, coordinate, aperture, projection + version, reducer version, and capability ref. +- Result returns reading or obstruction. +- No hidden full materialization fallback exists. + +Non-goals: + +- Do not replace all ObservationService internals in this slice. +- Do not add global graph query API. + +Test expectations: + +- Bounded head/snapshot optic returns read identity. +- Oversized aperture returns budget obstruction. + +### TASK-008: Define dispatch_optic_intent with explicit base coordinate + +Title: Define dispatch optic intent with explicit base coordinate. + +Goal: Add the write-side proposal DTO and route one existing EINT path through +the optic dispatch model. + +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/head_inbox.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` + +Acceptance criteria: + +- Request names optic id, base coordinate, intent family, focus, actor/cause, + capability, admission law, and payload. +- Current EINT v1 payloads can be carried. +- Dispatch outcome is typed. + +Non-goals: + +- Do not add setters. +- Do not create a second intent envelope without a failing RED. + +Test expectations: + +- Missing base coordinate is impossible or rejected. +- Accepted intent names resulting tick/receipt/admission posture. + +### TASK-009: Add stale-basis obstruction tests + +Title: Add stale-basis obstruction tests. + +Goal: Prove stale base coordinate does not silently mutate current frontier. + +Files likely touched: + +- `crates/warp-core/tests/optic_dispatch_tests.rs` +- `crates/warp-core/src/optic.rs` + +Acceptance criteria: + +- Dispatch against stale base returns Obstructed, Staged, Plural, Conflict, or + explicitly law-admitted result. 
+- The default path must not mutate latest frontier silently. + +Non-goals: + +- Do not implement rebase workflow. +- Do not hide host-time ordering. + +Test expectations: + +- Provenance length and current head remain unchanged for obstructed stale-base + dispatch. + +### TASK-010: Add cached-reading identity tests + +Title: Add cached-reading identity tests. + +Goal: Prove retained/cached readings are keyed by read identity, not just +content hash. + +Files likely touched: + +- `crates/warp-core/tests/optic_retention_tests.rs` +- `crates/echo-cas/src/lib.rs` +- `crates/warp-core/src/observation.rs` + +Acceptance criteria: + +- Same content bytes under different coordinate or aperture produce distinct + retained keys. +- Reveal requires matching read identity. + +Non-goals: + +- Do not build distributed CAS. +- Do not add semantic ontology to CAS hashes. + +Test expectations: + +- Content-hash-only reveal returns obstruction or lookup miss. +- Matching read identity reveals payload. + +### TASK-011: Add live-tail hash honesty tests + +Title: Add live-tail hash honesty tests. + +Goal: Prevent stale checkpoint hashes from identifying live optic readings. + +Files likely touched: + +- `crates/warp-core/tests/optic_live_tail_tests.rs` +- `crates/warp-core/src/observation.rs` + +Acceptance criteria: + +- A live frontier with checkpoint plus tail cannot return checkpoint-only + identity. +- Result either reduces live tail, names checkpoint-plus-tail witness basis, or + obstructs. + +Non-goals: + +- Do not implement production compaction/wormholes. + +Test expectations: + +- Add tick after checkpoint; live read identity changes and names tail evidence. + +### TASK-012: Add attachment boundary/descent placeholder model + +Title: Add attachment boundary and descent placeholder model. + +Goal: Make attachments explicit aperture boundaries in optic reads. 
+ +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/attachment.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` + +Acceptance criteria: + +- Default readings expose attachment refs or obstruction posture. +- Recursive descent requires explicit aperture, capability, budget, and law. + +Non-goals: + +- Do not recursively materialize attachments by default. +- Do not implement nested WARP runtime. + +Test expectations: + +- Read with no descent returns attachment boundary posture. +- Read with unauthorized descent returns typed obstruction. + +### TASK-013: Add narrow fake/example optic implementation + +Title: Add narrow fake/example optic implementation. + +Goal: Implement one simple optic path to validate ergonomics without broad +runtime abstraction. + +Files likely touched: + +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/tests/optic_example_tests.rs` +- `crates/warp-wasm/src/warp_kernel.rs` + +Acceptance criteria: + +- Example optic can read a worldline head or QueryBytes-style payload. +- Example optic can dispatch one EINT intent with explicit base coordinate. +- It uses typed read/admission outcomes. + +Non-goals: + +- Do not implement a universal optic engine. +- Do not use jedit as the concrete runtime dependency. + +Test expectations: + +- Read, dispatch, stale-basis, and obstruction tests pass on the example. + +### TASK-014: Add adapter notes for future consumers + +Title: Add adapter notes for future consumers. + +Goal: Document how editors, debuggers, inspectors, replay tools, import/export +flows, retained reading caches, and GraphQL adapters should sit above the core +Optics API. + +Files likely touched: + +- `docs/architecture/echo-optics-adapter-notes.md` +- `docs/design/0018-echo-optics-api-design/design.md` + +Acceptance criteria: + +- Notes clearly say GraphQL is an adapter illustration, not the runtime + substrate. +- Notes reject global state adapters and host-bag abstractions. 
+- Notes show `jedit` only as an ergonomic example consumer. + +Non-goals: + +- Do not design product-specific APIs. +- Do not add a sync daemon or git-warp dependency. + +Test expectations: + +- Docs checks pass. +- Links from design packet and backlog card resolve in docs build. + +### TASK-015: Add Echo-owned Wesley optic binding spec + +Title: Add Echo-owned Wesley optic binding spec. + +Goal: Specify how `echo-wesley-gen` emits typed optic families and bindings +without turning Echo core into application subclasses. + +Files likely touched: + +- `docs/design/0018-echo-optics-api-design/wesley-compiled-optic-bindings.md` +- `docs/design/0018-echo-optics-api-design/design.md` + +Acceptance criteria: + +- Spec says generated output builds `ObserveOpticRequest` and + `DispatchOpticIntentRequest`. +- Spec says EINT packing may be hidden but intent dispatch remains explicit at + the Echo boundary. +- Spec rejects generated setters and mutable handles. + +Non-goals: + +- Do not implement generator changes. +- Do not invent a replacement for EINT v1. + +Test expectations: + +- Docs checks pass. + +### TASK-016: Extend echo-wesley-gen with optic request builders + +Title: Extend echo-wesley-gen with optic request builders. + +Goal: Generate typed `*_observe_optic_request` and +`*_dispatch_optic_intent_request` helpers alongside existing compatibility +helpers. + +Files likely touched: + +- `crates/echo-wesley-gen/src/main.rs` +- `crates/echo-wesley-gen/src/ir.rs` +- `crates/echo-wesley-gen/tests/generation.rs` +- `crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json` + +Acceptance criteria: + +- Query ops emit typed optic observation request builders. +- Mutation ops emit typed optic dispatch request builders. +- Mutation builders require explicit base coordinate by default. +- Existing EINT and `ObservationRequest` helpers remain available. +- Generated names do not collide with user contract types. 
+ +Non-goals: + +- Do not remove existing helper surface in this slice. +- Do not add jedit-specific codegen. + +Test expectations: + +- Generated std smoke crate compiles. +- Generated no-std smoke crate compiles where request builders are no-std-safe. +- Tests assert no generated method uses `set_*` naming. + +### TASK-017: Add Echo Optics ABI DTOs required by generated bindings + +Title: Add Echo Optics ABI DTOs required by generated bindings. + +Goal: Add the minimum ABI DTOs needed for generated optic request builders to +compile against `echo-wasm-abi`. + +Files likely touched: + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/echo-wesley-gen/tests/generation.rs` + +Acceptance criteria: + +- ABI exposes `OpticId`, `OpticFocus`, `EchoCoordinate`, `OpticAperture`, + `ObserveOpticRequest`, `DispatchOpticIntentRequest`, + `OpticIntentPayload`, `IntentDispatchResult`, and supporting refs. +- DTOs encode deterministically across the ABI boundary. +- Generated optic helper smoke crate compiles against the ABI. + +Non-goals: + +- Do not implement full runtime semantics. +- Do not add global graph APIs. + +Test expectations: + +- ABI encode/decode round-trips. +- Generated consumer crate compiles with generated optic helpers. diff --git a/docs/design/0018-echo-optics-api-design/request.md b/docs/design/0018-echo-optics-api-design/request.md new file mode 100644 index 00000000..f0d21987 --- /dev/null +++ b/docs/design/0018-echo-optics-api-design/request.md @@ -0,0 +1,376 @@ + + + +# Echo Optics API Design Request + +This file archives the source prompt for Echo's first-class Optics API design. +The design doc and backlog tasks for this body of work should use this request +as the controlling reference. + +````text +Yep — use this instead: + +You are working in the Echo repository. +Your task is to design Echo’s first-class Optics API and API surfaces. +This is not a jedit design task. 
jedit may be used only as an example consumer to validate ergonomics. +# Goal +Design a generic Echo Optic model for bounded, capability-scoped, coordinate-anchored observation and intent dispatch over Echo causal history. +An Echo Optic must support future consumers such as editors, debuggers, inspectors, replay tools, import/export flows, and retained reading caches. +# Core Doctrine +Use this rule: +```text +Optic reads. +Intent proposes. +Echo admits. +Receipt witnesses. + +An optic is not a mutable handle. + +An optic is: + +Optic = capability + focus + coordinate + projection law + intent family + +An optic does not mutate its subject. It names: + +1. a lawful way to observe a focused projection +2. a lawful family of intents that may be submitted against that projection + +Optic writes are never setters. They are intent dispatches against an explicit causal basis. + +Required Conceptual Model + +Design optics over: + +* worldlines +* strands +* braids +* coordinates/frontiers +* retained readings +* cached readings +* observer apertures +* witness-backed projections + +An optic read should: + +choose aperture +-> slice causal history +-> lower under law +-> witness +-> retain if needed +-> emit observer-relative reading + +An optic intent dispatch should: + +construct intent +-> validate capability +-> validate causal basis +-> apply/admit under contract law +-> emit tick/admission result +-> emit receipt/witness + +Required API Surface + +Design the smallest useful Echo Optic API. + +It should include generic concepts such as: + +* EchoOptic +* OpticId +* OpticFocus +* OpticAperture +* EchoCoordinate +* ReadingEnvelope +* ObserveOpticRequest +* DispatchOpticIntentRequest +* IntentDispatchResult +* OpticCapability +* OpticObstruction +* ReadIdentity +* WitnessBasis +* ProjectionVersion +* ReducerVersion + +Sketch possible interfaces in the repo’s preferred language/style. + +Do not make GraphQL the core runtime API. 
GraphQL-like examples may be included only as adapter illustrations. + +Required Semantics + +Reads + +An optic read must be bounded. + +A read must name: + +* optic id +* focus +* aperture +* causal coordinate/frontier +* projection law/version +* reducer law/version where relevant +* witness basis +* read identity +* residual or obstruction posture +* bounds/budget posture + +Reads must not secretly fall back to full materialization. + +If evidence is missing, return an explicit obstruction. + +Writes / Intent Dispatch + +The optic must not expose setters. + +The write-side surface should be named something like: + +* dispatchOpticIntent +* submitOpticIntent +* proposeIntent + +Do not call it set. + +An intent dispatch must name: + +* optic id +* base coordinate/frontier +* intent family +* subject/focus +* actor/cause +* capability basis +* admission law +* resulting tick/receipt/admission posture + +If the base coordinate is stale, the API must not silently mutate current state. + +It may: + +* reject +* obstruct +* stage +* preserve plurality +* require rebase +* admit under explicitly named law + +Admission Outcomes + +Keep admission outcomes typed and explicit: + +* Admitted +* Staged +* Plural +* Conflict +* Obstructed + +Do not collapse this into: + +* Ok/Err +* boolean success +* string status +* latest-writer-wins +* hidden host-time ordering + +Cached / Retained Readings + +Design how optics interact with retained readings and echo-cas. + +A retained reading needs both: + +* content hash +* semantic coordinate / read identity + +The CAS hash names bytes. + +The read identity names the question those bytes answer. + +A cached reading remains valid only for the coordinate, witness basis, projection version, reducer version, aperture, rights, and budget posture it names. + +New ticks should create new frontiers, not mutate old readings. + +Live Tail Honesty + +Do not allow an optic read to return a stale checkpoint hash as if it identified the live result. 
+ +Honest options: + +* reduce the live tail under bounded witness basis +* return a read identity naming checkpoint basis plus tail witness set +* return slice hash or witness-set hash with explicit meaning +* fail closed with obstruction or missing-basis posture + +Attachments / Recursive Boundaries + +Optics should treat attachments as explicit aperture boundaries. + +Default readings should expose attachment refs or obstruction posture. + +Recursive descent into attachments must be explicit and capability/budget/law checked. + +Non-Goals + +Do not design: + +* a global graph API +* a global mutable state API +* a file-handle API +* direct setters +* hidden materialization fallback +* a sync daemon +* git-warp dependency +* proof system implementation +* GraphQL-first runtime substrate +* host-bag abstractions like RuntimeFacade, ObservationManager, UniversalMaterializer, or GraphLikeRuntimeAdapter + +Deliverables + +Produce: + +1. Design Summary + +Explain the ideal Echo Optics API. + +Include: + +* what an optic is +* what an optic is not +* read semantics +* intent dispatch semantics +* coordinate/frontier behavior +* cached reading behavior +* live-tail honesty +* admission outcomes +* attachment boundaries +* capability model + +2. Proposed Types / Interfaces + +Draft concrete type/interface sketches for the Optics API. + +Include: + +* optic descriptor +* open optic request/result +* observe request/result +* intent dispatch request/result +* reading envelope +* read identity +* witness basis +* obstruction model +* admission outcome model +* retained reading key +* capability model + +3. API Surface Proposal + +Propose the smallest initial public API surface. + +Prefer something like: + +openOptic +closeOptic +observeOptic +dispatchOpticIntent +retainReading +revealReading + +But challenge this list if better names or smaller cuts exist. + +Clearly separate: + +Optic observes. +Admission admits. +Retention retains. +Plumber maintains. +Debug explains. 
+ +4. Compatibility With Existing Echo Doctrine + +Explain how this design aligns with: + +* witnessed causal history as substrate truth +* observer-relative readings +* bounded replay/reveal +* suffix admission +* tick receipts as holographic witnesses +* echo-cas as retention, not ontology +* Echo as a peer Continuum runtime, not a git-warp subordinate + +5. Test Strategy + +Propose tests proving: + +* optic reads name causal basis +* optic reads are bounded +* missing evidence obstructs instead of materializing everything +* cached readings are keyed by read identity, not just content hash +* live-tail reads do not reuse stale checkpoint hashes +* intent dispatch requires explicit base coordinate +* stale base coordinate does not silently mutate current state +* admission outcomes stay typed +* attachment descent is explicit +* plumber/debug APIs do not become hidden fallbacks + +6. Backlog Tasks + +Produce a METHOD-friendly backlog task series. + +Each task must include: + +* title +* goal +* files likely touched +* acceptance criteria +* non-goals +* test expectations + +Prefer small slices. + +Suggested order: + +1. Add doctrine/design packet for Echo Optics. +2. Define core optic nouns and IDs. +3. Define ReadingEnvelope and ReadIdentity. +4. Define WitnessBasis and retained reading key. +5. Define optic obstruction/admission result families. +6. Define openOptic / closeOptic request models. +7. Define observeOptic model with bounds and aperture. +8. Define dispatchOpticIntent model with explicit base coordinate. +9. Add stale-basis obstruction tests. +10. Add cached-reading identity tests. +11. Add live-tail hash honesty tests. +12. Add attachment boundary/descent placeholder model. +13. Add narrow fake/example optic implementation for one simple contract. +14. Add adapter notes for future editor/debugger/replay consumers. 
+ +Output Format + +Use: + +# Echo Optics API Design +## Summary +## Core Doctrine +## Optic Model +## Public API Surface +## Types And Interfaces +## Read Semantics +## Intent Dispatch Semantics +## Admission Outcomes +## Cached And Retained Readings +## Live Tail Honesty +## Attachments And Recursive Apertures +## Capability Model +## Relationship To Existing Echo Doctrine +## Test Strategy +## Backlog +### TASK-001: ... +### TASK-002: ... + +Be strict. + +Reject direct mutation. + +Reject global graph/state APIs. + +Reject broad host-bag abstractions. + +Keep the design small, typed, bounded, causal, capability-scoped, and testable. +```` diff --git a/docs/design/0018-echo-optics-api-design/wesley-compiled-optic-bindings.md b/docs/design/0018-echo-optics-api-design/wesley-compiled-optic-bindings.md new file mode 100644 index 00000000..27e55dd7 --- /dev/null +++ b/docs/design/0018-echo-optics-api-design/wesley-compiled-optic-bindings.md @@ -0,0 +1,490 @@ + + + +# Wesley-Compiled Optic Bindings For Echo + +This companion spec defines the Echo-owned Wesley compiler extension required +by [Echo Optics API Design](./design.md). + +The implementation home is `crates/echo-wesley-gen`. Wesley owns authored +contract semantics and IR production. Echo owns the generated Echo-facing +runtime bindings that turn those contracts into typed optic descriptors, +observation requests, and intent-dispatch proposals. + +## Decision + +Wesley output should compile application contracts into typed Echo Optic +bindings. + +It should not compile them into Echo-core subclasses, global graph adapters, or +mutable handles. + +The correct shape is: + +```text +Wesley contract + -> Echo-owned generator extension + -> typed optic family + -> typed optic binding + -> ObserveOpticRequest for reads + -> DispatchOpticIntentRequest for proposals +``` + +Byte-level EINT packing may be hidden inside the generated binding. Intent +dispatch itself must remain explicit at the Echo API boundary. 
 + +The rule: + +```text +EINT bytes are a binding implementation detail. +Intent dispatch is not an optic implementation detail. +``` + +## Why Not Subclasses + +Avoid "Optics subclasses" as the mental model. + +Subclass language implies that Echo core owns application-specific runtime +types or inheritance slots. Echo should not grow specialized text, debugger, +Graft, editor, or consumer subclasses. + +Use these names instead: + +- Wesley-generated optic bindings; +- Wesley-generated optic families; +- generated contract optic descriptors; +- typed optic adapters. + +The generated Rust type may be a struct with methods, but those methods are +typed request builders or dispatch convenience wrappers over generic Echo +requests. + +## Generated Layers + +`echo-wesley-gen` should emit four layers. + +1. Contract metadata: + - schema hash; + - codec id; + - registry version; + - contract family id or derivation inputs; + - operation catalog; + - projection and reducer versions. +2. DTO and codec layer: + - generated input/result structs; + - canonical vars encoders; + - result decoders where available. +3. Optic descriptor layer: + - typed focus builders; + - typed `OpenOpticRequest` builders; + - typed query/read aperture helpers. +4. Intent proposal layer: + - typed mutation vars encoders; + - EINT v1 packing where applicable; + - typed `DispatchOpticIntentRequest` builders. + +The generated module may still expose low-level helpers for tests and advanced +hosts, but the primary app-facing surface should be optic request builders. + +## Generated Type Shape + +Candidate generated output: + +```rust +pub struct GeneratedContractOpticFamily { + pub family: ContractFamilyRef, + pub registry: &'static dyn RegistryProvider, + pub projection_version: ProjectionVersion, + pub reducer_version: Option<ReducerVersion>, +} + +pub struct GeneratedCounterValueOptic { + pub optic: EchoOptic, +} +``` + +The family object names static generated metadata. 
The opened optic binding +names one validated `EchoOptic` descriptor. Neither is a mutable handle. + +## Opening A Typed Optic + +Generated read/query helpers should create `OpenOpticRequest` values rather +than directly calling `observe`. + +Example: + +```rust +impl GeneratedContractOpticFamily { + pub fn counter_value_optic( + &self, + worldline_id: WorldlineId, + coordinate: EchoCoordinate, + capability: OpticCapability, + cause: OpticCause, + ) -> OpenOpticRequest { + OpenOpticRequest { + focus: OpticFocus::Worldline { worldline_id }, + coordinate, + projection_law: ProjectionLawRef::ContractQuery { + family: self.family, + op_id: OP_COUNTER_VALUE, + version: self.projection_version, + }, + reducer_law: self.reducer_version.map(ReducerLawRef::Contract), + intent_family: IntentFamilyRef::Contract { + family: self.family, + allowed_ops: &'static [OP_INCREMENT], + }, + capability, + cause, + } + } +} +``` + +The exact syntax will change with real DTO names. The semantic requirements are +stable: + +- the generated request names focus; +- it names coordinate; +- it names projection law and version; +- it names intent family; +- it carries capability and cause. + +## Observing Through A Typed Optic + +Generated query helpers should build `ObserveOpticRequest`. 
 + +Example: + +```rust +impl GeneratedCounterValueOptic { + pub fn observe_counter_value( + &self, + vars: &CounterValueVars, + aperture: OpticAperture, + ) -> Result<ObserveOpticRequest, GeneratedQueryError> { + let vars_bytes = encode_counter_value_vars(vars)?; + let vars_digest = hash_vars(&vars_bytes); + + Ok(ObserveOpticRequest { + optic_id: self.optic.optic_id, + focus: self.optic.focus.clone(), + coordinate: self.optic.coordinate.clone(), + aperture: aperture.with_query(OP_COUNTER_VALUE, vars_digest), + projection_version: self.optic.projection_law.version(), + reducer_version: self.optic.reducer_law.as_ref().map(ReducerLawRef::version), + capability: self.optic.capability.clone(), + }) + } +} +``` + +The helper may also expose a lower-level raw-vars variant: + +```rust +pub fn observe_counter_value_raw_vars( + &self, + vars_bytes: &[u8], + aperture: OpticAperture, +) -> ObserveOpticRequest; +``` + +This mirrors the existing `*_observation_request_raw_vars` pattern while moving +the output from `ObservationRequest` to `ObserveOpticRequest`. + +## Dispatching Through A Typed Optic + +Generated mutation helpers should build `DispatchOpticIntentRequest`. + +They may provide convenience methods that call an injected `EchoOptics` port, +but the request object must remain visible and testable. 
 + +Request-builder form: + +```rust +impl GeneratedCounterValueOptic { + pub fn increment_intent( + &self, + base_coordinate: EchoCoordinate, + vars: &IncrementVars, + actor: OpticActor, + cause: OpticCause, + ) -> Result<DispatchOpticIntentRequest, GeneratedIntentError> { + let vars_bytes = encode_increment_vars(vars) + .map_err(GeneratedIntentError::EncodeVars)?; + let vars_digest = hash_vars(&vars_bytes); + let eint = pack_intent_v1(OP_INCREMENT, &vars_bytes) + .map_err(GeneratedIntentError::PackEnvelope)?; + + Ok(DispatchOpticIntentRequest { + optic_id: self.optic.optic_id, + base_coordinate, + intent_family: IntentFamilyRef::Contract { + family: self.optic.contract_family(), + op_id: OP_INCREMENT, + }, + focus: self.optic.focus.clone(), + actor, + cause, + capability: self.optic.capability.clone(), + admission_law: AdmissionLawRef::ContractDefault { + family: self.optic.contract_family(), + }, + intent: OpticIntentPayload::EintV1 { + bytes: eint, + op_id: OP_INCREMENT, + vars_digest, + }, + }) + } +} +``` + +Convenience dispatch form: + +```rust +impl GeneratedCounterValueOptic { + pub fn dispatch_increment( + &self, + port: &mut dyn EchoOptics, + base_coordinate: EchoCoordinate, + vars: &IncrementVars, + actor: OpticActor, + cause: OpticCause, + ) -> Result<IntentDispatchResult, GeneratedIntentError> { + let request = self.increment_intent(base_coordinate, vars, actor, cause)?; + Ok(port.dispatch_optic_intent(request)) + } +} +``` + +This is allowed because the generated method still requires an explicit base +coordinate and still crosses Echo through `dispatch_optic_intent`. + +Forbidden generated forms: + +```rust +optic.set_counter_value(...) +optic.replace_range(...) +optic.update(...) +``` + +unless the method name and signature clearly express intent proposal and require +an explicit causal basis. Prefer: + +- `build_*_intent`; +- `*_intent`; +- `dispatch_*`; +- `submit_*_intent`; +- `propose_*`. + +## Causal Basis Rule + +Generated mutation helpers must not default to "current frontier". 
+ +They must require one of: + +- explicit `base_coordinate`; +- explicit `BasePolicy::UseOpenedCoordinate`; +- explicit `BasePolicy::ResolveFrontierAtDispatch` with a named admission law + that can obstruct, stage, or preserve plurality. + +The default generated API should require `base_coordinate`. + +If a convenience method resolves frontier at dispatch time, the method name must +make that visible: + +```rust +dispatch_increment_at_resolved_frontier(...) +``` + +and the result must still be typed as `IntentDispatchResult`. + +## Query Helper Migration + +Current `echo-wesley-gen` emits: + +```text +*_observation_request(...) +*_observation_request_raw_vars(...) +``` + +The Optics extension should add, not immediately replace: + +```text +*_observe_optic_request(...) +*_observe_optic_request_raw_vars(...) +``` + +The old helpers can remain during migration. They build the lower-level +`ObservationRequest` used by the current ABI. The new helpers build the +first-class optic request. + +Migration rule: + +```text +ObservationRequest helpers are compatibility helpers. +ObserveOpticRequest helpers are the preferred generated read surface. +``` + +Implementation note: `echo-wesley-gen` now emits both helper families. Query +ops keep `*_observation_request` and `*_observation_request_raw_vars` while also +emitting `*_observe_optic_request` and `*_observe_optic_request_raw_vars`. +The optic helpers require an optic id, focus, coordinate, capability, projection +version, optional reducer version, and explicit read budget, then build a +bounded QueryBytes aperture. + +## Mutation Helper Migration + +Current `echo-wesley-gen` emits: + +```text +pack_*_intent(...) +pack_*_intent_raw_vars(...) +``` + +The Optics extension should add: + +```text +*_dispatch_optic_intent_request(...) +*_dispatch_optic_intent_request_raw_vars(...) +``` + +The old helpers remain useful because EINT v1 is still the inner canonical +payload. 
The new helpers wrap those bytes into an optic dispatch request with +explicit base coordinate, focus, actor/cause, capability, and admission law. + +Migration rule: + +```text +EINT pack helpers are low-level payload helpers. +DispatchOpticIntentRequest helpers are the preferred generated proposal surface. +``` + +Implementation note: mutation ops now emit +`*_dispatch_optic_intent_request` and +`*_dispatch_optic_intent_request_raw_vars` beside existing +`pack_*_intent` helpers. The generated dispatch builders require an explicit +base coordinate by default and keep EINT v1 as the inner payload. Setter-like +operation names are not emitted as `set_*` optic methods; a `setTheme` mutation +becomes a proposal builder such as +`propose_set_theme_dispatch_optic_intent_request`. + +## Echo-Owned IR Requirements + +The current Echo IR has enough for basic op ids and vars: + +- op kind; +- op name; +- op id; +- args; +- result type; +- schema hash; +- codec id; +- registry version. + +Optic bindings need additional optional metadata. Add only what RED tests prove +missing, but expect these fields: + +```json +{ + "contract_family": "toy-counter", + "projection_version": 1, + "reducer_version": null, + "ops": [ + { + "kind": "QUERY", + "name": "counterValue", + "op_id": 1002, + "optic": { + "focus": "worldline", + "aperture": "query_bytes", + "projection_law": "contract_query" + } + }, + { + "kind": "MUTATION", + "name": "increment", + "op_id": 1001, + "optic": { + "intent_family": "contract_mutation", + "admission_law": "contract_default" + } + } + ] +} +``` + +Do not make GraphQL directives the Echo runtime API. Directives may feed Wesley +IR. The Echo-owned generator consumes IR and emits Rust DTOs. + +## Registry And Artifact Identity + +Generated optic bindings must include registry/artifact identity in every +request they build. 
+ +At minimum: + +- schema hash; +- codec id; +- registry version; +- contract family id; +- op id; +- projection version; +- reducer version when present. + +Future authenticated admission may add artifact attestation and capability +certificates. The generated binding must leave a slot for those identities +without requiring production crypto in the first slice. + +## no_std Requirements + +The generated optic bindings must preserve the existing `--no-std` path. + +Requirements: + +- use `alloc::vec::Vec` and `alloc::string::String` when needed; +- no ambient filesystem, time, randomness, or host IO; +- compile in a no-std consumer smoke crate; +- expose request-builder helpers even when convenience dispatch helpers require + a std-hosted trait object. + +If convenience dispatch helpers cannot be no-std, gate them. Request builders +should remain no-std-capable. + +## Tests + +Add generator tests before implementation: + +1. Generated query op emits `*_observe_optic_request`. +2. Generated mutation op emits `*_dispatch_optic_intent_request`. +3. Mutation helper requires explicit base coordinate. +4. Generated request includes optic id, focus, intent family, capability, + actor/cause, and admission law. +5. EINT bytes remain inner payload and decode to the original op id/vars. +6. No generated helper is named `set_*`. +7. Generated output compiles under std and no-std consumer crates. +8. Existing `ObservationRequest` and `pack_*_intent` helpers remain available + during migration. + +## Acceptance Criteria + +- Application code can interact with typed generated optic bindings. +- Application code does not need to manually pack EINT for the happy path. +- Echo still receives explicit `ObserveOpticRequest` or + `DispatchOpticIntentRequest`. +- Dispatch requests always name causal basis unless an explicit named base + policy says otherwise. +- Generated read helpers remain bounded and aperture-aware. +- Echo core remains generic and imports no application nouns. 
+- Intent admission remains witnessable through Echo receipts. + +## Non-Goals + +- Do not make generated bindings mutable handles. +- Do not add inheritance/subclass machinery to Echo core. +- Do not make Intent dispatch disappear from Echo's public boundary. +- Do not replace EINT v1 until a RED proves it insufficient. +- Do not make GraphQL the runtime API. +- Do not add jedit-specific generated behavior. diff --git a/docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md b/docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md new file mode 100644 index 00000000..7ec6cc84 --- /dev/null +++ b/docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md @@ -0,0 +1,264 @@ + + + +# Reading envelope family boundary + +Status: Accepted and partially implemented. + +Depends on: + +- [0011 - Optic and observer runtime doctrine](../0011-optic-observer-runtime-doctrine/design.md) +- [0018 - Echo Optics API Design](../0018-echo-optics-api-design/design.md) + +## Decision + +Echo has one generic read-side family boundary: + +```text +ObservationRequest + -> observer plan / optional observer instance + -> bounded runtime read + -> ObservationArtifact { + resolved coordinate, + ReadingEnvelope, + frame, + projection, + payload, + artifact_hash + } + +ObserveOpticRequest + -> ObservationRequest bridge, when the aperture maps to built-in reads + -> OpticReading { + ReadingEnvelope, + ReadIdentity, + ObservationPayload, + optional RetainedReadingKey + } +``` + +The reading envelope is not a UI wrapper and not a cache handle. It is the +runtime evidence envelope for an observer-relative reading. It names enough +read posture for downstream consumers to know what question was answered, under +which law, at which causal basis, and with which budget/rights/residual posture. + +Echo does not expose a global graph result type. Echo emits bounded, +coordinate-anchored readings. 
+ +## Family Boundary + +This design separates three families that must not collapse into one bag. + +### Authored Family + +The authored family names the read law a contract, adapter, or kernel observer +intends to use. + +Current type anchors: + +- `ReadingObserverPlan` +- `BuiltinObserverPlan` +- `AuthoredObserverPlan` +- `ObserverPlanId` + +Rules: + +- Built-in plans are kernel-owned and finite. +- Authored plans are identified by plan id plus artifact/schema/law hashes. +- Echo core must not import application nouns to understand an authored plan. +- If an authored plan is requested but not installed, the read obstructs. + +### Compiled Or Installed Artifact Family + +The compiled or installed artifact family names the generated code or law object +that can execute the authored read law. + +Current type anchors: + +- `AuthoredObserverPlan::artifact_hash` +- `AuthoredObserverPlan::schema_hash` +- `AuthoredObserverPlan::state_schema_hash` +- `AuthoredObserverPlan::update_law_hash` +- `AuthoredObserverPlan::emission_law_hash` +- `ObserverInstanceRef` + +Rules: + +- Generated artifacts may be Wesley-produced or produced by another lawful + adapter, but Echo sees them generically. +- Runtime observer instances are optional. One-shot built-in reads use `None`. +- Stateful observer reads must name the observer instance and state hash. +- Echo must reject unsupported observer plans or instances instead of falling + back to a built-in read. +- Built-in one-shot request helpers must fail closed when the frame/projection + pair is invalid. They must not silently relabel an invalid request as + `QueryBytes`, because the observer plan participates in the reading contract. + +### Runtime Emitted Value Family + +The runtime emitted value family is the actual read result produced by Echo. 
+ +Current type anchors: + +- `ObservationArtifact` +- `ReadingEnvelope` +- `ObservationPayload` +- `ObservationHashInput` +- `OpticReading` +- `ReadIdentity` +- `RetainedReadingDescriptor` +- `RetainedReadingKey` + +Rules: + +- `ObservationArtifact` carries coordinate resolution, envelope, frame, + projection, payload, and deterministic artifact hash. +- `ReadingEnvelope` carries read evidence posture. +- `OpticReading` pairs the existing envelope with `ReadIdentity`. +- Retained readings are keyed by semantic read identity plus byte identity, not + by CAS content hash alone. + +## Minimum Runtime Fields + +Every emitted reading family must be able to name the following: + +| Field | Current anchor | Why it matters | +| --------------------------- | ------------------------------- | ------------------------------------------------------------ | +| Observer plan identity | `ReadingObserverPlan` | Names the read law. | +| Optional observer instance | `ObserverInstanceRef` | Names stateful observer state when used. | +| Resolved coordinate | `ResolvedObservationCoordinate` | Names what was actually observed. | +| Observer basis | `ReadingObserverBasis` | Names commit-boundary, recorded-truth, or query-view basis. | +| Witness or shell refs | `ReadingWitnessRef` | Names evidence supporting the reading. | +| Parent/strand basis posture | `ObservationBasisPosture` | Preserves strand-relative truth and revalidation needs. | +| Budget posture | `ReadingBudgetPosture` | Prevents hidden full materialization. | +| Rights posture | `ReadingRightsPosture` | Names revelation/capability posture. | +| Residual posture | `ReadingResidualPosture` | Names complete, residual, plurality, or obstruction posture. | +| Payload | `ObservationPayload` | Carries the bounded emitted value. | +| Read identity | `ReadIdentity` for optic reads | Names the semantic question answered. 
| + +## Identity Rules + +`ObservationArtifact::artifact_hash` is computed from +`ObservationHashInput`, which includes: + +- resolved coordinate, +- `ReadingEnvelope`, +- frame, +- projection, +- payload. + +That means envelope posture is part of observation identity. A payload emitted +with different budget, rights, witness, observer, or residual posture is not the +same artifact. + +`ReadIdentity` is the semantic identity of an optic read question. It includes: + +- optic id, +- focus digest, +- coordinate, +- aperture digest, +- projection version, +- reducer version where present, +- witness basis, +- rights posture, +- budget posture, +- residual posture. + +`RetainedReadingKey` is derived from: + +- `ReadIdentity`, +- content hash, +- codec id, +- byte length. + +CAS hashes bytes. `ReadIdentity` names the question those bytes answer. + +## Current Implementation + +Implemented for built-in one-shot observations: + +- `ObservationArtifact::reading` +- `ReadingEnvelope` +- `ReadingObserverPlan` +- `ReadingObserverBasis` +- `ReadingWitnessRef` +- `ReadingBudgetPosture` +- `ReadingRightsPosture` +- `ReadingResidualPosture` +- `ObservationHashInput::reading` +- `ReadIdentity` +- `RetainedReadingKey` +- `ObserveOpticRequest` for bounded head and snapshot metadata reads +- fail-closed obstructions for unsupported apertures, missing witness basis, + unsupported authored observer plans, unsupported observer instances, and + capability-scoped rights without an installed checker + +Still open: + +- authored observer plans +- hosted/stateful observer instances +- app-specific budget and rights enforcement +- obstruction/plurality variants beyond the current `complete` posture +- QueryView contract observers + +## Consumer Contract + +Downstream consumers should depend on this family rather than inventing custom +reading wrappers. 
+ +Allowed: + +- inspect `ReadingEnvelope` before trusting or rendering a payload; +- retain payload bytes by `RetainedReadingKey`; +- reveal retained bytes only when the requested `ReadIdentity` matches; +- adapt the envelope into debugger/editor/replay UI terminology outside Echo. + +Rejected: + +- treating payload bytes as canonical state; +- treating a CAS hash as sufficient semantic identity; +- silently materializing beyond the requested aperture; +- converting unsupported query/view reads into empty successful payloads; +- adding application-specific nouns to Echo core. + +## Tests + +Current test anchors: + +- `ordinary_worldline_observation_reports_worldline_posture` +- `explicit_bounded_observer_request_returns_bounded_reading_artifact` +- `authored_observer_plan_obstructs_without_hidden_builtin_fallback` +- `hosted_observer_instance_obstructs_without_stateful_fallback` +- `builtin_one_shot_rejects_invalid_frame_projection` +- `capability_scoped_observer_rights_obstruct_without_public_fallback` +- `bounded_head_optic_returns_read_identity` +- `read_identity_is_stable_for_same_read_question` +- `read_identity_changes_when_question_or_witness_changes` +- `retained_reading_key_requires_content_hash_and_read_identity` +- `live_tail_read_identity_names_checkpoint_plus_tail` +- `reading_envelope_posture_participates_in_artifact_identity` + +These tests keep the boundary honest: envelope posture participates in artifact +identity, optic reads have semantic identity, retained readings are not keyed by +bytes alone, and unsupported richer observer paths fail closed. 
+ +## Closure Criteria + +- one packet names the minimum reading-envelope fields Echo should emit +- the boundary clearly distinguishes: + - authored family + - compiled artifacts + - runtime-emitted values +- downstream repos can depend on one named family instead of reconstructing + their own "reading result" wrappers +- the family stays narrow enough to be shared by Echo, Continuum, and debugger + consumers + +## Repo evidence + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/observation.rs` +- `crates/warp-core/src/optic.rs` +- `docs/architecture/WARP_DRIFT.md` +- `docs/design/0006-echo-continuum-alignment/design.md` +- `docs/design/0009-witnessed-causal-suffix-sync/design.md` diff --git a/docs/method/backlog/up-next/PLATFORM_echo-cas-browser.md b/docs/design/0020-echo-cas-browser/echo-cas-browser.md similarity index 80% rename from docs/method/backlog/up-next/PLATFORM_echo-cas-browser.md rename to docs/design/0020-echo-cas-browser/echo-cas-browser.md index 0433e7a9..18d7b637 100644 --- a/docs/method/backlog/up-next/PLATFORM_echo-cas-browser.md +++ b/docs/design/0020-echo-cas-browser/echo-cas-browser.md @@ -7,6 +7,12 @@ Validate and wire MemoryTier in the WASM context. echo-cas Phase 1 provides `MemoryTier` (in-memory, `HashMap`-backed). This feature confirms it compiles to WASM, exposes JS bindings for store/retrieve, and validates blob integrity in-browser. +Status: + +- `T-4-3-1` is accepted in this cycle. +- `T-4-3-2` remains deferred as a separate backlog item: + [`PLATFORM_echo-cas-js-bindings.md`](../../method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md). + ## T-4-3-1: MemoryTier WASM compilation gate **User Story:** As a developer, I want echo-cas to compile to `wasm32-unknown-unknown` so that the browser demo can use content-addressed storage. @@ -20,9 +26,9 @@ Validate and wire MemoryTier in the WASM context. 
echo-cas Phase 1 provides `Mem **Acceptance Criteria:** -- [ ] AC1: `cargo build --target wasm32-unknown-unknown -p echo-cas` exits 0 in CI. -- [ ] AC2: All existing echo-cas tests still pass on native (`cargo test -p echo-cas`). -- [ ] AC3: CI matrix includes the WASM target check. +- [x] AC1: `cargo build --target wasm32-unknown-unknown -p echo-cas` exits 0 in CI. +- [x] AC2: All existing echo-cas tests still pass on native (`cargo test -p echo-cas`). +- [x] AC3: CI matrix includes the WASM target check. **Definition of Done:** @@ -33,6 +39,24 @@ Validate and wire MemoryTier in the WASM context. echo-cas Phase 1 provides `Mem **Scope:** WASM compilation verification, CI gate, any necessary `cfg` adjustments to echo-cas. **Out of Scope:** JS bindings (T-4-3-2). Async API. DiskTier (MS-5). +### Result + +- `MemoryTier` builds for `wasm32-unknown-unknown` without collection `cfg` + substitutions. +- `blake3` and `thiserror` compile in the `echo-cas` WASM build lane as-is. +- CI now includes `Build echo-cas (wasm32)`, which runs: + +```sh +cargo build --target wasm32-unknown-unknown -p echo-cas +``` + +Local witnesses: + +```sh +cargo build --target wasm32-unknown-unknown -p echo-cas +cargo test -p echo-cas +``` + **Test Plan:** - **Goldens:** CI artifact: successful `wasm32-unknown-unknown` build log. @@ -50,6 +74,9 @@ Validate and wire MemoryTier in the WASM context. echo-cas Phase 1 provides `Mem ## T-4-3-2: JS bindings for CAS store/retrieve +Status: deferred back to the backlog as +[`PLATFORM_echo-cas-js-bindings.md`](../../method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md). + **User Story:** As a web developer, I want to store and retrieve blobs from JavaScript so that the demo can persist simulation snapshots in content-addressed storage. 
**Requirements:** diff --git a/docs/method/backlog/up-next/KERNEL_parent-drift-owned-footprint-revalidation.md b/docs/design/0021-parent-drift-owned-footprint-revalidation/parent-drift-owned-footprint-revalidation.md similarity index 63% rename from docs/method/backlog/up-next/KERNEL_parent-drift-owned-footprint-revalidation.md rename to docs/design/0021-parent-drift-owned-footprint-revalidation/parent-drift-owned-footprint-revalidation.md index 1b961140..3d057faf 100644 --- a/docs/method/backlog/up-next/KERNEL_parent-drift-owned-footprint-revalidation.md +++ b/docs/design/0021-parent-drift-owned-footprint-revalidation/parent-drift-owned-footprint-revalidation.md @@ -3,9 +3,10 @@ # Parent drift and owned-footprint revalidation +Status: accepted as existing implementation/evidence consolidation. + Depends on: -- [KERNEL_live-holographic-strands](../asap/KERNEL_live-holographic-strands.md) - [0010 — Live-basis settlement correction plan](../../../design/0010-live-basis-settlement-plan/design.md) ## Why now @@ -38,7 +39,7 @@ two bad outcomes: - explicit conflict - the revalidation state is inspectable and not just an internal retry loop -## Current implementation consequence +## Accepted implementation consequence The runtime can now distinguish the two parent-drift classes, and settlement has the first explicit overlap revalidation law: @@ -52,10 +53,38 @@ has the first explicit overlap revalidation law: - overlapped replay that would mutate target state remains `ParentFootprintOverlap` residue with `Conflict` revalidation metadata -Owned-footprint overlap still needs the same posture threaded into observer and -bounded-read artifacts. The current tests cover no-overlap, disjoint parent -advance, clean overlap, and conflicting overlap; an obstruction-specific fixture -should be added when a natural patch-level obstruction case is available. 
+Observation artifacts also carry the same parent-basis posture for live strand +frontier reads: + +- ordinary worldline reads remain `Worldline` +- non-frontier strand reads are `StrandHistorical` +- live strand frontier reads at the anchor are `StrandAtAnchor` +- live strand frontier reads after disjoint parent movement are + `StrandParentAdvancedDisjoint` +- live strand frontier reads after parent movement inside the owned footprint + are `StrandRevalidationRequired`, with deterministic overlap slot evidence + +The current tests cover no-overlap, disjoint parent advance, clean overlap, and +conflicting overlap. An obstruction-specific fixture can still be added later +when a natural patch-level obstruction case is available. + +## Evidence + +- `crates/warp-core/src/strand.rs` defines + `StrandRevalidationState`, `StrandOverlapRevalidation`, and + `Strand::live_basis_report(...)`. +- `crates/warp-core/src/settlement.rs` carries overlap revalidation metadata on + import candidates and conflict artifact drafts. +- `crates/warp-core/src/witnessed_suffix.rs` preserves settlement basis and + overlap revalidation posture through witnessed-suffix ABI conversion. +- `crates/warp-core/src/observation.rs` carries `ObservationBasisPosture` on + reading artifacts and converts it to ABI. 
+- Targeted witnesses: + +```sh +cargo test -p warp-core live_basis_report +cargo test -p warp-core strand_frontier_observation_reports_overlap_revalidation_posture +``` ## Done looks like diff --git a/docs/design/0022-continuum-transport-identity/design.md b/docs/design/0022-continuum-transport-identity/design.md new file mode 100644 index 00000000..2cf51311 --- /dev/null +++ b/docs/design/0022-continuum-transport-identity/design.md @@ -0,0 +1,265 @@ + + + +# 0022 - Continuum transport identity and import idempotence + +_Lock the M027 import/loop-law decision: Echo's witnessed suffix model is the +source shape for Continuum transport, not a local adapter around a thinner +Continuum shell._ + +Legend: PLATFORM + +Depends on: + +- [0009 - Witnessed causal suffix export and import](../0009-witnessed-causal-suffix-sync/design.md) +- [0012 - Witnessed suffix posture canonicalization](../0012-witnessed-suffix-posture-canonicalization/design.md) +- [0018 - Echo Optics API Design](../0018-echo-optics-api-design/design.md) + +## Decision + +The runtime-boundary transport family is being shaped from Echo outward. + +Recorded decisions for this task: + +1. Continuum is free to change. Echo is the first serious consumer of this + boundary, so the shared family should be corrected now instead of preserving + an underspecified schema. +2. Echo's witnessed suffix nouns are promoted into Continuum's runtime-boundary + family. The shared schema should name `WitnessedSuffixShell`, + `CausalSuffixBundle`, `WitnessedSuffixAdmissionResponse`, and the + `ImportOutcome` that wraps them. +3. Echo should consume the Continuum family explicitly once that schema matches + Echo's evidence model. +4. If the current Continuum runtime-boundary shape conflicts with Echo's + witnessed suffix/admission model, Continuum changes. Echo's causal evidence + shape wins. +5. 
Transport identity and import idempotence are facts carried by typed + evidence, not by summary strings, final-state hashes, host-time arrival + order, or runtime-local Lamport clocks. + +## Core Rule + +Echo imports witnessed causal suffix bundles, not state. + +Idempotence is shell equivalence under the retained causal evidence: + +```text +same bundle identity ++ same source shell identity ++ same base and target frontiers ++ same witness basis +=> same import question +``` + +That rule is narrower than visible-state equality and wider than a local receipt +hash. A local receipt proves what this runtime did with a bundle. It is not the +portable identity of the bundle itself. + +## Why This Exists + +The older Continuum runtime-boundary schema had the right intent but a thinner +shape: + +- `SuffixShell` carried frontier digests and counts. +- `ImportOutcome` carried a shared outcome kind and optional receipt reference. + +That is not enough for M027. Echo needs to decide whether a repeated import is: + +- the same bundle arriving again +- self-history returning through another runtime +- a support supplement for already-known evidence +- state-equivalent but witness-distinct history +- a lawful plurality +- a conflict +- an obstruction + +Those cases cannot be collapsed into "same final state" or "same frontier +digest." OG-II's operational point applies directly here: state convergence does +not imply observer convergence. Two imports that materialize the same state can +still differ in provenance, intent, support path, or replayability. 
+ +## Canonical Echo Shape + +Echo already names the minimum runtime shape in code: + +```text +WitnessedSuffixShell { + source_worldline_id, + source_suffix_start_tick, + source_suffix_end_tick, + source_entries, + boundary_witness, + witness_digest, + basis_report, +} + +CausalSuffixBundle { + base_frontier, + target_frontier, + source_suffix, + bundle_digest, +} + +ImportSuffixResult { + bundle_digest, + admission: WitnessedSuffixAdmissionResponse, +} + +WitnessedSuffixAdmissionOutcome = + Admitted { target_worldline_id, admitted_refs, basis_report } + | Staged { staged_refs, basis_report } + | Plural { candidate_refs, residual_posture, basis_report } + | Conflict { reason, source_ref, evidence_digest, overlap_revalidation } + | Obstructed { source_ref, residual_posture, evidence_digest } +``` + +The Continuum family should expose this meaning directly. It may add runtime +metadata around it, such as source runtime, target runtime, history family, or +lane labels, but it must not replace the core suffix evidence with a weaker +state/hash/count summary. + +## Transport Identity + +Transport identity has layers. They must not be confused: + +| Layer | Meaning | May identify duplicates? 
| Must not be used as | +| ------------------- | -------------------------------------------- | --------------------------- | --------------------------- | +| Content hash | Bytes in CAS or payload storage | Exact byte reuse | Causal-history identity | +| Reading identity | The question a retained reading answers | Cached read reuse | Import identity | +| Bundle digest | The witnessed suffix bundle question | Exact import-shell reuse | Local admission receipt | +| Source shell digest | Compact source suffix evidence | Shell equivalence | Target-local tick identity | +| Local receipt | What this runtime admitted/staged/conflicted | Local audit and witness | Portable source identity | +| Local Lamport/tick | Runtime-local ordering coordinate | Local replay/order evidence | Cross-runtime duplicate key | + +Lamport clocks and local ticks can affect local hashes. They therefore cannot be +the universal duplicate key for a transported suffix. They are scoped evidence, +not the shared import identity. + +## Import Idempotence Law + +When Echo receives a `CausalSuffixBundle`, the runtime must: + +1. Verify bundle identity and source shell identity. +2. Resolve the target basis explicitly. +3. Compare the bundle against retained prior import outcomes. +4. Classify the import with typed posture. +5. Return a witnessed result without silently mutating current state. + +Re-import of the exact same bundle should be idempotent. It may return the +prior import outcome or a new local receipt that points at the prior outcome, +but it must not create fake novelty. + +Self-history returning through another runtime is not fresh remote work. It must +be classified as a loop or already-adjudicated import path, with evidence. + +State-equivalent but witness-distinct imports are not duplicates. They may be +support supplements, alternate support paths, lawful plural history, conflicts, +or obstructions depending on evidence and policy. 
+ +## Intent-Driven Mutation Law + +Incoming transport arrival is host I/O. It is not Echo causal history by +itself. + +The causal path is: + +```text +transport adapter receives bytes +-> adapter forms a canonical import proposal +-> dispatch_intent(EINT import intent) +-> ingress envelope +-> scheduler/admission +-> tick + receipt/witness +``` + +The same rule applies to all external topology-changing surfaces: + +- fork worldline / create strand +- append braid member +- collapse or settle braid +- merge / settlement import +- pin or unpin support when exposed to application flows +- admit transported causal suffix +- append inverse / compensating operation + +External callers do not receive setters or direct mutation handles for these +operations. They submit Intents against explicit causal bases. Echo's scheduler +and admission law decide whether the proposal is admitted, staged, plural, +conflicted, or obstructed. + +Internal services may still perform the implementation work after admission, but +they are not public mutation authority. + +## Transport, Strands, And Braids + +Incoming transport is not itself a strand. +Incoming transport is not itself a braid. +Incoming transport is a witnessed suffix claim. + +Admission may realize that claim as: + +- target-lane suffix history +- staged shell evidence +- preserved plurality, including a braid member when the local projection law + calls for one +- conflict artifact +- obstruction + +The property "braided" belongs to the local admission/projection result. It is +not a wire-level or bundle-level transport kind. 
+ +## Continuum Runtime-Boundary Cut + +Continuum's `continuum-runtime-boundary-family.graphql` should use Echo's +witnessed suffix model as the shared family cut: + +- `ProvenanceRef` +- `SettlementBasisReport` +- `SettlementOverlapRevalidation` +- `WitnessedSuffixShell` +- `CausalSuffixBundle` +- `WitnessedSuffixAdmissionOutcome` +- `WitnessedSuffixAdmissionResponse` +- `ImportOutcome` + +The old `SuffixShell` name is too vague for this boundary. If a compatibility +alias is needed later, it should be an adapter term. The family itself should +name the witnessed suffix shell and causal suffix bundle explicitly. + +## M027 RED Targets + +The next implementation slice should add RED tests before changing runtime +behavior: + +1. Exact bundle re-import returns an idempotent import outcome, not fresh + admission. +2. Self-history returning through a peer is classified as a loop or + already-adjudicated import path. +3. Same visible state with different witness/source shell identity is not + deduped by state hash alone. +4. Local tick or Lamport-like order fields do not participate in portable + duplicate detection. +5. `ImportOutcome` preserves the nested + `WitnessedSuffixAdmissionResponse.outcome` variant. +6. Obstruction and conflict evidence remain typed and deterministic. +7. Continuum-compiled runtime-boundary artifacts expose the same suffix nouns + that Echo consumes. +8. Inbound transport affects Echo only through an admitted import Intent. +9. Forking, merging, braiding, settlement, support mutation, and inverse + operations have Intent-level external paths and no public direct mutation + setter. + +## Non-Goals + +This decision does not add: + +- a sync daemon +- direct peer mutation +- last-writer-wins import +- host-time ordering +- materialized state exchange as transport truth +- a git-warp-first schema +- a GraphQL-first Echo runtime API + +GraphQL is the authored family surface that Wesley compiles. 
Echo's runtime +truth remains witnessed causal admission and observation. diff --git a/docs/design/0023-import-transport-intent-admission-path/import-transport-intent-admission-path.md b/docs/design/0023-import-transport-intent-admission-path/import-transport-intent-admission-path.md new file mode 100644 index 00000000..6fcdf687 --- /dev/null +++ b/docs/design/0023-import-transport-intent-admission-path/import-transport-intent-admission-path.md @@ -0,0 +1,111 @@ + + + +# Import transport Intent admission path + +Status: implemented initial staged admission path. + +Depends on: + +- [0022 - Continuum transport identity and import idempotence](../0022-continuum-transport-identity/design.md) + +## Why now + +Echo now has the right witnessed suffix vocabulary and doctrine, but inbound +transport admission is still only documented. The runtime has +`CausalSuffixBundle`, `ImportSuffixRequest`, and the local +`import_suffix(...)` evaluator, but the external causal path is not real until a +transported suffix is submitted as an EINT Intent and admitted through Echo. + +The rule to make executable: + +```text +transport adapter receives bytes +-> adapter forms canonical import proposal +-> dispatch_intent(EINT import intent) +-> ingress / scheduler / admission +-> tick + receipt / witness +``` + +## Goal + +Add the first narrow import-transport Intent family for a `CausalSuffixBundle` +against an explicit target basis. + +This should prove the external path without trying to implement peer sync, +networking, or full idempotence indexing. 
+ +## Likely files touched + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/witnessed_suffix.rs` +- `crates/warp-core/src/cmd.rs` +- `crates/warp-core/src/engine_impl.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/warp-core/tests/**` +- `crates/warp-wasm/tests/**` or `warp_kernel` tests + +## Acceptance criteria + +- A canonical import-transport EINT payload shape exists for: + - bundle or retained bundle ref + - target worldline/focus + - explicit target basis + - admission law/version where needed + - actor/cause placeholder if the current capability model is not ready +- A RED/GREEN test dispatches the import proposal through `dispatch_intent`, + not by calling `import_suffix(...)` as the external path. +- The dispatched intent enters Echo through `IngressEnvelope::local_intent` and + is consumed by scheduler/admission machinery. +- The handler/evaluator returns a typed witnessed suffix outcome: + - admitted + - staged + - plural + - conflict + - obstructed +- The path emits or preserves receipt/witness evidence for the local decision. +- Malformed EINT or malformed import payload returns typed error/obstruction + without mutating causal history. +- Existing direct evaluator functions remain available as internal helpers, not + as public mutation authority. + +## Non-goals + +- Do not add a sync daemon. +- Do not add networking. +- Do not implement `git-warp` interop here. +- Do not solve full duplicate import retention/indexing here. +- Do not add jedit nouns. +- Do not add a second non-EINT intent envelope. +- Do not make transport arrival itself causal history. + +## Test expectations + +- One failing test first proves direct `import_suffix(...)` is not the external + mutation path being exercised. +- One passing test proves the same import proposal goes through EINT, + `dispatch_intent`, ingress, scheduler/admission, and returns a typed outcome. +- One malformed-payload test proves no direct mutation or fake success occurs. 
+ +## Implementation notes + +This slice landed the first causal runtime path, deliberately stopping at a +typed `Staged` result instead of pretending full remote import/settlement +admission is done. + +- `echo-wasm-abi` now defines `IMPORT_SUFFIX_INTENT_V1_OP_ID` and canonical + pack/unpack helpers for `ImportSuffixRequest`. +- `WarpKernel::dispatch_intent` validates the Echo-owned import payload before + accepting it into ingress. Malformed import payloads fail closed and do not + advance the worldline or provenance. +- `warp-core` registers a generic `cmd/import_suffix_intent` handler through the + engine-backed kernel. The handler preserves the original ingress event and + writes a deterministic result node carrying canonical CBOR + `ImportSuffixResult`. +- The initial result outcome is `WitnessedSuffixAdmissionOutcome::Staged`. That + is the honest posture until later slices add basis-aware remote admission, + novelty indexes, and settlement/collapse behavior. + +The important invariant is now executable: transport arrival is still host I/O, +but transport admission enters Echo only after it is wrapped as an EINT intent +and chosen by the scheduler into witnessed causal history. diff --git a/docs/design/continuum-runtime-and-cas-readings.md b/docs/design/continuum-runtime-and-cas-readings.md index 4c51e7f7..fcfe3aa7 100644 --- a/docs/design/continuum-runtime-and-cas-readings.md +++ b/docs/design/continuum-runtime-and-cas-readings.md @@ -17,6 +17,11 @@ Scope: doctrine and implementation runway only Echo is a peer Continuum runtime implementation. +Echo itself is a WARP optic for real-time deterministic simulation. `warp-ttd` +is a WARP optic for debugger inspection. `git-warp` is a WARP optic that +projects witnessed causal history onto Git as a primitive substrate. Wesley is +a WARP optic that rewrites authored schema input into IR and output artifacts. 
+ Echo stores, executes, admits, observes, exports, imports, and retains witnessed causal history in its own runtime style. Echo may use `echo-cas`, indexes, checkpoints, and cached materialized readings for performance and @@ -39,6 +44,22 @@ The substrate is witnessed causal history: State-like values are observer-relative readings over that history. +There is no privileged graph object behind those readings. The graph is a +coordinate chart emitted by an observer or optic over witnessed causal history. + +All public WARP surfaces share one shape: + +```text +bounded causal basis/site ++ law ++ capability, budget, and evidence posture +-> witnessed hologram +``` + +An admission hologram may extend history. A reading hologram observes history. A +retained hologram caches or reveals a derived artifact. None of those +holograms becomes canonical substrate truth merely by existing. + ## Doctrine Correction Older Continuum and Echo docs sometimes describe Echo as the hot side and @@ -60,9 +81,19 @@ Echo does not produce facts for `git-warp`. Echo and `git-warp` both produce, admit, retain, and observe witnessed causal history. They interoperate by exchanging protocol-shaped causal artifacts, not by sharing runtime internals. -The browser analogy is the useful one: Continuum is the shared protocol and -contract law; Echo and `git-warp` are independent implementations with their -own engines, caches, storage models, and developer surfaces. +The browser analogy is the useful one. More directly, Continuum is HTTP-like: +it is the shared WARP protocol and contract law that lets independent runtimes +exchange lawful causal-history artifacts. Echo and `git-warp` are independent +implementations with their own engines, caches, storage models, and developer +surfaces. + +Echo and `git-warp` are compatible because they can speak this protocol. They +are not compatible because they both model a canonical graph. There is no +canonical graph. 
+ +`warp-ttd` and Wesley fit the same frame. They may speak shared Continuum +families where that is useful, but their compatibility comes from lawful +causal/artifact exchange, not from sharing an internal graph representation. ## Echo As A Continuum Runtime @@ -144,6 +175,10 @@ graph. No public API should imply that Echo owns one canonical graph or state object that all observers secretly read. +Canonical architecture note: + +- [There Is No Graph](../architecture/there-is-no-graph.md) + ## echo-cas Role `echo-cas` is Echo's content-addressed retention layer. It may store: @@ -183,6 +218,67 @@ Example coordinate components: `echo-cas` may retain the answer. It must not become the semantic authority for the question. +## Holographic Retention Pressure + +Echo should assume memory and local disk are finite. + +The answer is not to materialize a full graph state at every tick. Echo should +retain witnessed causal history and enough boundary artifacts to support +bounded replay, bounded reveal, and honest obstruction. Optics then read by +slicing the required causal history, lowering only the focused aperture, and +optionally retaining the emitted reading. + +For example, an optic that asks for `x` at coordinate `n+2` should be able to +use an index to find the nearest retained basis that affects `x`, stream the +required causal slice, lower the value, and retain the answer under a semantic +read key such as: + +```text +focus=x +coordinate=n+2 +aperture=value +witness_basis=... +projection_version=... +reducer_version=... +``` + +The retained bytes may live in `echo-cas` under a content hash, but the lookup +key is the read identity. A later optic that asks the same question may reveal +the retained bytes directly. A different coordinate, aperture, witness basis, +projection version, reducer version, rights posture, or budget posture is a +different question even if it happens to emit identical bytes. + +Indexes that make this fast are performance aids. 
They should be streamable and +should not assume the full graph, full provenance log, or full index can fit in +memory at once. If the necessary retained basis or causal evidence is no longer +available locally, the read must return an obstruction or a rehydration-required +posture rather than secretly materializing unrelated state or pretending a cache +hit answers a different question. + +Cache pressure is storage policy: + +- evicting a cached reading does not rewrite history +- evicting an index shard does not invalidate receipts +- deleting unpinned CAS cache bytes may make a fast reveal unavailable +- deleting required witness material requires either rehydration or obstruction +- durable archival policy is separate from the content hash itself + +`echo-cas` implementations may use content-defined chunking to reduce storage. +For large blobs or retained readings, a CAS tier may split bytes into variable +chunks chosen by content, MIME type, layout hints, or storage policy; buzhash +chunking is one plausible implementation technique. This can deduplicate +repeated substrings or common retained regions across related readings. + +Chunking policy must remain below causal semantics: + +- chunk boundaries are storage layout, not read identity +- changing a chunker must not change Intent identity, tick identity, receipt + identity, admission outcome, or replay result +- semantic references above CAS must still name contract/schema/type/layout + information where that information is required +- canonical byte encodings used by Echo history remain canonical before bytes + enter retention + ## Cached Reading Invalidation Cached readings are immutable answers at a named basis. Echo should not mutate @@ -532,6 +628,12 @@ They do not exchange: The shared law is witnessed causal admission. Each runtime remains free to store, cache, index, schedule, and observe in the way that fits its purpose. 
+Continuum therefore sits at the protocol boundary, not below the runtime as a +shared storage engine. It standardizes the lawful causal-history exchange, the +admission/reading families, and the evidence carried across boundaries. It does +not standardize an internal graph representation because WARP has no privileged +graph representation to standardize. + ## Implementation Runway ### Step 1: Keep This As Doctrine diff --git a/docs/determinism/dind-harness.md b/docs/determinism/dind-harness.md index 65f6aec4..0b83efbb 100644 --- a/docs/determinism/dind-harness.md +++ b/docs/determinism/dind-harness.md @@ -59,6 +59,30 @@ The FootprintGuard is active in debug builds unless the `unsafe_graph` feature is enabled, meaning undeclared reads or writes surface as a `FootprintViolation` before convergence checks can hide the issue. +### Snapshot/Restore Fuzz Gate + +`warp-core` also carries a snapshot/restore fuzz gate for replay-state +serialization determinism: + +```sh +cargo test -p warp-core --test snapshot_restore_fuzz +``` + +The gate builds a deterministic 500-tick worldline, snapshots materialized state +at 50 deterministic pseudo-random coordinates, restores the snapshot from +canonical WSC bytes, replays the remaining suffix from recorded provenance, and +compares the restored `state_root` with the uninterrupted run. The report names +the snapshot tick, restore tick, comparison tick, and expected/actual hashes for +each iteration. + +The current applicable snapshot format is canonical WSC v1. If Echo adds a +separate debug snapshot encoding later, that format should be added to the same +matrix rather than becoming an un-gated restore path. + +The corruption test flips one stored WSC byte. Passing behavior is either a +closed restore/validation failure or an explicit suffix-replay hash mismatch; +silent success is not acceptable. 
+ ## Convergence scope (Invariant B) For commutative scenarios, `MANIFEST.json` can specify a `converge_scope` diff --git a/docs/index.md b/docs/index.md index 8a138d6b..a0d51fd0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -8,6 +8,7 @@ Echo's live documentation centers on the runtime carrier, the retained witnesses ## Start Here - Runtime model: [/architecture/outline](/architecture/outline) +- WSC, Verkle, IPA, and retained readings: [/architecture/wsc-verkle-ipa-retained-readings](/architecture/wsc-verkle-ipa-retained-readings) - Application contract hosting: [/architecture/application-contract-hosting](/architecture/application-contract-hosting) - Theory map: [/theory/THEORY](/theory/THEORY) - Current bearing: [/BEARING](/BEARING) @@ -39,6 +40,7 @@ Echo's live documentation centers on the runtime carrier, the retained witnesses - JS to canonical CBOR mapping: [/spec/js-cbor-mapping](/spec/js-cbor-mapping) - ABI golden vectors: [/spec/abi-golden-vectors](/spec/abi-golden-vectors) - WARP view protocol: [/spec/warp-view-protocol](/spec/warp-view-protocol) +- WSC, Verkle, IPA, and retained readings: [/architecture/wsc-verkle-ipa-retained-readings](/architecture/wsc-verkle-ipa-retained-readings) ## Determinism Evidence diff --git a/docs/invariants/FIXED-TIMESTEP.md b/docs/invariants/FIXED-TIMESTEP.md index f6f75c78..aa2240e0 100644 --- a/docs/invariants/FIXED-TIMESTEP.md +++ b/docs/invariants/FIXED-TIMESTEP.md @@ -43,17 +43,29 @@ downstream structures MUST NOT contain a per-tick dt field. ### R5 — Tick-denominated time semantics -All TTL, deadline, retry, and expiry semantics MUST be -tick-denominated. Wall-clock durations MUST NOT appear in semantic -state. Timers are expressed as tick counts or epoch counts. +All TTL, deadline, retry, and expiry semantics MUST be expressed in +HistoryTime: ticks, epochs, causal coordinates, or admitted timer +events. Wall-clock durations MUST NOT appear in semantic state as +hidden expiry authority. 
Timers are expressed as tick counts, epoch +counts, or admitted timer-event history. + +Timers are not mutable host-local handles. A timer request is an +Intent. Submitting the Intent does not start the semantic timer. The +scheduler/admission path MUST choose an explicit admission outcome, and +only an admitted timer-start tick arms the semantic timer. A timer +firing, expiry, or cancellation is also an Intent. It becomes semantic +only if admitted against an explicit causal basis that names the +admitted start/request receipt. ### R6 — HostTime enters only through canonical decisions HostTime (wall-clock, frame time, real-time telemetry) MUST NOT -influence simulation semantics directly. HostTime MAY influence -semantics only through a recorded canonical decision — an adapter -emits a deterministic decision record before the simulation consumes -the result. The decision record is the artifact of record, not the +influence simulation semantics directly. HostTime MUST NOT directly +affect admission, commit identity, read identity, replay outcome, or +causal ordering. HostTime MAY influence semantics only through a +recorded canonical decision — an adapter emits a deterministic +decision record before the simulation consumes the result. The +admitted decision record is the artifact of record, not the wall-clock value that motivated it. ### R7 — Cross-worldline operations require identical tick_quantum @@ -64,6 +76,132 @@ between worldlines with different quanta MUST be rejected in v1. Equal tick numbers represent equal elapsed simulation time only when the quanta match. +## Time field classification + +Echo distinguishes deterministic causal time from host-observed time. + +- **HistoryTime** names deterministic causal coordinates: ticks, + worldline append positions, runtime scheduler cycle coordinates, + receipt ticks, and tick-denominated deadlines. 
+- **HostTime** names wall-clock, monotonic host clocks, browser + timestamps, adapter-local timestamps, pacing durations, logging + timestamps, and UI telemetry. + +This classification is about semantic authority. A HistoryTime field +may still be diagnostic metadata rather than a commit hash input; a +HostTime field may still be useful telemetry. The boundary is that +HostTime is never consumed as deterministic history unless it first +becomes an admitted canonical decision record. + +| Surface / field | Class | Rationale | +| ---------------------------------------------------------------------- | ----------- | -------------------------------------------------------------------------- | +| `WorldlineTick` | HistoryTime | Per-worldline logical append coordinate; explicitly not wall-clock time. | +| `GlobalTick` | HistoryTime | Runtime-cycle logical correlation coordinate; no wall-clock semantics. | +| `SchedulerStatus.latestCycleGlobalTick` | HistoryTime | Reports the latest runtime scheduler cycle coordinate. | +| `SchedulerStatus.latestCommitGlobalTick` | HistoryTime | Reports the scheduler cycle coordinate that produced the latest commit. | +| `SchedulerStatus.lastQuiescentGlobalTick` | HistoryTime | Reports the scheduler cycle coordinate at quiescence. | +| `TtdrHeader.tick` | HistoryTime | Tick receipt coordinate for witnessed deterministic verification. | +| TTD protocol `tick` / `fromTick` / `toTick` / `targetTick` fields | HistoryTime | Cursor, seek, violation, snapshot, and truth-frame coordinates. | +| TTD protocol `initialTick` / `finalTick` fields | HistoryTime | Cursor lifecycle tick coordinates, not host timestamps. | +| TTD protocol `deadlineTick` fields | HistoryTime | Deadlines are tick-denominated semantic time. | +| Legacy `OpEnvelope.ts` | HostTime | Monotonic per-host transport timestamp; must not order causal history. 
| +| Generated TTD protocol `timestamp` / `Timestamp` fields | HostTime | Milliseconds-since-epoch event telemetry; not replay/admission authority. | +| Hook, CI, and verification timing fields such as `elapsed_ms` or dates | HostTime | Tooling telemetry and audit logs; outside the deterministic history plane. | + +## Timer and deadline doctrine + +Timer causality is admitted, not observed implicitly. + +The safe timer pattern is: + +1. An adapter or contract submits a timer Intent. +2. Echo validates capability, causal basis, and admission law. +3. The scheduler/admission path returns a typed outcome such as + `Admitted`, `Staged`, `Plural`, `Conflict`, or `Obstructed`. +4. Only an admitted `timer.start` tick arms the semantic timer. +5. A later adapter observation MAY cause submission of `timer.fire`, + `timer.expire`, or `timer.cancel`. +6. Only an admitted fire/expire/cancel tick changes semantic history. +7. Replay consumes the admitted ticks and receipts. Replay MUST NOT + recalculate elapsed wall-clock time. + +HostTime may cause an adapter to propose timer Intents. HostTime is +not itself the semantic decision. The admitted tick and its receipt are +the artifact of record. + +### Worked example + +```text +C100: coordinate before timer request +I0: Intent(timer.start, + timer_id = A, + requested_delay_hint = 5s, + base = C100) +O0: AdmissionOutcome::Admitted(tick = T100, receipt = R100) + +Host wall clock later wakes the adapter. + +I1: Intent(timer.fire, + timer_id = A, + start_receipt = R100, + observed_host_delay_hint = 5.02s, + base = C149) +O1: AdmissionOutcome::Admitted(tick = T150, receipt = R150) +``` + +The semantic facts are `T100` and `T150`. The wall-clock delay may +explain why the adapter proposed `I1`, but replay, rewind, fork, and +read identity consume only the admitted timer ticks and receipts. + +If `I0` is `Staged`, `Conflict`, or `Obstructed`, timer `A` is not +armed. 
If `I1` cites a missing, stale, conflicting, or unadmitted +start receipt, Echo MUST reject, stage, preserve plurality, or return a +typed obstruction. It MUST NOT silently mutate the latest frontier. + +A paused observer view does not advance its observed HistoryTime +coordinate. A tick-based TTL in that view does not expire merely +because host wall-clock time passed. A live writer may continue to +admit new ticks on its own frontier, but any reading must name the +coordinate whose timer state it observes. + +### TTL and deadline touch points + +| Touch point | Semantic rule | +| ------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | +| Session keep-alive | Transport pings are HostTime telemetry. Semantic liveness requires an admitted heartbeat, timeout, or obstruction event. | +| Admission budgets | Semantic budgets are ticks, operations, bytes, fuel, or explicit admission posture. Host-time execution limits are tooling guards, not state facts. | +| Retry policies | Retry eligibility is a tick/epoch coordinate or an admitted retry Intent. A wall-clock sleep may only trigger proposal of that Intent. | +| Wormhole/checkpoint retention | Semantic validity is tied to retained receipts, ranges, and witness basis. HostTime retention may evict cache bytes only if reads fail closed. | +| Cached and retained readings | Cache age is operational. A retained reading remains valid only for its read identity, coordinate, witness basis, versions, aperture, and posture. | +| Adapter-driven real-time timers | Wall-clock wakeups may propose timer fire/expire Intents. Only admitted fire/expire ticks affect replayable semantics. 
| + +### Violation checklist + +Treat any of the following as a violation until proven otherwise: + +- Replay, reducer, query, admission, or scheduler code calls `now()`, + `Instant::now`, `SystemTime::now`, sleep, browser timestamps, or + host frame time to decide semantic expiry. +- A committed artifact stores `expires_at_ms`, `deadline_unix_ms`, + wall-clock duration, or similar HostTime fields as causal authority. +- A timer fire/expiry path assumes submission success instead of + checking for an admitted start receipt and admitted fire receipt. +- A cache or retention TTL returns stale bytes as a live reading + without naming the original read identity and coordinate. +- Missing retained history is treated as successful expiry or successful + replay instead of returning a typed obstruction. + +## Static enforcement + +The current static guard is `scripts/ban-nondeterminism.sh`. It scans +determinism-critical crate paths and bans wall-clock and pacing APIs +including `std::time::SystemTime`, `SystemTime::now`, +`std::time::Instant`, `Instant::now`, `std::thread::sleep`, and +async runtime sleep calls. The release allowlist rules in +`docs/determinism/RELEASE_POLICY.md` require every exemption to prove +that the nondeterministic API cannot reach the deterministic engine +loop. + ## Rationale Echo's hardest open problem is canonical cross-worldline settlement: @@ -97,5 +235,6 @@ as uniform integers with no per-tick metadata. 
- [SPEC-0004 — Worldlines, Playback, and Observation](../spec/SPEC-0004-worldlines-playback-truthbus.md) - [SPEC-0005 — Provenance Payload](../spec/SPEC-0005-provenance-payload.md) -- `CONTINUUM.md` — repo-root hot runtime time model +- [Continuum foundations](../architecture/continuum-foundations.md) — archived + bridge note for older Continuum framing - `warp_geom::Tick` — code-level precedent diff --git a/docs/invariants/STRAND-CONTRACT.md b/docs/invariants/STRAND-CONTRACT.md index b0109063..d81ed08a 100644 --- a/docs/invariants/STRAND-CONTRACT.md +++ b/docs/invariants/STRAND-CONTRACT.md @@ -7,18 +7,26 @@ ## Invariant -A strand is a named, ephemeral, speculative execution lane derived -from a base worldline. It is a relation over a child worldline -created by `ProvenanceStore::fork()`, not a separate substrate. A -strand either exists in the `StrandRegistry` (live) or does not -(dropped). There is no tombstone state. +A strand is a named, session-scoped speculative execution lane derived +from a base worldline. It records an immutable parent anchor, owns local +divergence over a closed footprint, and realizes reads against an explicit +parent basis. The current implementation may still use a child worldline +created by `ProvenanceStore::fork()` as a realization detail, but the +public invariant is live-basis semantics: parent movement outside the owned +footprint may flow through, and parent movement inside the owned footprint +requires explicit revalidation, conflict, or obstruction. + +A strand either exists in the `StrandRegistry` (live) or does not (dropped). +Dropping a strand releases the live handle and implementation-local caches or +worldline machinery. It must not be interpreted as proof that the speculative +lane was never real. ## Invariants The following invariants are normative. "MUST" and "MUST NOT" follow RFC 2119 convention. -### INV-S1 — Immutable base +### INV-S1 — Immutable parent anchor A strand's `base_ref` MUST NOT change after creation. 
The `BaseRef` pins the exact provenance coordinate the strand was forked from: @@ -26,6 +34,10 @@ source worldline ID, fork tick (last included tick in the copied prefix), commit hash at fork tick, output boundary hash (state root after applying the patch), and a `ProvenanceRef` handle. +The anchor is not the full realized basis forever. Live reads and settlement +planning MUST compare the anchor with current parent history and report the +resulting basis posture. + ### INV-S2 — Own heads A strand's child worldline MUST NOT share writer heads with its base @@ -78,20 +90,34 @@ declared pin MUST: - avoid self-reference and duplicate targets - remain read-only support, not write authority -### INV-S10 — Clean drop +### INV-S10 — Live-basis revalidation + +When a strand is realized at a frontier, the runtime MUST report one of these +basis postures: + +- parent remains at the strand anchor; +- parent advanced outside the strand-owned closed footprint; +- parent advanced inside the owned footprint and revalidation is required. + +Reads, settlement, and comparison MUST preserve that posture instead of +pretending every strand remains a frozen fork from its anchor. + +### INV-S11 — Clean drop After `drop_strand`, no runnable heads for the child worldline MUST remain in the `PlaybackHeadRegistry`. Drop is hard-delete: the strand, its child worldline, its heads, and its provenance are all -removed. `get(strand_id)` returns `None` after drop. A `DropReceipt` -is returned as the only proof the strand existed. +removed from the live session machinery. `get(strand_id)` returns `None` after +drop. A `DropReceipt` is returned as the session-local proof that the strand was +dropped. This cleanup rule is lifecycle hygiene, not the ontology of a strand. ## Rationale -Echo can fork worldlines via `ProvenanceStore::fork()` but has no -concept of the relationship between forked worldlines. 
The strand -contract names that relationship explicitly: what was forked, from -where, with what heads, under what lifecycle rules. +Echo can fork worldlines via `ProvenanceStore::fork()`, but a strand is not +merely a copied prefix. The strand contract names the relationship explicitly: +what parent coordinate anchored it, what local divergence it owns, what parent +basis it is being realized against, and what posture is required when parent +history moves. This enables warp-ttd to surface strand topology through its existing `LaneKind::STRAND` and `LaneRef.parentId` protocol, and it provides @@ -103,4 +129,6 @@ strands into base worldlines under channel policy). - [FIXED-TIMESTEP](./FIXED-TIMESTEP.md) — inherited quantum - [SPEC-0004 — Worldlines](../spec/SPEC-0004-worldlines-playback-truthbus.md) - [SPEC-0005 — Provenance Payload](../spec/SPEC-0005-provenance-payload.md) +- `docs/design/0008-strand-settlement/design.md` +- `docs/design/0010-live-basis-settlement-plan/design.md` - `warp_core::strand` — code-level implementation diff --git a/docs/macros.tex b/docs/macros.tex deleted file mode 100644 index 02dce5ad..00000000 --- a/docs/macros.tex +++ /dev/null @@ -1,11 +0,0 @@ -% SPDX-License-Identifier: Apache-2.0 OR LicenseRef-MIND-UCAL-1.0 -% © James Ross Ω FLYING•ROBOTS -% Macros for the WARPs paper -% Shared commands to keep notation consistent across the manuscript. -\RequirePackage{url} -\DeclareRobustCommand{\AION}{\textrm{AI}\ensuremath{\Omega}\textrm{N}} -\DeclareRobustCommand{\AIONProjectURL}{\url{https://github.com/flyingrobots/aion}} - -% WARP term: small caps in prose, italic in math. -% Force upright small caps in text to avoid missing scit font shapes. 
-\DeclareRobustCommand{\WARP}{\ifmmode\mathit{WARP}\else{\upshape\scshape warp}\fi} diff --git a/docs/man/echo-cli-bench.1 b/docs/man/echo-cli-bench.1 index bd5e8895..efb71b3a 100644 --- a/docs/man/echo-cli-bench.1 +++ b/docs/man/echo-cli-bench.1 @@ -1,10 +1,10 @@ .ie \n(.g .ds Aq \(aq .el .ds Aq ' -.TH echo-cli-bench 1 "echo-cli-bench " +.TH echo-cli-bench 1 "echo-cli-bench " .SH NAME echo\-cli\-bench \- Run benchmarks and format results .SH SYNOPSIS -\fBecho\-cli\-bench\fR [\fB\-\-filter\fR] [\fB\-h\fR|\fB\-\-help\fR] +\fBecho\-cli\-bench\fR [\fB\-\-filter\fR] [\fB\-\-baseline\fR] [\fB\-h\fR|\fB\-\-help\fR] .SH DESCRIPTION Run benchmarks and format results .SH OPTIONS @@ -12,5 +12,8 @@ Run benchmarks and format results \fB\-\-filter\fR \fI\fR Filter benchmarks by pattern .TP +\fB\-\-baseline\fR \fI\fR +Compare current medians against a saved baseline +.TP \fB\-h\fR, \fB\-\-help\fR Print help diff --git a/docs/man/echo-cli-inspect.1 b/docs/man/echo-cli-inspect.1 index 1cfef011..cc7d2ef1 100644 --- a/docs/man/echo-cli-inspect.1 +++ b/docs/man/echo-cli-inspect.1 @@ -1,10 +1,10 @@ .ie \n(.g .ds Aq \(aq .el .ds Aq ' -.TH echo-cli-inspect 1 "echo-cli-inspect " +.TH echo-cli-inspect 1 "echo-cli-inspect " .SH NAME echo\-cli\-inspect \- Inspect a WSC snapshot .SH SYNOPSIS -\fBecho\-cli\-inspect\fR [\fB\-\-tree\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> +\fBecho\-cli\-inspect\fR [\fB\-\-tree\fR] [\fB\-\-raw\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> .SH DESCRIPTION Inspect a WSC snapshot .SH OPTIONS @@ -12,6 +12,9 @@ Inspect a WSC snapshot \fB\-\-tree\fR Show ASCII tree of graph structure .TP +\fB\-\-raw\fR +Show attachment payload bytes as hex instead of decoding known payloads +.TP \fB\-h\fR, \fB\-\-help\fR Print help .TP diff --git a/docs/man/echo-cli-verify.1 b/docs/man/echo-cli-verify.1 index 92a2bfc9..be17ed7d 100644 --- a/docs/man/echo-cli-verify.1 +++ b/docs/man/echo-cli-verify.1 @@ -1,10 +1,10 @@ .ie \n(.g .ds Aq \(aq .el .ds Aq ' -.TH echo-cli-verify 1 
"echo-cli-verify " +.TH echo-cli-verify 1 "echo-cli-verify " .SH NAME echo\-cli\-verify \- Verify hash integrity of a WSC snapshot .SH SYNOPSIS -\fBecho\-cli\-verify\fR [\fB\-\-expected\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> +\fBecho\-cli\-verify\fR [\fB\-\-expected\fR] [\fB\-h\fR|\fB\-\-help\fR] <\fISNAPSHOT\fR> .SH DESCRIPTION Verify hash integrity of a WSC snapshot .SH OPTIONS diff --git a/docs/man/echo-cli.1 b/docs/man/echo-cli.1 index d9be7929..78945e34 100644 --- a/docs/man/echo-cli.1 +++ b/docs/man/echo-cli.1 @@ -1,6 +1,6 @@ .ie \n(.g .ds Aq \(aq .el .ds Aq ' -.TH echo-cli 1 "echo-cli 0.1.0" +.TH echo-cli 1 "echo-cli 0.1.0" .SH NAME echo\-cli \- Echo developer CLI .SH SYNOPSIS @@ -11,19 +11,9 @@ Echo developer CLI .TP \fB\-\-format\fR \fI\fR [default: text] Output format (text or json) -.br - -.br -\fIPossible values:\fR -.RS 14 -.IP \(bu 2 -text: Human\-readable text output -.IP \(bu 2 -json: Machine\-readable JSON output -.RE .TP \fB\-h\fR, \fB\-\-help\fR -Print help (see a summary with \*(Aq\-h\*(Aq) +Print help .TP \fB\-V\fR, \fB\-\-version\fR Print version diff --git a/docs/method/README.md b/docs/method/README.md index ddcf75b6..4e030651 100644 --- a/docs/method/README.md +++ b/docs/method/README.md @@ -130,6 +130,16 @@ PLATFORM_xtask-method-cli.md debt-scheduler-god-module.md ``` +### Visibility + +Backlog cards must not hide executable subtasks that need independent +scheduling or dependency tracking. If a card discovers a sequence of +implementation slices, promote those slices into visible backlog cards and +connect them with `Depends on:` links. + +A card may remain as an index for a design packet or hill, but that index must +not be the only place executable work exists. + ### Promoting When a backlog item is pulled into a cycle, it becomes a design doc: @@ -315,6 +325,7 @@ If you can answer these questions by reading the repo, you do not need a standup: - What is everyone working on? 
→ active design docs in `docs/design/` + that do not have a matching `docs/method/retro//retro.md` - What is committed? → each design doc names its sponsors and hill - What is next? → `ls docs/method/backlog/asap/` - What closed, failed, or drifted? → `ls docs/method/retro/` @@ -387,6 +398,11 @@ following commands are implemented: | `cargo xtask method inbox "idea"` | Capture a backlog note in `inbox/`. | | `cargo xtask method status` | Summarize backlog lanes, active cycles, and legend load. | | `cargo xtask method status --json` | Emit the same status report for agents and tooling. | +| `cargo xtask method matrix` | Regenerate `task-matrix.md` and `task-matrix.csv`. | +| `cargo xtask method dag` | Regenerate `task-dag.dot` and `task-dag.svg`. | +| `cargo xtask method frontier` | Print tasks with no unresolved backlog-task blockers. | +| `cargo xtask method critical-path` | Print the unweighted longest dependency chain. | +| `cargo xtask method check-dag` | Fail if graph artifacts are stale or cyclic. | The following commands are planned but **not yet implemented**: diff --git a/docs/method/backlog/asap/DOCS_cli-man-pages.md b/docs/method/backlog/asap/DOCS_cli-man-pages.md deleted file mode 100644 index e9242e76..00000000 --- a/docs/method/backlog/asap/DOCS_cli-man-pages.md +++ /dev/null @@ -1,55 +0,0 @@ - - - -> **Milestone:** Developer CLI | **Priority:** P0 - -# Docs/man pages (#51) - -CLI documentation: man pages, usage examples, and integration with the docs site. - -Status: partially implemented. `clap_mangen`, `cargo xtask man-pages`, and the -checked-in `docs/man/echo-cli*.1` pages exist. The remaining work is README -copy-paste examples plus a CI freshness gate that regenerates and diffs the -man pages. - -## T-6-5-1: Man page generation and README examples - -**User Story:** As a developer, I want `man echo-cli` to work and the README to have copy-pasteable examples so that CLI usage is discoverable. 
- -**Requirements:** - -- R1: Use the existing `clap_mangen` dependency and `cargo xtask man-pages` - command to generate man pages to `docs/man/`. -- R2: Generate man pages for the top-level command and each current subcommand - (`verify`, `bench`, `inspect`). -- R3: Add a "CLI Usage" section to the repository README with examples for each subcommand. -- R4: CI step verifies man pages are up-to-date (regenerate and diff; fail if stale). - -**Acceptance Criteria:** - -- [ ] AC1: `man docs/man/echo-cli.1` renders correctly in a terminal. -- [ ] AC2: `man docs/man/echo-cli-verify.1` shows verify-specific options and examples. -- [ ] AC3: CI fails if someone changes clap args without regenerating man pages. -- [ ] AC4: README examples are copy-pasteable and exit 0 when run against a valid fixture. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Man page generation, xtask integration, README section, CI freshness check. -**Out of Scope:** mdbook integration. Online docs site deployment. Localization. - -**Test Plan:** - -- **Goldens:** Generated man pages checked in; CI diffs against regenerated output. -- **Failures:** Stale man pages (CI gate). -- **Edges:** Subcommand with no specific options (man page should still be generated with inherited global flags). -- **Fuzz/Stress:** N/A. - -**Blocked By:** T-6-1-1, T-6-2-1, T-6-3-1, T-6-4-1 -**Blocking:** none - -**Est. 
Hours:** 5h -**Expected Complexity:** ~100 LoC (xtask) + generated man pages diff --git a/docs/method/backlog/asap/KERNEL_determinism-torture.md b/docs/method/backlog/asap/KERNEL_determinism-torture.md deleted file mode 100644 index c2106c02..00000000 --- a/docs/method/backlog/asap/KERNEL_determinism-torture.md +++ /dev/null @@ -1,99 +0,0 @@ - - - -> **Milestone:** Proof Core | **Priority:** P1 - -# Determinism Torture Harness - -Prove that single-threaded and multi-threaded execution produce identical results. Snapshot/restore fuzz to catch nondeterminism in state serialization. - -**Issues:** #190 - -Status: active backlog item. Existing determinism gates cover many related -cases; this item is specifically for the remaining 1-thread vs N-thread report -and snapshot/restore fuzz gate. - ---- - -## T-9-1-1: Implement 1-thread vs N-thread determinism harness - -**User Story:** As a release engineer, I want an automated harness that runs the same simulation single-threaded and multi-threaded and proves they produce identical state hashes so that I can gate releases on determinism. - -**Requirements:** - -- R1: Harness accepts a simulation scenario (initial state + input sequence) and runs it twice: once with 1 thread, once with N threads (configurable, default 4). -- R2: Compare `state_root` and `commit_id` at every tick; report the first divergent tick if any. -- R3: Run the scheduler's parallel drain path (existing Phase 5-6B parallel execution) and verify canonical ordering is maintained. -- R4: Support both `F32Scalar` and `DFix64` scalar backends. -- R5: Output a structured report (JSON) with per-tick comparison results. - -**Acceptance Criteria:** - -- [ ] AC1: Harness passes with 0 divergences on the existing golden test scenarios from MS-1. -- [ ] AC2: Harness passes for 1, 2, 4, and 8 thread configurations. -- [ ] AC3: Harness passes for both `F32Scalar` and `DFix64` backends. 
-- [ ] AC4: Intentionally breaking scheduler ordering (test hook) causes the harness to detect divergence. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Thread-count comparison harness, structured report, both scalar backends. -**Out of Scope:** GPU determinism; WASM vs native comparison (separate concern); performance benchmarking. - -**Test Plan:** - -- **Goldens:** All MS-1 golden vectors must pass through the harness with zero divergences. -- **Failures:** Simulation with an intentionally nondeterministic rule (detect and report); simulation with zero ticks (no-op, report "trivially deterministic"). -- **Edges:** N=1 vs N=1 (should trivially pass); N=1 vs N=256 (extreme thread count). -- **Fuzz/Stress:** Run 100 random 50-tick simulations, each with random thread counts, verify zero divergences. - -**Blocked By:** none -**Blocking:** T-9-1-2 - -**Est. Hours:** 5h -**Expected Complexity:** ~400 LoC - ---- - -## T-9-1-2: Implement snapshot/restore fuzz - -**User Story:** As a release engineer, I want fuzz testing that snapshots simulation state at random ticks, restores it, and continues execution — verifying the restored run matches the original so that I can catch nondeterminism in serialization/deserialization. - -**Requirements:** - -- R1: Fuzz loop: run a simulation, snapshot state at a randomly chosen tick T, restore from snapshot, continue to tick T+K, compare `state_root` at T+K with the uninterrupted run. -- R2: Vary the snapshot format (canonical encoding, debug encoding if applicable) to catch format-dependent bugs. -- R3: Run at least 50 iterations per fuzz invocation with different random snapshot points. -- R4: Report any divergence with full context: snapshot tick, restore tick, comparison tick, expected vs actual hash. 
- -**Acceptance Criteria:** - -- [ ] AC1: 50 iterations with random snapshot points on a 500-tick simulation produce zero divergences. -- [ ] AC2: Corrupting a single byte in the snapshot (test hook) causes the restore to fail or the comparison to detect divergence. -- [ ] AC3: Fuzz runs in under 60 seconds for 50 iterations of a 500-tick simulation. -- [ ] AC4: Report includes snapshot tick, restore tick, and hash comparison for each iteration. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Snapshot/restore fuzz loop, divergence detection, corruption detection. -**Out of Scope:** Snapshot performance optimization; snapshot compression; distributed snapshot. - -**Test Plan:** - -- **Goldens:** Each fuzz iteration's final hash must match the uninterrupted run's hash at the same tick. -- **Failures:** Corrupted snapshot (detected at restore or comparison); snapshot at tick 0 (genesis snapshot, valid). -- **Edges:** Snapshot at the last tick (restore and immediately compare); snapshot and restore at the same tick (no simulation between). -- **Fuzz/Stress:** 500 iterations on a 1000-tick simulation (extended run, CI nightly). - -**Blocked By:** T-9-1-1 -**Blocking:** none - -**Est. Hours:** 5h -**Expected Complexity:** ~350 LoC diff --git a/docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md b/docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md index 5d176b01..0ba056bf 100644 --- a/docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md +++ b/docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md @@ -22,6 +22,12 @@ Status: active coordination backlog. This is a compatibility map, not an implementation spec; keep it aligned with current Echo runtime facts before using it to drive cross-repo protocol work. 
+Triage update: keep this card, but sharpen the next pass around causal-history +interchange. The live question is no longer just whether Echo and git-warp can +share a debugger, protocol, and schema compiler; it is whether they can share +witnessed causal history through Continuum transport without making either +runtime subordinate to the other. + ## Where they align ### Causal model @@ -135,6 +141,9 @@ Both substrates use Wesley for schema compilation, but: ### Short term (coordinate, don't build) +- Verify whether Echo and git-warp can exchange witnessed causal history through + Continuum transport while preserving each runtime's local admission law, + receipt semantics, and storage model. - Audit whether Echo's WASM ABI surface is sufficient for a warp-ttd host adapter (see `PLATFORM_echo-ttd-host-adapter` backlog item) - Reconcile TTD protocol types — one schema, one source of truth @@ -144,8 +153,8 @@ Both substrates use Wesley for schema compilation, but: ### Medium term (design decisions needed) -- Finish live holographic strand semantics in Echo's canonical/deterministic - model (see `KERNEL_live-holographic-strands` backlog item) +- Build on the live-basis strand semantics captured in + `docs/design/0010-live-basis-settlement-plan/design.md` - Design compliance reporting as a protocol extension (see `KERNEL_compliance-protocol-envelope` backlog item) - Evaluate `ttd-browser` crate overlap with warp-ttd's browser story diff --git a/docs/method/backlog/asap/KERNEL_live-holographic-strands.md b/docs/method/backlog/asap/KERNEL_live-holographic-strands.md deleted file mode 100644 index 6e869a79..00000000 --- a/docs/method/backlog/asap/KERNEL_live-holographic-strands.md +++ /dev/null @@ -1,107 +0,0 @@ - - - -# Live holographic strands - -Status: active WARP/Observer implementation backlog. The first settlement -slice exists; observer/read artifacts still need to consume the same -live-basis and revalidation posture. 
- -Depends on: - -- [0004 — Strand contract](../../../design/0004-strand-contract/design.md) -- [0008 — Strand settlement](../../../design/0008-strand-settlement/design.md) -- [0010 — Live-basis settlement correction plan](../../../design/0010-live-basis-settlement-plan/design.md) - -## Why now - -Echo's current strand cut is still a bootstrap strand: - -- fork a child worldline at one exact tick -- pin that copied prefix forever -- tick the child manually -- hard-delete the whole speculative lane on drop - -That got honest substrate nouns into the repo, but it is no longer -the right theory target. Paper VII now says a strand is a real -speculative lane whose realised state is resolved against inherited -parent history at the chosen basis. Bounded reads should materialize -the backward causal cone required by the local divergence and optic -footprint, not a fully copied child world. - -Echo needs to stop hardening the bootstrap cut into ontology. - -## What it should look like - -- A strand is rooted at a parent worldline plus an anchor coordinate, - but it follows the parent live for untouched regions. -- Local divergence owns only the closed optic footprint required for - lawful speculative change. -- Materialization is holographic: - - resolve inherited parent history at a chosen basis - - overlay only the strand-owned divergence - - slice only the backward causal cone needed for the read -- Parent changes outside the owned footprint flow through. -- Parent changes overlapping the owned footprint force revalidation, - explicit conflict, or obstruction. No fake cleanliness. -- `support_pins` remain a comparison/braid aid, not an excuse to - collapse plurality early. -- The implementation may still use child-worldline machinery - internally, but the public/runtime semantics must be - live-following strand semantics, not frozen-fork semantics. -- Dropping a strand should drop the live handle and caches. 
It should - not require the theory to pretend the speculative lane was never - real. - -## Current implementation slice - -The first slice is now deliberately smaller than the full target: - -- `Strand::live_basis_report(...)` reports parent basis movement, child owned - footprint, and revalidation state. -- settlement compare/plan carries the basis report internally. -- disjoint parent movement is detected separately from owned-footprint overlap. -- disjoint parent movement now plans a target-local import candidate instead of - a conflict artifact. -- owned-footprint overlap now runs explicit settlement revalidation: - - replay already satisfied on the parent basis imports as `Clean` - - replay failure is `Obstructed` - - replay that would mutate overlapped parent state is `Conflict` residue - under `ParentFootprintOverlap` - -The runtime settlement path now has the first concrete overlap revalidation -law. Observer/read artifacts still need to consume the same posture instead of -inventing a separate reading law. The full decision record and runway live in -[0010 — Live-basis settlement correction plan](../../../design/0010-live-basis-settlement-plan/design.md). - -## Done looks like - -- `docs/invariants/STRAND-CONTRACT.md` no longer defines a strand as - merely a prefix-copy child worldline with hard-delete semantics. -- `warp_core::strand` distinguishes: - - parent anchor - - owned local divergence - - revalidation state - - realised basis -- one bounded materialization path proves: - - untouched parent regions follow live truth - - owned regions stay local - - overlap with parent change yields explicit revalidation or - conflict -- one comparison path proves braid/settlement reads are basis-relative - presentations over plural lane claims, not fake merge previews. 
- -## Repo evidence - -- `crates/warp-core/src/strand.rs` -- `docs/invariants/STRAND-CONTRACT.md` -- `docs/design/0004-strand-contract/design.md` -- `docs/design/0008-strand-settlement/design.md` -- `docs/design/0010-live-basis-settlement-plan/design.md` - -## Non-goals - -- Do not design final multi-party braid collapse in this item. -- Do not require durable strand persistence in the first slice. -- Do not throw away child-worldline machinery if it remains useful as - a realization detail. diff --git a/docs/method/backlog/asap/PLATFORM_WESLEY_protocol-consumer-cutover.md b/docs/method/backlog/asap/PLATFORM_WESLEY_protocol-consumer-cutover.md deleted file mode 100644 index 63f403ab..00000000 --- a/docs/method/backlog/asap/PLATFORM_WESLEY_protocol-consumer-cutover.md +++ /dev/null @@ -1,41 +0,0 @@ - - - -# WESLEY Protocol Consumer Cutover - -Coordination: `WESLEY_protocol_surface_cutover` - -Status: active and partially implemented. Echo's local TTD protocol -crates/packages are already marked as generated consumers, but the repo still -has protocol ownership and regeneration-command drift to reconcile. - -Echo still carries local TTD protocol artifacts that predate the current -Continuum ownership split: - -- `crates/ttd-protocol-rs` -- `packages/ttd-protocol-ts` -- `crates/echo-ttd/src/compliance.rs` - -For the current Wesley-sponsored hill, Echo should stop acting like a backup -source of truth for the host-neutral debugger protocol and become a boring -consumer of the canonical authored schema plus Wesley-generated bundle. 
- -Current repo truth: - -- Echo no longer carries a local `schemas/ttd-protocol.graphql` -- the remaining drift is ownership language and consumer wiring around the - generated crates/packages -- `crates/ttd-protocol-rs/Cargo.toml` advertises `cargo xtask wesley sync`, - but no `xtask wesley` command exists in this repo yet - -Work: - -- point local protocol crates and packages at the chosen canonical protocol - bundle -- remove or clearly mark vendored schema and IR copies as derived or temporary -- keep Echo-owned hot runtime semantics and schema fragments separate from - host-neutral debugger protocol nouns -- reconcile the advertised regeneration command with the actual repo tooling -- verify the local compliance lane still passes against generated artifacts -- coordinate with `PLATFORM_ttd-schema-reconciliation` instead of reopening the - ownership question from scratch diff --git a/docs/method/backlog/asap/PLATFORM_cli-bench.md b/docs/method/backlog/asap/PLATFORM_cli-bench.md deleted file mode 100644 index ab5c0344..00000000 --- a/docs/method/backlog/asap/PLATFORM_cli-bench.md +++ /dev/null @@ -1,56 +0,0 @@ - - - -> **Milestone:** Developer CLI | **Priority:** P0 - -# bench (#49) - -Run the warp-benches suite and present results. - -Status: partially implemented. `echo-cli bench` already invokes -`cargo bench -p warp-benches`, supports `--filter`, parses -`target/criterion/**/new/estimates.json`, and emits text/JSON summaries. The -remaining active gap is CLI-level baseline comparison and deciding whether the -CLI should expose samples/raw Criterion metadata, while CI regression gating is -already handled by the G3 perf gate and `perf-baseline.json`. - -## T-6-3-1: Bench subcommand -- criterion invocation and reporting - -**User Story:** As a developer, I want to run benchmarks from the CLI and see formatted results so that I can track performance without memorizing cargo commands. 
- -**Requirements:** - -- R1: `echo-cli bench [--filter ]` invokes `cargo bench -p warp-benches` as a subprocess. -- R2: Collect criterion JSON output from `target/criterion/`. -- R3: Format results as an ASCII table (bench name, mean, median, stddev) for text output. -- R4: `--format json` outputs a merged summary array from parsed Criterion estimates. -- R5: If CLI baseline comparison remains desired, add `--baseline ` and report percentage deltas against saved baseline data without duplicating the CI G3 gate. - -**Acceptance Criteria:** - -- [x] AC1: `echo-cli bench` runs all benchmarks and prints an ASCII table to stdout. -- [x] AC2: `echo-cli bench --filter snapshot` runs only benchmarks matching "snapshot". -- [x] AC3: `echo-cli bench --format json` outputs valid JSON. -- [ ] AC4: `echo-cli bench --baseline main` shows percentage change columns when a baseline exists. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Subprocess invocation, criterion JSON parsing, table/JSON formatting, baseline comparison. -**Out of Scope:** CI integration (handled by existing GitHub Actions). Custom benchmark definitions. Flamegraph generation. - -**Test Plan:** - -- **Goldens:** ASCII table output for a mock criterion JSON fixture. -- **Failures:** `cargo bench` not found (clear error: "cargo not in PATH"). No benchmark results found (empty table with message). -- **Edges:** Filter that matches nothing (empty results). Baseline file missing (print "no baseline" and show absolute values only). -- **Fuzz/Stress:** N/A. - -**Blocked By:** none (T-6-1-1 is implemented enough for current CLI dispatch) -**Blocking:** none - -**Est. 
Hours:** 5h -**Expected Complexity:** ~250 LoC diff --git a/docs/method/backlog/asap/PLATFORM_cli-inspect.md b/docs/method/backlog/asap/PLATFORM_cli-inspect.md deleted file mode 100644 index ffd47c0c..00000000 --- a/docs/method/backlog/asap/PLATFORM_cli-inspect.md +++ /dev/null @@ -1,102 +0,0 @@ - - - -> **Milestone:** Developer CLI | **Priority:** P0 - -# inspect (#50) - -Snapshot summary, graph statistics, and optional terminal visualization. - -Status: partially implemented. `echo-cli inspect` already loads and validates -WSC files, reports tick/schema hash/warp count plus per-warp IDs, root node, -state root, node/edge counts, type breakdown, connected components, and optional -tree output in text or JSON. WSC v1 does not currently store `commit_id`, -parent list, or `policy_id`, so those fields must not be treated as implemented -metadata unless the WSC format grows them. The remaining active gap is -attachment payload display and `--raw`. - -## T-6-4-1: Inspect subcommand -- metadata and graph stats - -**User Story:** As a developer, I want to inspect a snapshot's metadata and graph structure so that I can debug simulation state without writing code. - -**Requirements:** - -- R1: `echo-cli inspect ` prints current WSC metadata: tick count, schema hash, warp count, per-warp ID, root node ID, and computed state root. -- R2: Graph statistics: total nodes, total edges, node types breakdown (count per TypeId), connected components count. -- R3: `--format json` outputs all stats as structured JSON. -- R4: `--tree` flag renders a simple ASCII tree of the graph starting from the root node (depth-limited to 5 levels). - -**Acceptance Criteria:** - -- [x] AC1: Inspect on a demo snapshot prints all current WSC metadata fields. -- [x] AC2: Node type breakdown sums to total node count. -- [x] AC3: `--tree` output shows root at level 0 with children indented. -- [x] AC4: JSON output includes both metadata and graph stats. 
- -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Metadata display, graph stats computation, ASCII tree rendering, JSON output. -**Out of Scope:** Interactive graph exploration (that is the website demo). Diff between two snapshots. - -**Test Plan:** - -- **Goldens:** Text and JSON output for a known snapshot fixture. -- **Failures:** Snapshot not found. Corrupt snapshot (graceful error, not panic). -- **Edges:** Snapshot with 0 nodes. Snapshot with disconnected components (tree shows only root's component). Very deep graph with `--tree` (respects depth limit). -- **Fuzz/Stress:** Inspect a 50,000-node snapshot; must complete in <2s. - -**Blocked By:** none (T-6-1-1 is implemented enough for current CLI dispatch) -**Blocking:** none - -**Est. Hours:** 5h -**Expected Complexity:** ~250 LoC - ---- - -## T-6-4-2: Inspect -- attachment payload pretty-printing - -Status: not implemented. WSC stores attachment rows and blobs, and -`warp-cli` reconstructs them for state-root verification, but inspect does not -yet render attachment payloads. - -**User Story:** As a developer, I want inspect to decode and display attachment payloads so that I can see entity data without manual hex decoding. - -**Requirements:** - -- R1: When the codec registry is available, decode `AtomPayload` bytes using the registered codec and display as formatted fields. -- R2: For motion payloads (v0 and v2 Q32.32), display decoded position/velocity as decimal values. -- R3: For unknown payload types, display hex dump with type_id annotation. -- R4: `--raw` flag disables decoding and shows hex for all payloads. - -**Acceptance Criteria:** - -- [ ] AC1: Motion payload displays as `position: (x, y, z), velocity: (vx, vy, vz)` with decimal values. -- [ ] AC2: Unknown payload type shows `[type_id: abcd1234...] 0x48656c6c6f...`. -- [ ] AC3: `--raw` flag shows hex for all payloads including known types. 
-- [ ] AC4: Truncated payloads display a warning and partial hex. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Payload decoding, motion payload formatting, hex fallback, --raw flag. -**Out of Scope:** Interactive payload editing. Custom codec plugin loading. - -**Test Plan:** - -- **Goldens:** Formatted output for a snapshot containing a motion-rule entity with known Q32.32 values. -- **Failures:** Payload bytes shorter than expected for declared type (warning + hex fallback). -- **Edges:** Empty payload bytes. Payload with all-zero bytes. Maximum-length payload (64KB). -- **Fuzz/Stress:** N/A. - -**Blocked By:** none (T-6-4-1 is implemented enough to support payload display work) -**Blocking:** none - -**Est. Hours:** 4h -**Expected Complexity:** ~150 LoC diff --git a/docs/method/backlog/asap/PLATFORM_cli-scaffold.md b/docs/method/backlog/asap/PLATFORM_cli-scaffold.md index 8f95e23f..b18dc325 100644 --- a/docs/method/backlog/asap/PLATFORM_cli-scaffold.md +++ b/docs/method/backlog/asap/PLATFORM_cli-scaffold.md @@ -8,61 +8,8 @@ Subcommand structure and ergonomic defaults for the current clap-based `echo-cli`. -Status: partially implemented. `crates/warp-cli/src/cli.rs` and `main.rs` -already provide the clap subcommand shell for `verify`, `bench`, and `inspect`, -the global `--format` flag, and the `echo-cli` binary name. The remaining active -work is narrower: revalidate whether `--verbose` and `--snapshot-dir` are still -wanted as global flags, then implement config-file defaults and shell -completions if they remain part of the CLI surface. - -## T-6-1-1: clap subcommand structure and global flags - -Implementation status: mostly complete. `clap`, the subcommand enum, binary -name, no-subcommand error path, unknown-subcommand error path, and global -`--format` parsing are implemented and tested. 
`--verbose` and `--snapshot-dir` -are not implemented and should be revalidated before adding them because the -current subcommands take explicit paths. - -**User Story:** As a developer, I want a well-structured CLI with `echo verify|bench|inspect` subcommands so that I can interact with Echo from the terminal. - -**Requirements:** - -- R1: Add `clap = { version = "4", features = ["derive"] }` dependency to warp-cli. -- R2: Define top-level `Cli` struct with `#[command(subcommand)]` and variants: `Verify`, `Bench`, `Inspect`. -- R3: Global flags: `--format [text|json]` is implemented; `--verbose` and `--snapshot-dir ` are candidate residual flags that need a current-use check before implementation. -- R4: Running `echo` with no subcommand prints help. Unknown subcommands print error + help. -- R5: Binary name is `echo-cli` (avoid collision with `/bin/echo`). - -**Acceptance Criteria:** - -- [x] AC1: `echo-cli --help` prints usage with all three subcommands listed. -- [x] AC2: `echo-cli verify --help` prints verify-specific options. -- [x] AC3: `echo-cli --format json verify` parses the global flag correctly before the subcommand. -- [x] AC4: `echo-cli unknown` exits with code 2. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** clap setup, subcommand enum, global flags, help text, binary name. -**Out of Scope:** Subcommand implementations (separate tasks). Config file parsing. Shell completions. - -**Test Plan:** - -- **Goldens:** `echo-cli --help` output checked in as a golden text file. -- **Failures:** Missing required subcommand-specific args. Invalid format value. -- **Edges:** `--verbose --verbose` (count-based verbosity). `--snapshot-dir` with spaces in path. -- **Fuzz/Stress:** N/A (argument parsing only). - -**Blocked By:** none -**Blocking:** T-6-2-1, T-6-3-1, T-6-4-1 - -**Est. 
Hours:** 3h -**Expected Complexity:** ~100 LoC - ---- +Status: active backlog item. The clap subcommand shell is already implemented; +only config file support and shell completions remain in this backlog card. ## T-6-1-2: Config file support and shell completions @@ -98,7 +45,7 @@ current subcommands take explicit paths. - **Edges:** Config file with only some fields set. Empty config file. Config dir does not exist. - **Fuzz/Stress:** N/A. -**Blocked By:** residual global-flag decision from T-6-1-1 +**Blocked By:** none **Blocking:** none **Est. Hours:** 3h diff --git a/docs/method/backlog/asap/PLATFORM_cli-verify.md b/docs/method/backlog/asap/PLATFORM_cli-verify.md deleted file mode 100644 index cc17c7b6..00000000 --- a/docs/method/backlog/asap/PLATFORM_cli-verify.md +++ /dev/null @@ -1,58 +0,0 @@ - - - -> **Milestone:** Developer CLI | **Priority:** P0 - -# verify (#48) - -Snapshot integrity verification. Reads a WSC snapshot file, recomputes hashes, -and reports mismatches. - -Status: partially implemented. `echo-cli verify` validates WSC structure, -reconstructs each warp into a `GraphStore`, recomputes per-warp state roots, -supports `--expected` for warp 0, emits text/JSON reports, and exits nonzero on -expected-hash mismatch. WSC v1 stores schema/tick/warp graph data but no stored -`state_root`, parent list, or `commit_id`, so commit verification needs either a -receipt/metadata source or a narrower stated scope. - -## T-6-2-1: Verify subcommand -- hash recomputation - -**User Story:** As a developer, I want to verify snapshot integrity from the CLI so that I can detect corruption or tampering. - -**Requirements:** - -- R1: `echo-cli verify ` reads and validates a WSC snapshot file. -- R2: Recompute per-warp `state_root` from the graph data using the same `GraphStore::canonical_state_hash()` path as WSC roundtrip verification. 
-- R3: If a receipt/snapshot metadata source is added, recompute `commit_id` using `compute_commit_hash_v2` or `compute_tick_commit_hash_v2` with the stored metadata fields. -- R4: Compare the recomputed state root against `--expected` when supplied; compare stored commit metadata only once such metadata is available. -- R5: `--format json` outputs the current structured verify report: file, tick, schema hash, warp count, per-warp state roots, statuses, and overall result. - -**Acceptance Criteria:** - -- [x] AC1: A valid WSC snapshot passes verification with exit code 0. -- [x] AC2: A snapshot checked with a mismatched `--expected` state root fails with exit code 1 and reports the mismatch. -- [x] AC3: JSON output is valid JSON parseable by `jq`. -- [ ] AC4: Text output uses color (green check / red X) when stdout is a TTY, plain text otherwise. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Hash recomputation, mismatch reporting, text/JSON output, exit codes. -**Out of Scope:** Snapshot loading from network. Batch verification of multiple snapshots. Auto-repair. - -**Test Plan:** - -- **Goldens:** JSON output for a known-good snapshot. JSON output for an - expected-hash mismatch. -- **Failures:** Snapshot file not found. Snapshot file is not valid WSC. Snapshot with missing fields. -- **Edges:** Empty graph snapshot (0 nodes). Snapshot with 10,000 nodes (performance: verify completes in <1s). -- **Fuzz/Stress:** Randomly flip bytes in a valid snapshot; verify fails structurally or reports a changed state root without panicking or falsely passing. - -**Blocked By:** none (T-6-1-1 is implemented enough for current CLI dispatch) -**Blocking:** none - -**Est. 
Hours:** 5h -**Expected Complexity:** ~200 LoC diff --git a/docs/method/backlog/asap/PLATFORM_contract-aware-intent-observation-envelope.md b/docs/method/backlog/asap/PLATFORM_contract-aware-intent-observation-envelope.md deleted file mode 100644 index 7960acb8..00000000 --- a/docs/method/backlog/asap/PLATFORM_contract-aware-intent-observation-envelope.md +++ /dev/null @@ -1,85 +0,0 @@ - - - -# Existing EINT, Registry, And Observation Boundary Inventory - -Status: design packet complete. - -Depends on: - -- [Wesley compiled contract hosting doctrine](./PLATFORM_wesley-compiled-contract-hosting-doctrine.md) -- [0014 - EINT, Registry, And Observation Boundary Inventory](../../../design/0014-eint-registry-observation-boundary-inventory/design.md) -- [Reading envelope family boundary](../up-next/PLATFORM_reading-envelope-family-boundary.md) -- [Observer plans and reading artifacts](./PLATFORM_observer-plan-reading-artifacts.md) - -## Why now - -Echo already exposes generic WASM calls such as `dispatch_intent(...)` and -`observe(...)`. It also already has EINT v1 intent envelopes, registry metadata -exports, a generic `echo-registry-api::RegistryProvider`, and -`echo-wesley-gen` output that includes a generated registry provider. - -The risk now is planning duplicate substrate. Echo does not need a second -intent envelope or a second registry model before a consumer can use it. The -next slice should inventory the existing path and identify the narrow missing -bridge for Wesley-generated app consumers. - -## Current repo truth to preserve - -- `echo-wasm-abi` defines EINT v1 as - `"EINT" || op_id:u32le || vars_len:u32le || vars`. -- `warp-wasm` exposes `dispatch_intent(...)` as the write/control ingress. -- `KernelPort::dispatch_intent(...)` is already app-agnostic over canonical - intent bytes. -- `warp-wasm` exposes `get_registry_info`, `get_codec_id`, - `get_registry_version`, and `get_schema_sha256_hex`. 
-- `echo-registry-api` defines the app-supplied `RegistryProvider` interface. -- `echo-wesley-gen` emits op ids, op catalogs, `GeneratedRegistry`, and - `REGISTRY`. -- `observe(...)` already returns `ObservationArtifact` with `ReadingEnvelope` - metadata for built-in observations. -- Existing schema validation helpers in `warp-wasm` are currently test-only. - -## Questions to answer - -1. Should app-level generated code validate op ids and vars before calling - `dispatch_intent(...)`, leaving Echo to ingest opaque canonical EINT bytes? -2. Should `warp-wasm` link an app-supplied `RegistryProvider` and reject - unknown op ids or malformed vars at the WASM boundary? -3. Is EINT v1 sufficient for app contract identity when one generated registry - is installed, or does multi-family hosting require an EINT v2 or registry - scope rule? -4. How should Wesley-generated `QUERY` operations relate to `observe(...)`, - `ObservationRequest`, `ReadingEnvelope`, and built-in observer plans? -5. Is `RegistryInfo` metadata enough for `jedit` handshakes, or does browser - code need the full generated op catalog at runtime? - -## What it should look like - -Add a design/inventory packet before writing RED tests. - -That packet now exists: -[0014 - EINT, Registry, And Observation Boundary Inventory](../../../design/0014-eint-registry-observation-boundary-inventory/design.md). - -## Acceptance criteria - -- The packet cites the existing EINT, registry metadata, `RegistryProvider`, - `echo-wesley-gen`, and observation/read-envelope surfaces. -- The packet states whether the next implementation validates op ids in - app-level generated code, Echo's WASM boundary, or both. -- The packet states whether EINT v1 remains the contract path for the next - consumer proof. -- The packet states the first narrow missing bridge for generated query/read - operations. -- Existing backlog cards are corrected so they do not claim that basic intent - ingress or registry metadata are missing. 
- -## Non-goals - -- Do not create a new intent envelope unless the inventory proves EINT v1 is - insufficient. -- Do not implement a new registry model. -- Do not move app-specific validation into Echo core by default. -- Do not add dynamic loading. -- Do not add application payload types. -- Do not change `jedit`. diff --git a/docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md b/docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md index 59f70c8b..73523ef7 100644 --- a/docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md +++ b/docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md @@ -27,15 +27,18 @@ registry model. 1. [Wesley compiled contract hosting doctrine](./PLATFORM_wesley-compiled-contract-hosting-doctrine.md) - Design packet: [0013 - Wesley Compiled Contract Hosting Doctrine](../../../design/0013-wesley-compiled-contract-hosting-doctrine/design.md) -2. [Existing EINT, registry, and observation boundary inventory](./PLATFORM_contract-aware-intent-observation-envelope.md) - - Design packet: +2. Existing EINT, registry, and observation boundary inventory + - Completed design packet: [0014 - EINT, Registry, And Observation Boundary Inventory](../../../design/0014-eint-registry-observation-boundary-inventory/design.md) -3. [Registry provider wiring and host boundary decision](./PLATFORM_static-contract-registry-and-host-boundary.md) - - Design packet: +3. Registry provider wiring and host boundary decision + - Completed design packet: [0015 - Registry Provider Host Boundary Decision](../../../design/0015-registry-provider-host-boundary-decision/design.md) -4. [Wesley to Echo toy contract proof](../up-next/PLATFORM_wesley-to-echo-toy-contract-proof.md) +4. Wesley to Echo toy contract proof + - Status: accepted. 
- Design packet: [0016 - Wesley To Echo Toy Contract Proof](../../../design/0016-wesley-to-echo-toy-contract-proof/design.md) + - Retro: + [0016 - Wesley To Echo Toy Contract Proof](../../retro/0016-wesley-to-echo-toy-contract-proof/retro.md) 5. [Contract-aware receipts and readings](../up-next/KERNEL_contract-aware-receipts-and-readings.md) 6. [Contract artifact retention in echo-cas](../up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md) 7. [jedit text contract MVP](../up-next/PLATFORM_jedit-text-contract-mvp.md) diff --git a/docs/method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md b/docs/method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md deleted file mode 100644 index f6000428..00000000 --- a/docs/method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md +++ /dev/null @@ -1,102 +0,0 @@ - - - -# Observer plans and reading artifacts - -Status: active and partially implemented. `ObservationService` and the ABI now -emit one-shot built-in observation artifacts with `ReadingEnvelope` metadata. -The remaining gap is authored `ObserverPlan` support plus hosted/stateful -observer instances; query-shaped reads still exist only as a placeholder plan -and return unsupported at runtime. - -Depends on: - -- `crates/echo-wasm-abi/src/kernel_port.rs` -- [0006 — Echo Continuum alignment](../../../design/0006-echo-continuum-alignment/design.md) -- [0005 — Echo TTD witness surface](../../../design/0005-echo-ttd-witness-surface/design.md) -- [0011 — Optic and observer runtime doctrine](../../../design/0011-optic-observer-runtime-doctrine/design.md) - -## Why now - -Echo has the right instinct at the ABI boundary: - -- `dispatch_intent(...)` -- `observe(...)` -- neighborhood publication -- settlement publication - -But the public/runtime story is still too thin on the revelation side. -An observer is not "just a query" and a reading is not "just a state -snapshot." 
The current doctrine is stronger: - -- app/authored observer spec is not the runtime observer instance -- the observer is only the revelation-side object, not the whole optic -- the observer basis is not the same thing as the parent basis used to realize - a strand -- reads should come back as witness-bearing artifacts over causal - history - -Echo needs an explicit observer-plan boundary instead of letting -"observation" collapse back into ad hoc materialization. - -Current implementation note: `ObservationArtifact` now carries ABI-visible -`reading: ReadingEnvelope`, which covers built-in observer plan identity, -observer basis, witness refs, parent-basis posture, budget posture, rights -posture, and residual posture for one-shot observations. Authored -`ObserverPlan` and hosted/stateful observer instances are still open. - -## What it should look like - -- One authored/configured **ObserverPlan** shape exists for the read - side. -- That plan names at least: - - aperture / projection - - basis - - observer state schema - - update law - - emission law - - slice budget - - rights / exposure tier -- The runtime distinguishes: - - observer spec / plan - - observer instance / accumulated state - - emitted reading artifact -- `observe(request)` returns a bounded **reading artifact**, not a - raw "full state" story. -- A reading artifact carries: - - frontier / coordinate - - reading payload - - witness or shell reference - - parent-basis posture when the read is strand-relative - - observer-basis metadata for native distinctions retained by the reading - - budget posture - - obstruction / plurality / residual when relevant -- One-shot observation and hosted/stateful observation should share - the same artifact family. - -## Done looks like - -- ABI request/response types make plan/source/budget/rights explicit. -- The docs stop teaching "observer = filtered state read". 
-- One end-to-end path proves: - - dispatch intent - - get admission/result envelope - - observe via plan at a frontier or shell - - receive a reading artifact with witness-bearing metadata -- TTD/host integration can consume readings without demanding a - universal materialized graph object. - -## Repo evidence - -- `crates/warp-core/src/observation.rs` -- `crates/echo-wasm-abi/src/kernel_port.rs` -- `crates/warp-wasm/src/warp_kernel.rs` -- `docs/design/0006-echo-continuum-alignment/design.md` -- `docs/design/0005-echo-ttd-witness-surface/design.md` - -## Non-goals - -- Do not embed app-specific observer business logic into Echo. -- Do not require a full app-code compiler surface in the same slice. -- Do not remove low-level diagnostic materialization helpers that are - still useful for tests and proofs. diff --git a/docs/method/backlog/asap/PLATFORM_static-contract-registry-and-host-boundary.md b/docs/method/backlog/asap/PLATFORM_static-contract-registry-and-host-boundary.md deleted file mode 100644 index b637f045..00000000 --- a/docs/method/backlog/asap/PLATFORM_static-contract-registry-and-host-boundary.md +++ /dev/null @@ -1,69 +0,0 @@ - - - -# Registry Provider Wiring And Host Boundary Decision - -Status: design packet complete. - -Depends on: - -- [Existing EINT, registry, and observation boundary inventory](./PLATFORM_contract-aware-intent-observation-envelope.md) -- [0015 - Registry Provider Host Boundary Decision](../../../design/0015-registry-provider-host-boundary-decision/design.md) - -## Why now - -Echo already has a generic registry interface: -`echo-registry-api::RegistryProvider`. `echo-wesley-gen` already emits a -`GeneratedRegistry` implementation and op catalog from Wesley IR. - -The missing decision is not "invent a registry." 
The missing decision is how -the existing generated registry is wired into consumers and whether Echo itself -should consult it during `dispatch_intent(...)` / `observe(...)` or leave that -validation to app-level generated code. - -## What it should look like - -Make one explicit host-boundary decision: - -Option A: - -- app-level generated code validates op ids and vars using `REGISTRY` -- app-level generated code packs EINT -- Echo ingests canonical EINT bytes opaquely -- Echo exposes registry metadata for handshake - -Option B: - -- `warp-wasm` or the installed kernel links an app-supplied - `RegistryProvider` -- Echo rejects unknown op ids or malformed vars before ingress -- generated query/read ops get a clear path through `observe(...)` - -Either option must preserve Echo's app-agnostic substrate boundary. - -That decision now exists: -[0015 - Registry Provider Host Boundary Decision](../../../design/0015-registry-provider-host-boundary-decision/design.md). - -The first consumer uses Option A. Host-side app payload validation is deferred -until a future RED proves why the Echo boundary must reject generated app -payloads before ingress. - -## Acceptance criteria - -- The decision cites `echo-registry-api`, `echo-wesley-gen`, EINT v1, and - current `RegistryInfo` exports. -- The decision explains where op id lookup happens for the first consumer. -- The decision explains where vars payload validation happens for the first - consumer. -- The decision explains whether generated `QUERY` ops are app-level helpers, - built-in `observe(...)` requests, or a future observe bridge. -- Any later implementation reuses `RegistryProvider` rather than creating a - parallel registry abstraction. - -## Non-goals - -- Do not implement WASM dynamic module loading. -- Do not fetch contracts over the network. -- Do not add jedit-specific registration. -- Do not invent a new registry trait while `RegistryProvider` is sufficient. 
-- Do not make the host boundary a broad runtime facade. diff --git a/docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md b/docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md index 87d75927..2a4c455f 100644 --- a/docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md +++ b/docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md @@ -17,8 +17,8 @@ Current integration seams: - `crates/ttd-protocol-rs` and `packages/ttd-protocol-ts` are generated protocol consumers. - `crates/echo-ttd` owns Echo-side compliance and violation reporting. -- `crates/ttd-browser`, `apps/ttd-app`, and `crates/echo-wasm-bindings/src/ttd.rs` - are local browser/debugger adapter surfaces. +- `crates/ttd-browser` and `crates/echo-wasm-bindings/src/ttd.rs` are local + browser/debugger adapter surfaces. - `warp-ttd` owns debugger protocol semantics; Echo owns generated consumer wiring and substrate-side compatibility. diff --git a/docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md b/docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md index 5a6b6851..92876dbe 100644 --- a/docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md +++ b/docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md @@ -4,11 +4,10 @@ # Reconcile TTD protocol schemas with warp-ttd Status: active and partially implemented. Echo's generated Rust and TypeScript -protocol consumers are already labeled as generated from the canonical -`warp-ttd` protocol. The remaining gap is provenance/tooling: the advertised -regeneration command does not exist locally, and Echo still needs a verified -handoff from the external canonical schema to the checked-in generated -artifacts. +protocol consumers are labeled as generated from the canonical `warp-ttd` +protocol, and `cargo xtask wesley sync` now verifies local downstream-consumer +provenance. The remaining gap is the full external handoff from the canonical +schema bundle to checked-in generated artifacts. 
Echo has local TTD protocol artifacts that must stay downstream of `warp-ttd`: @@ -24,13 +23,14 @@ acting as a backup schema owner. Work: -- Reconcile `crates/ttd-protocol-rs/Cargo.toml` advertising - `cargo xtask wesley sync` with the actual repo tooling. +- Extend the current `cargo xtask wesley sync` provenance check into a + regeneration or bundle-ingest path once the external canonical bundle is + published for Echo consumption. - Point protocol generation at the canonical `warp-ttd` schema or document the exact external bundle handoff if generation stays outside this repo. - Keep generated crates/packages clearly marked as downstream consumers, not backup protocol owners. - Verify generated types still satisfy the `echo-ttd` compliance checker and local browser adapter surfaces. -- Coordinate with `PLATFORM_WESLEY_protocol-consumer-cutover` instead of +- Preserve the completed WESLEY protocol consumer cutover decision instead of reopening protocol ownership from scratch. diff --git a/docs/method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md b/docs/method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md deleted file mode 100644 index 1bf951b3..00000000 --- a/docs/method/backlog/asap/PLATFORM_witnessed-suffix-admission-shells.md +++ /dev/null @@ -1,81 +0,0 @@ - - - -# Witnessed suffix admission shells - -Status: active planned design. Echo has settlement, neighborhood publication, -observer reading envelopes, and design 0009 for witnessed causal suffix sync. -It does not yet have implemented `ExportSuffixRequest`, `CausalSuffixBundle`, -`ImportSuffixResult`, `export_suffix`, or `import_suffix` surfaces. This card is -the execution handle for that gap. 
- -Refines: - -- [Echo / git-warp witnessed suffix sync](../up-next/PLATFORM_echo-git-warp-witnessed-suffix-sync.md) -- [0011 — Optic and observer runtime doctrine](../../../design/0011-optic-observer-runtime-doctrine/design.md) - -## Why now - -Echo already has real publication surfaces for settlement and -neighborhood truth. The remaining risk is semantic downgrade: -export/import could still devolve into packet sync, patch shipping, or -state-sync folklore. - -Paper VII's stronger target is tighter: - -- remote suffixes are transported claim families -- import is ordinary witnessed admission after normalization to a - comparable frontier -- the durable object is an admission shell / hologram, not a vague - bundle of patches - -This note exists to pin that stronger target before Echo bakes an -older sync contract into its runtime and ABI. - -## What it should look like - -- Echo exports a **witnessed suffix shell** rather than a naked patch - stream. -- The export shell names: - - graph / lane identity - - source frontier and claimed base frontier - - transported local site or comparable basis - - payload / provenance references - - witness required for replay, audit, and bounded revelation -- Import is an admission act, not a patch-apply loop. -- Import returns an explicit outcome algebra member: - - admitted - - staged - - plural / braided - - conflict - - obstruction -- Independent imports are expected to converge up to shell - equivalence, not merely "same eventual state." -- Divergence is never silently swallowed into "skipped writer" style - behavior. 
- -## Done looks like - -- one export path produces a typed suffix shell / hologram -- one import path normalizes to a comparable frontier before deciding -- one proof test shows independent import order yields shell-equivalent - retained results -- one non-independent case returns explicit plural/conflict/obstruction - outcome rather than pretending commutativity -- the Echo / git-warp boundary speaks in suffix shells and admission - outcomes, not state snapshots - -## Repo evidence - -- `docs/design/0009-witnessed-causal-suffix-sync/design.md` -- `docs/design/0008-strand-settlement/design.md` -- `crates/warp-core/src/settlement.rs` -- `crates/warp-core/src/neighborhood.rs` -- `crates/echo-wasm-abi/src/kernel_port.rs` - -## Non-goals - -- Do not settle the final network transport encoding here. -- Do not require full multi-peer trust policy in the first slice. -- Do not regress to "sync means same final state" as the only success - criterion. diff --git a/docs/method/backlog/asap/PLATFORM_xtask-method-close.md b/docs/method/backlog/asap/PLATFORM_xtask-method-close.md deleted file mode 100644 index dcee7e08..00000000 --- a/docs/method/backlog/asap/PLATFORM_xtask-method-close.md +++ /dev/null @@ -1,18 +0,0 @@ - - - -# xtask method close - -Status: active and not implemented. `cargo xtask method --help` exposes only -`status`; `xtask/src/main.rs` has only `MethodCommand::Status`. Keep this as -the execution handle until cycle-closing can be done through xtask instead of -manual filesystem edits. - -Implement `cargo xtask method close [cycle]` — close a cycle with a -retro and witness directory. - -## Acceptance - -- Creates `docs/method/retro//` with a retro template. -- Creates a `witness/` subdirectory for artifacts. -- Defaults to the current (most recent) active cycle if none specified. 
diff --git a/docs/method/backlog/asap/PLATFORM_xtask-method-drift.md b/docs/method/backlog/asap/PLATFORM_xtask-method-drift.md deleted file mode 100644 index e10b96e9..00000000 --- a/docs/method/backlog/asap/PLATFORM_xtask-method-drift.md +++ /dev/null @@ -1,19 +0,0 @@ - - - -# xtask method drift - -Status: active and not implemented. `cargo xtask method --help` exposes only -`status`; `xtask/src/main.rs` has only `MethodCommand::Status`. Current retros -contain manual "Drift check" sections, so the remaining work is the automated -coverage check. - -Implement `cargo xtask method drift [cycle]` — check active cycle -playback questions against committed test descriptions. - -## Acceptance - -- Parses playback questions from the design doc. -- Searches test files for matching test names or descriptions. -- Reports coverage: which questions have tests, which don't. -- Exit code 1 if any playback question has no matching test. diff --git a/docs/method/backlog/asap/PLATFORM_xtask-method-pull.md b/docs/method/backlog/asap/PLATFORM_xtask-method-pull.md deleted file mode 100644 index 6e19863f..00000000 --- a/docs/method/backlog/asap/PLATFORM_xtask-method-pull.md +++ /dev/null @@ -1,19 +0,0 @@ - - - -# xtask method pull - -Status: active and not implemented. `cargo xtask method --help` exposes only -`status`; `xtask/src/main.rs` has only `MethodCommand::Status`. `crates/method` -already exposes `MethodWorkspace::design_root()`, so the missing work is command -behavior, naming, and safe file movement. - -Implement `cargo xtask method pull ` — promote a backlog item -into the next numbered cycle. - -## Acceptance - -- Moves the backlog file to `docs/design//`. -- Auto-numbers the cycle directory (e.g., `0001-/`). -- Strips the legend prefix from the design doc filename. -- Prints the cycle number and path. 
diff --git a/docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md b/docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md index 58122161..e68beb59 100644 --- a/docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md +++ b/docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md @@ -26,7 +26,7 @@ for timeline scrubbing and causal slicing. **Requirements:** - R1: Define and implement pause-buffer admission policy: freeze - simulation-view cursors while tool-view cursors remain live; stream events + simulation-view cursors while tool-view cursors remain live; source events accumulate in backlog. - R2: Implement a capability-gated `Fork(worldline, tick)` operation that creates a new worldline fork/branch at the specified tick. @@ -38,12 +38,13 @@ for timeline scrubbing and causal slicing. compute budget is exhausted. - R5: All operations emit deterministic decision/provenance records into the authoritative worldline history. -- R6: Inspector/stream frames reflect paused/buffered state accurately during - time travel. +- R6: Typed admission and observation evidence reflects paused/buffered state + accurately during time travel. **Acceptance Criteria:** -- [ ] AC1: Unit test: pause a 2-stream simulation at tick 50, verify tool-view cursors advance while sim-view cursors are frozen. +- [ ] AC1: Unit test: pause a simulation with two input sources at tick 50, + verify tool-view cursors advance while sim-view cursors are frozen. - [ ] AC2: Unit test: fork at tick 20, advance the fork to tick 25 with independent state, verify original worldline is unaffected. - [ ] AC3: Integration test: rewind from tick 100 to tick 10 using a checkpoint, verify state matches the original tick-10 snapshot hash. - [ ] AC4: Integration test: catch-up from tick 10 to tick 100 via @@ -71,7 +72,9 @@ checks. - **Edges:** Fork at tick 0 (genesis); rewind to current tick (no-op); catch-up when already at target. 
- **Fuzz/Stress:** Property test: fork-then-catchup from random tick pairs produces state hashes matching the original worldline. -**Blocked By:** T-7-2-5, T-7-2-3, T-7-2-4 +**Blocked By:** +`docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md`, +`docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md` **Blocking:** T-7-3-2, T-7-4-1 **Est. Hours:** 6h @@ -121,7 +124,7 @@ checks. - **Edges:** Scrub to tick 0 (genesis); scrub to the head tick; fork from genesis. - **Fuzz/Stress:** Rapid scrubbing across 10,000 ticks without UI freeze (debounced seek, < 16ms frame time). -**Blocked By:** T-7-3-1, T-7-2-6 +**Blocked By:** T-7-3-1 **Blocking:** T-7-4-1 **Est. Hours:** 6h diff --git a/docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md b/docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md index 86697e3a..c98f16f0 100644 --- a/docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md +++ b/docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md @@ -8,7 +8,6 @@ Status: cool idea, future protocol lane. Depends on: - [Contract strands and counterfactuals](../up-next/KERNEL_contract-strands-and-counterfactuals.md) -- [Witnessed suffix admission shells](../asap/PLATFORM_witnessed-suffix-admission-shells.md) - external Continuum protocol publication work ## Why later diff --git a/docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md b/docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md new file mode 100644 index 00000000..878fc028 --- /dev/null +++ b/docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md @@ -0,0 +1,115 @@ + + + +# Proof-Carrying Apertures + +Status: cool idea, future proof backend lane. 
+ +Depends on: + +- [Contract-aware receipts and readings](../up-next/KERNEL_contract-aware-receipts-and-readings.md) +- [Contract artifact retention in echo-cas](../up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md) +- [WSC, Verkle, IPA, And Retained Readings](../../../architecture/wsc-verkle-ipa-retained-readings.md) +- [WARPDrive POSIX Materialization Optic](./PLATFORM_warpdrive-posix-optic.md) + +## Why later + +Echo readings are WARP optic outputs: they name a coordinate, aperture, law, +observer basis, payload, and support posture. Some readings need full +materialization today because the verifier has no compact proof shape for the +claim being made. + +Future proof backends should let an optic carry compact support for selected +claims without widening the observer's revelation aperture. IPA fits this shape: +the aperture becomes public inputs or selectors, the hidden territory is a +committed vector, polynomial, or WSC column family, the reading is a claimed +relation or evaluation, and the IPA proof is the transported support. + +Doctrine phrase: + +```text +Proof-carrying apertures. +``` + +## What it should look like + +Contract-aware readings and retained holograms should be able to carry optional +proof support without requiring proof systems in the first implementation: + +- commitment family and commitment ref +- proof family and proof ref +- public inputs hash +- verification posture +- support obligation status +- payload codec and payload ref + +The future posture vocabulary should be able to distinguish: + +- materialized bytes are present +- selected data is opened by Merkle or Verkle-style witness +- a relation is verified by IPA or equivalent compact proof +- a predicate is verified by a ZK-style proof +- support is rehydratable +- support is obstructed or underdetermined + +WSC is the natural future payload layout for WARP-shaped committed structure. +`echo-cas` stores bytes. WSC provides canonical columnar state/read-model bytes. 
+Merkle, Verkle, IPA, SNARK, STARK, or related proof families may sit above those +bytes as proof backends over retained holograms. + +The current architectural direction is captured in +[WSC, Verkle, IPA, And Retained Readings](../../../architecture/wsc-verkle-ipa-retained-readings.md): +WSC gives the table, Verkle gives the root, IPA gives the aperture proof, and +`echo-cas` stores the bytes. + +## Acceptance criteria + +- `M012` leaves room for commitment refs, proof refs, public-input hashes, and + verification posture without implementing a proof system. +- A future design packet can describe one proof-carrying reading whose verifier + does not materialize the full slice. +- The design separates `ReadIdentity` from CAS hash, WSC payload hash, and proof + identity. +- The design distinguishes materialization, inclusion/opening witnesses, + relational proofs, and predicate proofs. +- Missing proof support returns an explicit obstruction, rehydration-required, + or underdetermined posture. + +## Non-goals + +- Do not implement IPA, Verkle, SNARK, STARK, or polynomial commitments in this + card. +- Do not make IPA a storage substrate. +- Do not make Verkle the ontology. +- Do not make `echo-cas` depend on WSC or proof systems. +- Do not treat CAS hashes as semantic reading identity. +- Do not treat `Verify(proof) = accept` as admissibility without context, + authority, policy, and support-obligation checks. 
+ +## Notes + +Useful primitive stack: + +```text +BLAKE3 / content hash + exact-byte identity + +Merkle path + inclusion or exclusion under a root + +Verkle / vector commitment + compact state-cell or update witness + +IPA / polynomial commitment opening + compact relation or evaluation over committed structure + +ZK proof + predicate verification with bounded revelation +``` + +The design rule for future proof-friendly optics: + +```text +Build optics whose readings can be expressed as relations over committed +columns, vectors, polynomial evaluations, or explicitly supported predicates. +``` diff --git a/docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md b/docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md index 6a6a6fa1..871beddc 100644 --- a/docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md +++ b/docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md @@ -44,5 +44,5 @@ A maintainer can inspect one reading envelope and immediately see: ## Repo evidence -- `docs/method/backlog/asap/PLATFORM_observer-plan-reading-artifacts.md` +- `docs/design/0011-optic-observer-runtime-doctrine/design.md` - `docs/method/backlog/up-next/PLATFORM_reading-envelope-family-boundary.md` diff --git a/docs/method/backlog/cool-ideas/PLATFORM_streams-inspector.md b/docs/method/backlog/cool-ideas/PLATFORM_streams-inspector.md deleted file mode 100644 index 6fea8ad2..00000000 --- a/docs/method/backlog/cool-ideas/PLATFORM_streams-inspector.md +++ /dev/null @@ -1,295 +0,0 @@ - - - -> **Milestone:** Time Travel | **Priority:** P2 - -# TT1 — Streams Inspector Frame - -Status: active but stale cool idea. Task DAG issues #170, #203, and -the #244-#246 set remain open, while #243 now has the fixed-timestep -invariant in `docs/invariants/FIXED-TIMESTEP.md`. No `StreamsFrame`, -inspector stream frame, or Constraint Lens UI exists yet. 
This card -remains operational as the handle for turning current -playback/provenance/checkpoint substrate into a stream/admission inspection -surface; it is not implemented protocol truth. - -Define the stream/admission inspector surface and resolve the remaining TT1 -design questions (#244, #245, #246) required before the time-travel MVP. - -**Issues:** #170, #203, #243, #244, #245, #246 - ---- - -## T-7-2-1: Spec — dt policy: fixed timestep vs admitted dt stream (#243) - -**User Story:** As an engine architect, I want a locked design decision on whether Echo uses a fixed timestep or variable dt admitted as a stream so that all downstream code (physics, animation, admission budgets) can commit to one model. - -**Requirements:** - -- R1: Treat `docs/invariants/FIXED-TIMESTEP.md` as the current decision - artifact. -- R2: Confirm downstream TT1 work follows the locked decision: fixed - timestep is default; `dt` is not an admitted stream fact. -- R3: Document any remaining catch-up/checkpoint implications in the - time-model or retention follow-up docs rather than reopening dt policy. - -**Acceptance Criteria:** - -- [ ] AC1: The fixed-timestep invariant remains the normative decision. -- [ ] AC2: TT1 follow-up docs do not reintroduce per-tick or admitted - variable `dt`. -- [ ] AC3: Catch-up/checkpoint implications are covered by the current - time-model or retention docs. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Verify downstream TT1 alignment with the fixed-timestep invariant. -**Out of Scope:** Reopening variable-dt support; changes to the scheduler. - -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** What happens when variable-dt admission is disabled mid-session (answer: revert to fixed timestep, document the transition). -- **Fuzz/Stress:** n/a - -**Blocked By:** T-7-1-1, T-7-1-2 -**Blocking:** T-7-2-4 - -**Est. 
Hours:** 3h -**Expected Complexity:** ~100 LoC (markdown) - ---- - -## T-7-2-2: Spec — TimeStream retention, spool compaction, wormhole density (#244) - -**User Story:** As an operator deploying Echo sessions, I want documented policies for how long TimeStream spools are retained, when compaction occurs, and how wormhole density is managed so that I can size storage and predict seek latency. - -**Requirements:** - -- R1: Define retention tiers: hot (in-memory ring buffer), warm (on-disk WAL), cold (CAS archive). -- R2: Specify compaction triggers: tick count threshold, byte budget, or explicit GC request. -- R3: Define wormhole density policy: minimum one wormhole checkpoint per N ticks (configurable), plus mandatory checkpoints at branch/merge points. -- R4: Document the relationship between retention and replay cost (seek latency formula). -- R5: Align with `docs/method/backlog/up-next/KERNEL_timestream-retention.md` - and the existing `ProvenanceStore::checkpoint_before()` seam. - -**Acceptance Criteria:** - -- [ ] AC1: Retention tiers are defined with default thresholds. -- [ ] AC2: Compaction triggers are enumerable and configurable. -- [ ] AC3: Wormhole density policy includes a default N value and explains the tradeoff. -- [ ] AC4: A "replay cost" formula or heuristic is documented. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Policy spec for retention, compaction, and wormhole density. -**Out of Scope:** Implementation of tiered storage (that is echo-cas work); GC runtime code. - -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** What happens when compaction runs during an active rewind (answer: compaction must not remove ticks reachable from any active view cursor). -- **Fuzz/Stress:** n/a - -**Blocked By:** T-7-1-1, T-7-1-2 -**Blocking:** T-7-2-4 - -**Est. 
Hours:** 4h -**Expected Complexity:** ~200 LoC (markdown) - ---- - -## T-7-2-3: Spec — Merge semantics for admitted stream facts across worldlines (#245) - -**User Story:** As a multiplayer game developer, I want clear merge semantics for when worldlines rejoin so that buffered "future" events are handled deterministically and I can reason about conflict resolution. - -**Requirements:** - -- R1: Define three merge strategies: discard-and-reattach, replay-and-revalidate, authority-wins. -- R2: Specify what happens to stream facts admitted on a diverged branch when merging back to canonical: revalidation rules, conflict detection, paradox quarantine. -- R3: Document the interaction between merge semantics and `admission_digest` (merged branch must produce a valid digest chain). -- R4: Provide a worked example: two peers diverge, one admits events the other did not, they merge. - -**Acceptance Criteria:** - -- [ ] AC1: Three merge strategies are defined with tradeoff analysis. -- [ ] AC2: Revalidation rules are specified for at least two stream types (NetworkRx, GameInput). -- [ ] AC3: Worked example covers diverge, independent admission, and merge with conflict. -- [ ] AC4: Conflict/quarantine behavior is cross-referenced to current - worldline/provenance docs or a live follow-up card, not a retired spec - path. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Merge semantics spec for stream facts across worldlines. -**Out of Scope:** Runtime merge implementation; UI for conflict resolution. - -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** What if a merged branch contains an observation fact referencing a stream seq that the canonical branch also admitted at a different tick (answer: seq collision detection, documented). -- **Fuzz/Stress:** n/a - -**Blocked By:** T-7-1-1, T-7-1-2 -**Blocking:** T-7-3-1 - -**Est. 
Hours:** 4h -**Expected Complexity:** ~250 LoC (markdown) - ---- - -## T-7-2-4: Spec — Security/capabilities for fork/rewind/merge in multiplayer (#246) - -**User Story:** As a session host, I want a capability model that controls who can fork, rewind, and merge worldlines so that time-travel operations cannot be abused in multiplayer. - -**Requirements:** - -- R1: Define the capability names or rights model for fork, rewind, and - merge, aligned with - `docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md`. -- R2: Specify per-session and per-player capability grants (host can restrict rewind to observers only, etc.). -- R3: Document provenance sovereignty: a player's forked branch carries their signer identity; merging requires authority from the branch owner or session host. -- R4: Define fault codes for unauthorized time-travel operations. - -**Acceptance Criteria:** - -- [ ] AC1: Time-travel capability names and denial faults are documented in - the live capability follow-up. -- [ ] AC2: Per-session capability grant model is documented with example configurations. -- [ ] AC3: Provenance sovereignty rules are stated as normative requirements. -- [ ] AC4: At least 2 new fault codes are defined (e.g., `ERR_FORK_DENIED`, `ERR_MERGE_UNAUTHORIZED`). - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Capability model spec for time-travel operations. -**Out of Scope:** Runtime enforcement implementation; key management infrastructure. - -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** What happens when a player's capability is revoked while they have an active forked branch (answer: branch is quarantined, not silently destroyed). -- **Fuzz/Stress:** n/a - -**Blocked By:** T-7-2-1, T-7-2-2 -**Blocking:** T-7-3-1 - -**Est. 
Hours:** 4h -**Expected Complexity:** ~180 LoC (markdown) - ---- - -## T-7-2-5: Implement StreamsFrame inspector support (#170) - -**User Story:** As a developer debugging a live Echo session, I want an inspector frame that shows per-stream backlog, per-view cursor positions, and recent admission decisions so that I can understand why events are or are not entering the simulation. - -**Requirements:** - -- R1: Define the stream/admission frame shape in the inspector or observer - protocol, covering `stream_id`, backlog metrics, cursor positions, - recent admission-decision summaries, and `admission_digest`. -- R2: Add a streams/admission frame kind to the selected inspector envelope - once that envelope exists. -- R3: Emit the frame at a deterministic tick boundary consistent with the - current scheduler/playback emission order. -- R4: Serialize to JSONL for offline analysis; expose via WebSocket transport. -- R5: Add subscription/filter support for the streams/admission frame in the - selected inspector command surface. - -**Acceptance Criteria:** - -- [ ] AC1: The streams/admission frame type compiles and is included in the - selected inspector or observer module. -- [ ] AC2: A unit test constructs the frame with mock data and serializes it - to JSON matching a golden snapshot. -- [ ] AC3: Integration test: run a 10-tick simulation with at least 2 streams, verify `StreamsFrame` is emitted each tick with correct backlog and cursor values. -- [ ] AC4: The selected inspector command surface accepts a streams/admission - frame subscription or filter. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Streams/admission frame shape, serialization, emission, and -subscription. -**Out of Scope:** UI rendering of streams data (that is T-7-2-6); checkpoint -density metrics (deferred to TT2). 
- -**Test Plan:** - -- **Goldens:** Golden JSON snapshot for streams/admission frame serialization - (at least 2 streams, 3 recent decisions). -- **Failures:** Verify graceful handling when a stream has zero backlog; when a view has no cursor for a stream. -- **Edges:** Stream with exactly one event admitted at the current tick (boundary between empty and non-empty backlog). -- **Fuzz/Stress:** Property test: random stream/cursor configurations produce valid serialized frames. - -**Blocked By:** T-7-2-1, T-7-2-2, T-7-2-3, T-7-2-4 -**Blocking:** T-7-2-6, T-7-3-1 - -**Est. Hours:** 6h -**Expected Complexity:** ~400 LoC - ---- - -## T-7-2-6: Implement Constraint Lens panel — admission explain-why + counterfactual sliders (#203) - -**User Story:** As a designer tuning admission policies, I want a UI panel that explains why each event was admitted or rejected and lets me adjust policy parameters with counterfactual sliders so that I can iterate on admission budgets without modifying code. - -**Requirements:** - -- R1: Render recent stream/admission decision records from the inspector - frame in a scrollable list with admit/reject status and reason summary. -- R2: Display the policy parameters (budget, fairness order) that were active for each decision. -- R3: Provide counterfactual sliders for `max_events`, `max_bytes`, and `max_work_units` that re-evaluate the most recent tick's admission decisions locally (read-only "what-if", no mutation of the simulation). -- R4: Highlight decisions that would change under the adjusted parameters. - -**Acceptance Criteria:** - -- [ ] AC1: Panel renders in the inspector UI with at least the last 10 admission decisions. -- [ ] AC2: Each decision shows: stream_id, admitted range, policy_hash, budget values, and admit/reject. -- [ ] AC3: Moving a counterfactual slider recomputes and highlights changed decisions within 100ms. 
-- [ ] AC4: Panel degrades gracefully when no streams/admission frame data is - available (shows "no streams data" message). - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Constraint Lens panel UI; counterfactual re-evaluation of admission decisions. -**Out of Scope:** Persisting counterfactual parameter changes; applying adjusted parameters to future ticks; multi-tick counterfactual replay. - -**Test Plan:** - -- **Goldens:** Screenshot golden of panel with 3 streams, mixed admit/reject decisions. -- **Failures:** Panel with zero decisions; panel with a decision referencing a stream that no longer exists. -- **Edges:** Slider set to 0 (reject all); slider set to max u64 (admit all). -- **Fuzz/Stress:** Render 1000 decisions without UI freeze (< 16ms frame time). - -**Blocked By:** T-7-2-5 -**Blocking:** T-7-3-2 - -**Est. Hours:** 6h -**Expected Complexity:** ~500 LoC diff --git a/docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md b/docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md new file mode 100644 index 00000000..b4d9e513 --- /dev/null +++ b/docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md @@ -0,0 +1,71 @@ + + + +# WARPDrive POSIX Materialization Optic + +Status: cool idea. + +Depends on: + +- [There Is No Graph](../../../architecture/there-is-no-graph.md) +- [Echo Optics API Design](../../../design/0018-echo-optics-api-design/design.md) +- [Continuum Transport](../../../architecture/continuum-transport.md) + +## Why + +Humans and legacy tools operate on files, directories, and save events. The +Continuum operates on witnessed causal history, coordinates, optics, suffixes, +and holograms. 
+ +WARPDrive is the compatibility optic between those worlds: + +```text +POSIX/FUSE read -> bounded reading/materialization at a WARP coordinate +POSIX/FUSE write -> delta/hunk -> causal Intent/admission attempt +``` + +This keeps files as boundary readings instead of substrate truth. + +## Goal + +Design a WARPDrive architecture packet for a FUSE/POSIX mount that materializes +path-like readings from WARP coordinates and translates writes back into +candidate causal suffixes. + +## Likely files touched + +- `docs/architecture/there-is-no-graph.md` +- `docs/design/0018-echo-optics-api-design/design.md` +- `docs/design/continuum-runtime-and-cas-readings.md` +- future WARPDrive repository or crate, if this graduates out of cool ideas + +## Acceptance criteria + +- The design states that mounted files are materialized readings, not canonical + truth. +- Reads name coordinate, optic, aperture, witness basis, budget posture, and + residual/obstruction posture. +- Writes compute a delta against the prior reading and submit an Intent against + an explicit causal basis. +- Stale basis, missing evidence, policy denial, and conflict return typed + obstructions instead of silently mutating current state. +- The design explains how multiple human/agent lanes can operate without Git + worktrees by mounting different coordinates or strands. +- The design keeps Echo, `git-warp`, Wesley, Graft, and `warp-ttd` as peer + WARP optics rather than making WARPDrive a god runtime. + +## Non-goals + +- Do not implement FUSE in this card. +- Do not replace Git in current developer workflows in this card. +- Do not make files substrate truth. +- Do not require every WARP runtime to share an internal graph representation. + +## Test expectations + +- Future tests should prove read identity includes coordinate and optic law, + not just path bytes. +- Future tests should prove a stale write is rejected, staged, or obstructed + explicitly. 
+- Future tests should prove cache hits cannot answer a different coordinate or + aperture. diff --git a/docs/method/backlog/inbox/KERNEL_plugin-abi.md b/docs/method/backlog/inbox/KERNEL_plugin-abi.md deleted file mode 100644 index 536bad25..00000000 --- a/docs/method/backlog/inbox/KERNEL_plugin-abi.md +++ /dev/null @@ -1,222 +0,0 @@ - - - -# Plugin ABI - -> **Milestone:** Backlog | **Priority:** Unscheduled - -A C-compatible plugin ABI enabling third-party extensions to hook into the Echo runtime without recompilation. Covers spec, host loader, version negotiation, capability tokens, and a reference plugin. - -**Issues:** #26, #85, #86, #87, #88, #89 -**Chain:** #85 → #86 → #87 → #88 → #89 - -## T-10-1-1: Draft C ABI Spec (#85) - -**User Story:** As a plugin author, I want a clear C ABI specification so that I can write plugins in any language that targets C calling conventions. - -**Requirements:** - -- R1: Define the function signature contract (init, tick, shutdown hooks) -- R2: Define memory ownership rules (who allocates, who frees) -- R3: Define the vtable layout for host-provided callbacks -- R4: Specify alignment and padding guarantees for all struct types -- R5: Document ABI stability policy (semver rules for breaking changes) - -**Acceptance Criteria:** - -- [ ] AC1: Spec document exists at `docs/spec/SPEC-PLUGIN-ABI.md` -- [ ] AC2: Spec covers all five requirements above -- [ ] AC3: Spec includes at least one worked example (pseudocode or C) -- [ ] AC4: Spec reviewed by at least one other contributor - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Specification document only. No code. -**Out of Scope:** Implementation, version negotiation protocol, capability system. - -**Test Plan:** - -- **Goldens:** n/a (spec document) -- **Failures:** n/a -- **Edges:** n/a -- **Fuzz/Stress:** n/a - -**Blocked By:** none -**Blocking:** T-10-1-2 - -**Est. 
Hours:** 4h -**Expected Complexity:** ~300 lines (markdown) - ---- - -## T-10-1-2: C Header + Host Loader (#86) - -**User Story:** As the Echo runtime, I want to dynamically load plugin shared libraries via a C ABI so that plugins can be developed and deployed independently. - -**Requirements:** - -- R1: Generate a `echo_plugin.h` C header from the spec -- R2: Implement `PluginLoader` in Rust that `dlopen`s a `.so`/`.dylib`/`.dll` -- R3: Resolve the required symbol table (init, tick, shutdown) with clear error messages on missing symbols -- R4: Loader must validate ABI magic number before calling any plugin function -- R5: Loader must be `#[cfg(not(target_arch = "wasm32"))]` — no WASM support yet - -**Acceptance Criteria:** - -- [ ] AC1: `echo_plugin.h` is generated or hand-written and checked in -- [ ] AC2: `PluginLoader::load(path)` returns `Result` -- [ ] AC3: Missing-symbol and ABI-mismatch errors produce actionable diagnostics -- [ ] AC4: Unit tests cover successful load, missing symbol, and magic mismatch - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Header file, host-side loader, error types. -**Out of Scope:** Version negotiation, capability tokens, WASM target. - -**Test Plan:** - -- **Goldens:** Snapshot of generated `echo_plugin.h` -- **Failures:** Missing symbol, corrupt library, ABI magic mismatch -- **Edges:** Library with extra unexpected symbols (should still load) -- **Fuzz/Stress:** n/a - -**Blocked By:** T-10-1-1 -**Blocking:** T-10-1-3 - -**Est. Hours:** 6h -**Expected Complexity:** ~400 LoC - ---- - -## T-10-1-3: Version Negotiation (#87) - -**User Story:** As the Echo runtime, I want to negotiate ABI versions with plugins at load time so that incompatible plugins are rejected gracefully instead of causing undefined behavior. 
- -**Requirements:** - -- R1: Define a version negotiation handshake (host offers supported range, plugin declares its version) -- R2: Implement `negotiate_version()` call as part of the plugin init sequence -- R3: Support a compatibility matrix: exact match, minor-compatible, and rejected -- R4: Log negotiation result at `info` level - -**Acceptance Criteria:** - -- [ ] AC1: `PluginHandle` exposes `negotiated_version()` after successful load -- [ ] AC2: Incompatible version returns `PluginError::VersionMismatch` with both versions in the message -- [ ] AC3: Tests cover exact match, minor-compatible, and mismatch scenarios - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Version handshake protocol and implementation. -**Out of Scope:** Automatic plugin updates, capability negotiation. - -**Test Plan:** - -- **Goldens:** n/a -- **Failures:** Version mismatch (major), version mismatch (too old minor) -- **Edges:** Plugin at exact max supported version, plugin at min supported version -- **Fuzz/Stress:** n/a - -**Blocked By:** T-10-1-2 -**Blocking:** T-10-1-4 - -**Est. Hours:** 4h -**Expected Complexity:** ~200 LoC - ---- - -## T-10-1-4: Capability Tokens (#88) - -**User Story:** As a host operator, I want to grant plugins fine-grained capability tokens so that untrusted plugins cannot access resources they were not explicitly authorized for. 
- -**Requirements:** - -- R1: Define a `Capability` enum (e.g., `ReadState`, `WriteState`, `Network`, `FileSystem`) -- R2: Plugin init receives a `CapabilitySet` from the host -- R3: Host-side callbacks validate capability before executing -- R4: Capability violations return `PluginError::CapabilityDenied` (not a panic) -- R5: Capabilities are immutable after init — no runtime escalation - -**Acceptance Criteria:** - -- [ ] AC1: `CapabilitySet` type is defined and documented -- [ ] AC2: At least three capability variants exist -- [ ] AC3: Host callbacks check capabilities; denied access returns a typed error -- [ ] AC4: Tests cover granted access, denied access, and empty capability set - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Capability type, token validation in host callbacks. -**Out of Scope:** Persistent capability storage, capability delegation between plugins. - -**Test Plan:** - -- **Goldens:** n/a -- **Failures:** Capability denied for each variant, empty set denies all -- **Edges:** Plugin requesting capability not in the enum (forward-compat) -- **Fuzz/Stress:** n/a - -**Blocked By:** T-10-1-3 -**Blocking:** T-10-1-5 - -**Est. Hours:** 5h -**Expected Complexity:** ~300 LoC - ---- - -## T-10-1-5: Example Plugin + Tests (#89) - -**User Story:** As a plugin author, I want a reference plugin implementation with integration tests so that I have a concrete starting point for building my own plugins. 
- -**Requirements:** - -- R1: Write a trivial "echo" plugin in C that receives state, transforms it, and returns it -- R2: Plugin exercises init, tick, and shutdown hooks -- R3: Plugin uses at least one capability token -- R4: Integration test loads the plugin, runs a tick, and asserts the output -- R5: Include a `Makefile` or `build.rs` step to compile the example plugin - -**Acceptance Criteria:** - -- [ ] AC1: Example plugin compiles on CI (Linux + macOS) -- [ ] AC2: Integration test passes end-to-end (load → negotiate → tick → shutdown) -- [ ] AC3: Example plugin is documented with inline comments explaining each ABI hook -- [ ] AC4: README in `examples/plugin/` explains how to build and run - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Reference plugin, build script, integration tests, example README. -**Out of Scope:** Complex plugin logic, plugin registry, package distribution. - -**Test Plan:** - -- **Goldens:** Snapshot of plugin output for a known input state -- **Failures:** Plugin that panics in init, plugin that returns error from tick -- **Edges:** Plugin with no capabilities, plugin with all capabilities -- **Fuzz/Stress:** n/a - -**Blocked By:** T-10-1-4 -**Blocking:** none - -**Est. Hours:** 6h -**Expected Complexity:** ~500 LoC (C + Rust + build glue) diff --git a/docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md b/docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md index ff24f944..885129be 100644 --- a/docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md +++ b/docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md @@ -7,8 +7,10 @@ Status: planned kernel hardening. 
Depends on: -- [Wesley to Echo toy contract proof](./PLATFORM_wesley-to-echo-toy-contract-proof.md) -- [Reading envelope family boundary](./PLATFORM_reading-envelope-family-boundary.md) +- Accepted + [0016 - Wesley To Echo Toy Contract Proof](../../../design/0016-wesley-to-echo-toy-contract-proof/design.md) +- Accepted + [0019 - Reading Envelope Family Boundary](../../../design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md) ## Why now diff --git a/docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md b/docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md index 821724ad..a4803c5d 100644 --- a/docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md +++ b/docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md @@ -3,12 +3,13 @@ # Contract Strands And Counterfactuals +Folded from: #245 + Status: planned kernel/runtime implementation. Depends on: - [Graft live frontier structural readings](./PLATFORM_graft-live-frontier-structural-readings.md) -- [Live holographic strands](../asap/KERNEL_live-holographic-strands.md) - [0010 - Live-basis settlement correction plan](../../../design/0010-live-basis-settlement-plan/design.md) ## Why now @@ -32,6 +33,20 @@ Add generic contract-aware strand operations: - compare strand with parent basis - admit, preserve plurality, conflict, obstruct, or discard +## Merge and settlement semantics + +The old TT1 framing asked about merging debugger-era per-source admission +records across worldlines. The current Echo substrate should not preserve that +as a special ontology. 
The useful requirement belongs here: + +- divergent work is represented as worldline/strand/braid history +- each member has an explicit causal basis and actor/cause evidence +- settlement is an Intent/admission operation, not a direct service mutation +- independent work may admit, stage, preserve plurality, conflict, or obstruct +- conflicts are typed by contract law and witness basis, not by host-time order +- adapters such as `warp-ttd` can explain these outcomes, but Echo owns the + substrate decision and receipt evidence + ## Acceptance criteria - A fake contract intent can be applied inside a strand without changing the @@ -40,6 +55,8 @@ Add generic contract-aware strand operations: contract identity. - Parent movement outside owned divergence revalidates cleanly. - Parent overlap returns explicit conflict or obstruction. +- Settlement of divergent contract work produces a typed admission posture and + receipt/witness evidence. - `jedit` and Graft examples remain consumer-level, not Echo core APIs. ## Non-goals @@ -48,3 +65,4 @@ Add generic contract-aware strand operations: - Do not implement semantic refactor prediction here. - Do not add text or Graft domain types to Echo core. - Do not require durable strand persistence in the first slice. +- Do not model historical stream/debugger frame names as Echo core nouns. diff --git a/docs/method/backlog/up-next/KERNEL_time-model-spec.md b/docs/method/backlog/up-next/KERNEL_time-model-spec.md deleted file mode 100644 index bd0f7d93..00000000 --- a/docs/method/backlog/up-next/KERNEL_time-model-spec.md +++ /dev/null @@ -1,94 +0,0 @@ - - - -> **Milestone:** Time Semantics Lock | **Priority:** P1 - -# TT0 — Time Model Spec Lock - -Lock the vocabulary and semantics for HistoryTime vs HostTime, tick-based TTL/deadlines, and the StreamAdmissionDecision digest chain. This is a spec-only feature — no runtime code, only documents and their review artifacts. 
- -**Issues:** #191, #192 - ---- - -## T-7-1-1: Spec — HistoryTime vs HostTime field classification (#191) - -**User Story:** As a contributor implementing time-aware adapters, I want a single authoritative document that classifies every session-stream time field as HistoryTime (deterministic, ordering/replay) or HostTime (telemetry only) so that I never accidentally introduce nondeterminism through a time field. - -**Requirements:** - -- R1: Produce a table in `docs/spec-time-streams-and-wormholes.md` listing every known time field across `StreamAdmissionDecision`, `ClockDecision`, `EventEnvelope`, `InspectorEnvelope`, and session-proto messages. -- R2: Each field is classified as HistoryTime or HostTime with a one-line rationale. -- R3: Add a "decision record" rule: any adapter that consults HostTime must emit a canonical decision record before the simulation consumes the result. -- R4: Cross-reference `docs/spec/merkle-commit.md` for fields that feed into `admission_digest`. - -**Acceptance Criteria:** - -- [ ] AC1: Classification table exists in `docs/spec-time-streams-and-wormholes.md` with at least 10 fields classified. -- [ ] AC2: No field is left unclassified ("TBD" entries are explicit open questions with tracking issues). -- [ ] AC3: The decision-record rule is stated as a normative requirement, not advisory text. -- [ ] AC4: At least one reviewer has confirmed the classifications against the existing `echo-session-proto` message definitions. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Classification of existing time fields; normative HistoryTime/HostTime rule. -**Out of Scope:** Runtime enforcement of the rule; new adapter code; changes to `echo-session-proto` wire format. 
- -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** Verify that `sender_tick` in `EventEnvelope` is correctly classified as HistoryTime (not HostTime), even though it originates from a remote host. -- **Fuzz/Stress:** n/a - -**Blocked By:** none -**Blocking:** T-7-1-2, T-7-2-1 - -**Est. Hours:** 3h -**Expected Complexity:** ~150 LoC (markdown) - ---- - -## T-7-1-2: Spec — TTL/deadline semantics are ticks only (#192) - -**User Story:** As a game designer using Echo, I want certainty that all TTL and deadline semantics use deterministic tick/epoch counts so that my game logic replays identically regardless of host performance. - -**Requirements:** - -- R1: Add a normative section to `docs/spec-time-streams-and-wormholes.md` stating that all TTL and deadline semantics use Chronos ticks or epoch counts, never wall-clock durations. -- R2: Document the "timer as stream" pattern: a system requests a timer via a deterministic event; an adapter fires it; the simulation consumes the recorded firing decision. -- R3: Enumerate known TTL/deadline touch points (session keep-alive, admission budgets, retry policies, wormhole expiry) and confirm each is tick-denominated. -- R4: Add a "violation checklist" — signs that wall-clock time has leaked into semantic state. - -**Acceptance Criteria:** - -- [ ] AC1: Normative "no wall-clock TTL" rule is present in the spec. -- [ ] AC2: Timer-as-stream pattern is documented with a minimal worked example. -- [ ] AC3: At least 4 known TTL/deadline touch points are enumerated and confirmed tick-only. -- [ ] AC4: Violation checklist has at least 3 items. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** Spec text and worked example for tick-only deadlines. -**Out of Scope:** Runtime linting or compile-time enforcement; changes to adapter implementations. 
- -**Test Plan:** - -- **Goldens:** n/a (spec-only) -- **Failures:** n/a -- **Edges:** Clarify what happens when a tick-based TTL expires during a paused view (answer: it does not expire until the view advances). -- **Fuzz/Stress:** n/a - -**Blocked By:** T-7-1-1 -**Blocking:** T-7-2-1 - -**Est. Hours:** 3h -**Expected Complexity:** ~120 LoC (markdown) diff --git a/docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md b/docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md index 9ec602f9..8c4d7fba 100644 --- a/docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md +++ b/docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md @@ -5,11 +5,59 @@ Ref: #246 -Define the capability model for timeline mutation operations. Who is -allowed to fork a worldline? Rewind? Merge? What are the security -contexts (local debug, multiplayer session, untrusted plugin)? - -This is a Core Echo concern — the kernel owns the capability checks. -warp-ttd will be the primary consumer of these capabilities through -the `TtdHostAdapter` interface. Coordinate with warp-ttd on how -capabilities are declared and enforced. +Status: planned kernel/runtime design. + +This is the canonical Echo follow-up for timeline capability law. Echo owns the +capability decision, the typed denial/obstruction posture, and the witnessed +admission result. `warp-ttd` owns debugger session semantics, transport, and UI +surfaces that consume those Echo results. + +## Why now + +Fork, seek, rewind, merge, settlement, and counterfactual execution are not +ordinary host actions. They expose causal authority over worldlines, strands, +braids, and retained readings. The kernel needs a small, typed capability model +before those operations become public surfaces. 
+ +## Required shape + +Define capability checks for: + +- opening an observer at a coordinate or frontier +- seeking a view to an older coordinate +- creating a strand or fork from a coordinate +- dispatching intents into a forked strand +- admitting, staging, rejecting, or collapsing divergent work +- merging or settling a strand/braid back into another frontier +- revealing retained readings or witness material + +The capability model must name: + +- actor/cause identity +- session or host authority scope +- subject/focus being acted on +- coordinate/frontier basis +- rights being exercised +- denial and obstruction codes +- receipt or witness evidence emitted on success + +## Acceptance criteria + +- Capability names and denial/obstruction codes are documented for seek, fork, + dispatch, merge, settlement, and witness reveal. +- Per-session and per-actor grants can be represented without relying on host + wall-clock ordering or mutable global state. +- Revocation behavior is explicit: active forks/strands become staged, + obstructed, or quarantined by typed posture; they are not silently destroyed. +- Provenance sovereignty is stated as a normative rule: a branch or strand + carries actor/cause evidence, and settlement requires authority over the + target frontier. +- A future `warp-ttd` adapter can ask Echo what capabilities exist and can show + typed denials, but does not become the source of kernel truth. + +## Non-goals + +- Do not design a debugger panel or debugger protocol here. +- Do not add a global mutable state API. +- Do not make rewinds or merges host-time ordered. +- Do not collapse capability failure into boolean success or string status. 
diff --git a/docs/method/backlog/up-next/KERNEL_timestream-retention.md b/docs/method/backlog/up-next/KERNEL_timestream-retention.md deleted file mode 100644 index cf565675..00000000 --- a/docs/method/backlog/up-next/KERNEL_timestream-retention.md +++ /dev/null @@ -1,19 +0,0 @@ - - - -# TimeStream retention + spool compaction + wormhole density - -Ref: #244 - -Define the storage lifecycle for time streams: - -- Retention policy: how long do old ticks stay materialized? -- Spool compaction: when and how to compact the provenance log? -- Wormhole density: how often to create checkpoints for fast seek? - -`ProvenanceStore::checkpoint_before()` exists as a trait method but -checkpoint creation and compaction are not implemented. - -Affects warp-ttd — retention policy determines what the debugger -can replay. If old ticks are compacted away, the debugger can't -seek to them. diff --git a/docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md b/docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md new file mode 100644 index 00000000..2490d930 --- /dev/null +++ b/docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md @@ -0,0 +1,85 @@ + + + +# WARP optic boundary audit for topology and history operations + +Status: planned kernel audit. + +Depends on: + +- [0022 - Continuum transport identity and import idempotence](../../../design/0022-continuum-transport-identity/design.md) + +## Why now + +Echo now requires external topology-changing operations to be causal and +Intent-driven. The codebase still has useful internal services for provenance +forking, strand registration, support pins, settlement, and witnessed suffix +classification. Those can remain implementation details, but we need a precise +inventory before adding public Intent wrappers. + +The stronger doctrine is that topology mutation is only one posture of the +same WARP optic shape. 
Tick admission, transport import, fork, merge, braid, +settlement, support mutation, inverse admission, observation, materialization, +and hologram slicing all choose a bounded causal basis/site, apply a law, and +produce a witnessed hologram. This audit should keep the write-side focus +narrow while naming that shared boundary. + +## Goal + +Classify every topology/history/projection surface as one of: + +- internal implementation helper +- read/observation surface +- external mutation surface that must gain an Intent path +- legacy/debug ABI surface that must be documented as temporary +- retention/reveal surface that must be keyed by read identity and witness basis + +## Likely files touched + +- `crates/warp-core/src/provenance_store.rs` +- `crates/warp-core/src/coordinator.rs` +- `crates/warp-core/src/strand.rs` +- `crates/warp-core/src/settlement.rs` +- `crates/warp-core/src/witnessed_suffix.rs` +- `crates/warp-core/src/observation.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-wasm/src/lib.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `docs/architecture/there-is-no-graph.md` +- `docs/architecture/continuum-transport.md` + +## Acceptance criteria + +- The audit lists every current direct external mutation candidate, including: + - provenance fork + - strand registration + - support pin/unpin + - settlement execution + - braid/member mutation surfaces if present + - import suffix admission + - inverse/compensating operations if present +- Each surface is classified as internal, read-only, Intent-required, or + legacy/debug temporary. +- The audit identifies the minimum Intent wrappers needed for the next runtime + cuts. +- The audit identifies read/materialization/retention surfaces that must stay + observer-relative and hologram/read-identity keyed rather than becoming + hidden graph-state fallbacks. +- No code behavior changes are required unless a test exposes an unsafe public + mutation path that can be sealed cheaply. 
+ +## Non-goals + +- Do not implement all wrappers in the audit card. +- Do not delete internal services. +- Do not block read-only compare/plan/observe surfaces. +- Do not add a global graph API. +- Do not turn materialization or retention into canonical graph state. + +## Test expectations + +- Static or targeted tests should prove any newly classified public mutation + path is either Intent-backed or explicitly marked legacy/debug. +- Static or targeted tests should prove classified read/materialization paths + either return bounded readings/holograms or are explicitly internal helpers. +- Existing settlement and strand tests remain green. diff --git a/docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md b/docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md index 906351d3..3ee9eb5e 100644 --- a/docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md +++ b/docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md @@ -7,7 +7,8 @@ Status: proposed security hardening. Depends on: -- [Wesley to Echo toy contract proof](./PLATFORM_wesley-to-echo-toy-contract-proof.md) +- Accepted + [0016 - Wesley To Echo Toy Contract Proof](../../../design/0016-wesley-to-echo-toy-contract-proof/design.md) - [Contract-aware receipts and readings](./KERNEL_contract-aware-receipts-and-readings.md) - [0017 - Authenticated Wesley Intent Admission Posture](../../../design/0017-authenticated-wesley-intent-admission-posture/design.md) diff --git a/docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md b/docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md new file mode 100644 index 00000000..4dc63d05 --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md @@ -0,0 +1,61 @@ + + + +# Braid and settlement Intent paths + +Status: planned implementation slice. 
+ +Depends on: + +- [Topology mutation Intent boundary audit](./KERNEL_topology-mutation-intent-boundary-audit.md) +- [Strand and support Intent paths](./PLATFORM_strand-and-support-intent-paths.md) + +## Why now + +Braids and settlement decide how plural causal histories are projected, +retained, imported, or collapsed. Those decisions must be causal and replayable, +not direct host mutations. + +## Goal + +Add Intent-level external paths for braid member append/collapse/settlement and +strand settlement execution, while keeping compare/plan/read surfaces +side-effect free. + +## Likely files touched + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/settlement.rs` +- `crates/warp-core/src/neighborhood.rs` +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/cmd.rs` +- `crates/warp-wasm/src/lib.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/warp-core/tests/**` + +## Acceptance criteria + +- Compare/plan settlement remain read-only publication surfaces. +- Executing settlement has an Intent equivalent and records causal receipt + evidence. +- Appending a braid member, settling/collapsing a braid, or admitting a braid + projection is represented as an Intent when exposed externally. +- Preserved plurality remains typed; it is not hidden as success/no-op. +- Direct settlement execution ABI surfaces, if retained temporarily, are marked + compatibility/debug and are not required by jedit-style or Continuum-style + flows. + +## Non-goals + +- Do not add jedit text operations. +- Do not flatten support pins into imports. +- Do not implement network transport. +- Do not make braid projection cached text canonical. + +## Test expectations + +- Settlement execution through Intent emits receipt/witness evidence. +- Direct settlement execution is not required by the public flow test. +- Plural and conflict outcomes remain visible. +- Braid member append requires explicit basis and does not mutate stale + projection silently. 
diff --git a/docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md b/docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md index d17b711f..c5b840fb 100644 --- a/docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md +++ b/docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md @@ -37,6 +37,8 @@ For the first cut, the proof slice should cover: - one rewrite op with declared footprint - one valid implementation - one invalid compile-fail implementation +- one generated artifact hash / footprint certificate that Echo can check at + load time ## Done looks like @@ -44,6 +46,8 @@ For the first cut, the proof slice should cover: - handwritten proof-slice ABI/runtime DTOs are removed or proven isomorphic temporary shims - one invalid rewrite that exceeds its declared footprint fails to compile +- the generated proof slice exposes a stable artifact hash or certificate hash + that Echo can compare before trusting the optimized path - runtime guards remain as second-line safety, not the only proof - the browser/WASM host bridge is still able to publish the resulting proof family diff --git a/docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md b/docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md index bf0c37a3..2b73bf15 100644 --- a/docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md +++ b/docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md @@ -9,19 +9,27 @@ Depends on: - [Contract-aware receipts and readings](./KERNEL_contract-aware-receipts-and-readings.md) - [Echo Continuum Runtime And CAS Readings](../../../design/continuum-runtime-and-cas-readings.md) -- [echo-cas Browser](./PLATFORM_echo-cas-browser.md) +- [echo-cas Browser Integration](../../../design/0020-echo-cas-browser/echo-cas-browser.md) +- [WSC, Verkle, IPA, And Retained 
Readings](../../../architecture/wsc-verkle-ipa-retained-readings.md) ## Why now Echo's doctrine says `echo-cas` stores retained witnesses and cached readings, but the contract-hosting path needs concrete retention rules for generated -contract artifacts. +contract artifacts and bounded optic readings. CAS hashes name bytes. Semantic lookup keys name the question those bytes answer. This matches existing `echo-cas` policy: CAS hashes are content-only, while domain separation belongs in typed references and semantic coordinates above the blob store. +This card also carries the modern replacement for the retired retention wording +in #244: Echo should stay holographic. It should retain witnesses, receipts, +coordinates, and cached bounded readings; it should not materialize the entire +graph state every tick. When memory or disk pressure appears, cache and index +eviction is legal storage policy, but required evidence must either be +rehydrated or produce an explicit obstruction. + ## What it should look like Define and implement minimal retention for: @@ -37,6 +45,17 @@ Semantic lookup should include contract identity, schema hash, basis, observer or intent kind, aperture or payload identity, and law/projection version where applicable. +Storage tiers may use content-defined chunking for large retained artifacts or +reading payloads. Variable chunk sizes, MIME-aware chunk policy, and buzhash-like +chunk boundary selection are implementation options for deduplication and space +savings. Those choices are not causal semantics and must not affect Intent +identity, tick identity, receipt identity, read identity, or replay outcome. + +The future retained-reading stack is WSC-backed and proof-ready: WSC provides +canonical columnar reading/checkpoint bytes, Verkle-style commitments may +authenticate WSC coordinates, IPA-style openings may support bounded apertures, +and `echo-cas` remains byte retention only. 
+ ## Acceptance criteria - Stored contract receipt can be loaded by content hash. @@ -44,6 +63,10 @@ applicable. - Semantic lookup includes contract and schema identity. - Cached reading is not reused for a newer live frontier unless a proof of containment or equivalent witness relation exists. +- Large retained payloads may be stored through chunked CAS layout without + changing their semantic read identity. +- Missing locally retained witness material returns obstruction or + rehydration-required posture, not a fake cache hit. - Garbage collection remains storage policy and does not mutate truth. ## Non-goals diff --git a/docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md b/docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md new file mode 100644 index 00000000..b23d81c7 --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md @@ -0,0 +1,56 @@ + + + +# echo-cas JS Bindings + +Status: planned follow-up. + +Depends on: + +- [0020 - echo-cas Browser Integration](../../../design/0020-echo-cas-browser/echo-cas-browser.md) + +This is the follow-up split from `0020`. The WASM compilation gate is handled +there; this card keeps the JavaScript binding work visible as its own backlog +task. + +## T-4-3-2: JS bindings for CAS store/retrieve + +**User Story:** As a web developer, I want to store and retrieve blobs from +JavaScript so that browser consumers can persist snapshots in content-addressed +storage. + +**Requirements:** + +- R1: Expose `WasmBlobStore` class via wasm-bindgen wrapping a `MemoryTier`. +- R2: Methods: `put(bytes: Uint8Array) -> string` (returns hex hash), + `get(hash_hex: string) -> Uint8Array | undefined`, and + `has(hash_hex: string) -> boolean`. +- R3: `put_verified(hash_hex: string, bytes: Uint8Array)` returns a result + indicating success or hash mismatch. +- R4: Pin and unpin are exposed as `pin(hash_hex)` and `unpin(hash_hex)`. 
+ +**Acceptance Criteria:** + +- [ ] AC1: JS round-trip: `put(data)` returns hash H, `get(H)` returns identical + bytes. +- [ ] AC2: `get` for a non-existent hash returns `undefined`, not an exception. +- [ ] AC3: `put_verified` with mismatched hash throws a structured error with + `code: "HASH_MISMATCH"`. +- [ ] AC4: TypeScript type definitions are generated by wasm-bindgen and + type-check cleanly. + +**Scope:** wasm-bindgen bindings for `BlobStore` trait methods. Hex hash string +interface. + +**Out of Scope:** Budget and eviction controls from JS. Batch put. Streaming +put. + +**Test Plan:** + +- **Goldens:** Hash of `b"hello echo-cas"` matches the BLAKE3 reference vector + by hex string comparison. +- **Failures:** `put_verified` mismatch. `get` with invalid hex string: odd + length or non-hex chars. +- **Edges:** Empty blob, 10 MB blob, and putting the same blob twice remains + idempotent. +- **Stress:** Store 10,000 random blobs, retrieve all, and verify integrity. diff --git a/docs/method/backlog/up-next/PLATFORM_echo-wesley-gen-v2.md b/docs/method/backlog/up-next/PLATFORM_echo-wesley-gen-v2.md deleted file mode 100644 index 3c25326a..00000000 --- a/docs/method/backlog/up-next/PLATFORM_echo-wesley-gen-v2.md +++ /dev/null @@ -1,50 +0,0 @@ - - - -# echo-wesley-gen v2 Update - -> **Milestone:** First Light | **Priority:** P1 | **Repo:** Echo - -Echo-repo work. The `crates/echo-wesley-gen` crate currently consumes `echo-ir/v1` JSON. Update it to handle the `echo-ir/v2` format that Wesley will emit after QIR Phase C, including new fields for query operations and migration metadata. - -## T-2-4-1: Update echo-wesley-gen IR deserializer for v2 format - -**User Story:** As an Echo developer, I want echo-wesley-gen to consume the v2 IR format so that new Wesley features (QIR operations, migration metadata) are available in generated Rust types. 
- -**Requirements:** - -- R1: Extend `WesleyIR` struct in `crates/echo-wesley-gen/src/ir.rs` with v2 fields: `queries` (QIR operation catalog), `migrations` (migration plan references), `blake3_schema_hash` (optional, for future BLAKE3 migration). -- R2: Maintain backward compatibility: v1 IR files (missing v2 fields) must still deserialize successfully via serde defaults. -- R3: Code generation must produce Rust types for QIR operation argument structs. -- R4: Add integration test with a v2 IR fixture file. - -**Acceptance Criteria:** - -- [ ] AC1: A v2 IR JSON with `queries` field deserializes into `WesleyIR` with populated query catalog. -- [ ] AC2: A v1 IR JSON (no `queries` field) still deserializes without error (backward compat). -- [ ] AC3: Generated Rust code for a query operation compiles and includes argument types. -- [ ] AC4: Integration test in `crates/echo-wesley-gen/tests/generation.rs` covers v2 IR. - -**Definition of Done:** - -- [ ] Code reviewed and merged -- [ ] Tests pass (CI green) -- [ ] Documentation updated (if applicable) - -**Scope:** `crates/echo-wesley-gen/src/ir.rs` (v2 fields), `crates/echo-wesley-gen/src/main.rs` (codegen for queries), test fixtures. -**Out of Scope:** Runtime query execution in Echo. Migration execution. BLAKE3 hash computation (planning only, see T-2-5-1). - -**Test Plan:** - -- **Goldens:** Snapshot test of generated Rust code from a v2 IR fixture. -- **Failures:** Malformed v2 IR (missing required v2 sub-fields), invalid query operation shapes. -- **Edges:** v2 IR with empty `queries` array, v2 IR with zero types but non-empty queries. -- **Fuzz/Stress:** N/A. - -**Blocked By:** none (can be developed against a draft v2 IR spec before Wesley ships QIR) -**Blocking:** none - -**Est. 
Hours:** 5h -**Expected Complexity:** ~200 LoC (IR struct extensions ~50, codegen ~100, tests ~50) - ---- diff --git a/docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md b/docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md index 59a39519..4182b528 100644 --- a/docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md +++ b/docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md @@ -3,10 +3,6 @@ # Import outcome idempotence and loop law -Depends on: - -- [PLATFORM_witnessed-suffix-admission-shells](../asap/PLATFORM_witnessed-suffix-admission-shells.md) - ## Why now Suffix-shell work already says the right big thing: @@ -48,7 +44,35 @@ That is not strong enough for the runtime boundary Echo now wants to expose. - the runtime can explain why a bundle was not novel without collapsing into silent "no-op" folklore +## Locked decision + +The task design is now captured in: + +- `docs/design/0022-continuum-transport-identity/design.md` +- `docs/architecture/continuum-transport.md` + +For this card, Echo's witnessed suffix model is the source shape for the shared +Continuum transport family. Continuum should promote Echo's +`WitnessedSuffixShell`, `CausalSuffixBundle`, `ImportSuffixResult`, and typed +admission outcome family instead of preserving the older thin `SuffixShell` +placeholder. Import idempotence is shell equivalence under retained causal +evidence, not visible-state equality, host-time ordering, summary strings, or +runtime-local Lamport/tick hashes. + +## Implementation split + +This card is now the import-idempotence umbrella. The executable slices are: + +- `docs/method/backlog/asap/PLATFORM_import-transport-intent-admission-path.md` +- `docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md` + +The first slice makes inbound transport causal by routing it through an import +Intent. 
The second slice adds retained novelty/idempotence posture after the +Intent path exists. + ## Repo evidence - `docs/design/0009-witnessed-causal-suffix-sync/design.md` +- `docs/design/0022-continuum-transport-identity/design.md` +- `docs/architecture/continuum-transport.md` - `docs/architecture/WARP_DRIFT.md` diff --git a/docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md b/docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md new file mode 100644 index 00000000..06ebaff5 --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md @@ -0,0 +1,60 @@ + + + +# Import outcome retention and novelty index + +Status: planned implementation slice. + +Depends on: + +- [Import transport Intent admission path](../asap/PLATFORM_import-transport-intent-admission-path.md) + +## Why now + +Once transport import is Intent-driven, repeated import has to be classified by +retained causal evidence instead of folklore. Echo must be able to say whether a +bundle is new, already adjudicated, self-history returning through a peer, +support supplement, alternate support path, or state-equivalent but +witness-distinct. + +## Goal + +Retain enough import outcome identity to make exact bundle re-import +idempotent and loop posture inspectable. + +## Likely files touched + +- `crates/warp-core/src/witnessed_suffix.rs` +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/provenance_store.rs` +- `crates/warp-core/src/coordinator.rs` +- `crates/warp-core/tests/**` +- `docs/design/0022-continuum-transport-identity/design.md` + +## Acceptance criteria + +- Exact re-import of the same `CausalSuffixBundle` is classified as already + adjudicated, not fresh admission. +- Self-history arriving through another runtime is classified as loop/self-echo + posture, not remote novelty. +- Same visible state with different shell or witness identity is not deduped by + state hash alone. 
+- Import novelty posture is retained with the local receipt/witness. +- Bundle digest and source shell digest participate in retained import identity. +- Runtime-local ticks, Lamport-like ordering, and local receipt hashes are not + portable duplicate keys. + +## Non-goals + +- Do not add network transport. +- Do not require `git-warp`. +- Do not make CAS byte hashes equivalent to causal import identity. +- Do not collapse support supplements into no-op strings. + +## Test expectations + +- First import records a retained outcome. +- Re-import of the exact same bundle returns deterministic already-adjudicated + posture. +- Self-echo fixture returns loop/self-history posture. +- State-equivalent but witness-distinct fixture remains distinct. diff --git a/docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md b/docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md new file mode 100644 index 00000000..f57ca974 --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md @@ -0,0 +1,55 @@ + + + +# Inverse operation Intent path + +Status: planned implementation slice. + +Depends on: + +- [Topology mutation Intent boundary audit](./KERNEL_topology-mutation-intent-boundary-audit.md) + +## Why now + +Undo/unapply must not delete history or rewrite old provenance. The only lawful +write-side surface is to append a contract-defined inverse or compensating +operation through Echo admission. + +## Goal + +Add the generic Intent-level path for requesting contract inverse admission +against an explicit target tick/receipt range and current basis. 
+ +## Likely files touched + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/witnessed_suffix.rs` +- `crates/warp-core/src/optic.rs` +- `crates/warp-core/src/cmd.rs` +- `crates/warp-core/src/provenance_store.rs` +- `crates/warp-core/tests/**` + +## Acceptance criteria + +- External unapply/undo submits an Intent against explicit target receipt/tick + and current basis. +- The contract or installed handler produces inverse Intent(s) or typed + obstruction. +- Original target ticks/receipts remain in provenance. +- Resulting inverse tick/receipt links back to the original target evidence. +- Missing inverse fragments, stale basis, compressed/cold unavailable history, + or unmappable causal spans return typed obstruction/conflict posture. + +## Non-goals + +- Do not implement generic blind inverse of `WarpOp` as the user-facing model. +- Do not delete or rewrite historical ticks. +- Do not add app-specific text editing operations to Echo core. +- Do not solve all retention/compaction policy here. + +## Test expectations + +- RED/GREEN fixture appends an inverse tick rather than removing history. +- Provenance length increases. +- Original tick remains present. +- Missing inverse evidence obstructs deterministically. diff --git a/docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md b/docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md index 51de73d8..a8511cf2 100644 --- a/docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md +++ b/docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md @@ -1,27 +1,38 @@ -# jedit Text Contract MVP +# jedit Text Contract Hosting MVP -Status: planned consumer proof. +Status: planned Echo host integration proof. 
Depends on: - [Contract artifact retention in echo-cas](./PLATFORM_contract-artifact-retention-in-echo-cas.md) +- [Wesley footprint honesty artifact attestation](./PLATFORM_wesley-footprint-honesty-artifact-attestation.md) +- [WSC, Verkle, IPA, And Retained Readings](../../../architecture/wsc-verkle-ipa-retained-readings.md) +- external `jedit` Text File Optic contract surface - external `jedit` hot text runtime port - external Wesley contract authoring support ## Why now `jedit` is the first serious consumer for Echo as a Wesley-compiled contract -host. The text-editing model must remain application-specific. Echo should host -the generated contract through generic dispatch and observation surfaces. +host. The text-editing model, GraphQL contract, editor runtime, buffer law, +edit-group law, and UI behavior all remain application-specific and belong in +the external `jedit` repo. + +Echo should only prove that an externally authored and Wesley-compiled `jedit` +contract can use generic dispatch, observation, registry, receipt, reading, and +retention surfaces. ## What it should look like -Define a `jedit` GraphQL contract for the minimum hot editing loop. +Do not define or implement the `jedit` GraphQL contract in Echo. The external +`jedit` repo owns that contract and adapts its hot text runtime to generated +contract helpers. -Candidate intents: +This Echo card proves host compatibility for externally owned operation +families such as: - create buffer - replace range @@ -37,22 +48,59 @@ Candidate observations: - edit group history - checkpoint status -Wesley compiles the contract. `jedit` adapts its existing hot text runtime port -to use generated app-level code that validates payloads, packs EINT op ids and -vars, calls Echo's existing `dispatch_intent(...)`, reads registry metadata for -handshake, and decodes observations. +Wesley compiles the external contract. 
`jedit` adapts its existing hot text +runtime port to generated app-level code that validates payloads, packs EINT op +ids and vars, calls Echo's existing `dispatch_intent(...)`, reads registry +metadata for handshake, and decodes observations. Echo stays the generic host. + +Echo tests may use generated `jedit` Wesley output as a fixture. That fixture is +consumer evidence, not Echo-owned source authority: it should be refreshed from +the external `jedit` contract generation path and used only to prove generic +host behavior. + +For retained text readings and checkpoints, `jedit` owns the hot rope model. +Echo's future generic storage/proof direction is WSC-backed retained readings: +the rope projects to canonical WSC bytes, `echo-cas` stores those bytes, and +future Verkle/IPA proof-carrying apertures can verify selected buffer ranges or +rope leaves without materializing the full reading. + +## Progress notes + +- Echo now has a generic `echo_registry_api::verify_contract_artifact(...)` + load-time verifier. This is the host-side check `jedit` needs before Echo + treats a generated registry's footprint certificate as compile-time-certified. +- `echo-wesley-gen` smoke coverage proves the verifier works against generated + registry output without adding text-editing or `jedit`-specific code to Echo. ## Acceptance criteria -- `jedit` can create a buffer through a contract intent. -- `jedit` can submit a replace-range intent through Echo. -- Echo accepts the generated EINT through the existing WASM intent ingress. -- `jedit` can observe a buffer reading through Echo. -- Save checkpoint produces a retained contract artifact. -- Echo core contains no text-specific APIs outside generated contract payloads. +- Echo can install or accept registry metadata for an externally generated + `jedit` contract artifact. +- Echo can verify the loaded generated `jedit` artifact or footprint + certificate hash before treating its declared footprint as compile-time + certified. 
+- Echo integration tests can exercise generated `jedit` Wesley fixture output + without requiring Echo to author text-editing SDL. +- Echo accepts generated `jedit` EINT bytes through the existing WASM intent + ingress without adding text-specific code paths. +- Echo emits contract-aware receipts and readings whose identity includes the + external contract and operation/query basis. +- Echo can retain a jedit buffer reading or checkpoint as generic WSC/CAS bytes + when the external jedit runtime supplies that artifact. +- Echo can return a typed obstruction for stale basis, unsupported query, + missing witness, or unavailable retained artifact. +- `jedit` can create a buffer, submit a replace-range intent, and observe a + buffer reading through Echo-owned generic host surfaces. +- Save checkpoint produces a retained contract artifact through generic + retention rules. +- Echo core contains no text-specific APIs outside generated contract payloads + and test fixtures. ## Non-goals +- Do not author or edit the `jedit` GraphQL contract in Echo. +- Do not implement `jedit`'s hot text runtime in Echo. +- Do not treat generated `jedit` fixture output as Echo-owned source truth. - Do not add text-editing types to Echo core. - Do not add a special `jedit` ABI. 
- Do not move `jedit` payload validation into Echo core unless the registry diff --git a/docs/method/backlog/up-next/PLATFORM_reading-envelope-family-boundary.md b/docs/method/backlog/up-next/PLATFORM_reading-envelope-family-boundary.md deleted file mode 100644 index a76354c8..00000000 --- a/docs/method/backlog/up-next/PLATFORM_reading-envelope-family-boundary.md +++ /dev/null @@ -1,86 +0,0 @@ - - - -# Reading envelope family boundary - -Depends on: - -- [PLATFORM_observer-plan-reading-artifacts](../asap/PLATFORM_observer-plan-reading-artifacts.md) -- [0011 — Optic and observer runtime doctrine](../../../design/0011-optic-observer-runtime-doctrine/design.md) - -## Why now - -Echo now clearly needs a read-side boundary built around: - -- observer plan -- runtime observer instance where needed -- emitted reading artifact - -What is still missing is one exact family boundary for the emitted reading -artifact itself. - -Without that boundary, different consumers will keep guessing at what a reading -should carry: - -- just payload -- payload plus coordinate -- payload plus witness -- payload plus rights/budget posture - -The point of the reading envelope is to stop that guessing. - -## What it should look like - -The emitted reading envelope should be explicit about: - -- plan identity -- coordinate or frontier reference -- payload -- witness or shell reference -- observer basis -- parent-basis posture when the reading is strand-relative -- budget posture -- rights or revelation posture -- plurality, obstruction, or other read-status posture where relevant - -This does not require one global UI shape. It does require one honest runtime -family boundary. 
- -## Current implementation - -Implemented for built-in one-shot observations: - -- `ObservationArtifact::reading` -- `ReadingEnvelope` -- `ReadingObserverPlan` -- `ReadingObserverBasis` -- `ReadingWitnessRef` -- `ReadingBudgetPosture` -- `ReadingRightsPosture` -- `ReadingResidualPosture` -- `ObservationHashInput::reading` - -Still open: - -- authored observer plans -- hosted/stateful observer instances -- app-specific budget and rights enforcement -- obstruction/plurality variants beyond the current `complete` posture - -## Done looks like - -- one packet names the minimum reading-envelope fields Echo should emit -- the boundary clearly distinguishes: - - authored family - - compiled artifacts - - runtime-emitted values -- downstream repos can depend on one named family instead of reconstructing - their own "reading result" wrappers -- the family stays narrow enough to be shared by Echo, Continuum, and debugger - consumers - -## Repo evidence - -- `docs/architecture/WARP_DRIFT.md` -- `docs/design/0006-echo-continuum-alignment/design.md` -- `docs/design/0009-witnessed-causal-suffix-sync/design.md` diff --git a/docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md b/docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md new file mode 100644 index 00000000..9e67709b --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md @@ -0,0 +1,56 @@ + + + +# Strand and support Intent paths + +Status: planned implementation slice. + +Depends on: + +- [Topology mutation Intent boundary audit](./KERNEL_topology-mutation-intent-boundary-audit.md) +- [Security/capabilities for fork/rewind/merge](./KERNEL_time-travel-capabilities.md) + +## Why now + +Strands and support pins are topology-changing. They affect the causal geometry +that later reads, settlement, and braids observe. External callers should not +create strands or support geometry through direct service mutation calls. 
+ +## Goal + +Add narrow Intent-level external paths for creating a contract/runtime strand +from an explicit basis and for pinning/unpinning support when that is exposed to +application flows. + +## Likely files touched + +- `crates/echo-wasm-abi/src/kernel_port.rs` +- `crates/warp-core/src/strand.rs` +- `crates/warp-core/src/coordinator.rs` +- `crates/warp-core/src/cmd.rs` +- `crates/warp-wasm/src/warp_kernel.rs` +- `crates/warp-core/tests/strand_contract_tests.rs` + +## Acceptance criteria + +- Create-strand/fork external path is an EINT Intent against an explicit parent + coordinate. +- Support pin and unpin external paths are EINT Intents when exposed outside + the runtime. +- Direct registry/service calls remain internal implementation details. +- Stale basis, missing capability, missing provenance, duplicate strand, and + invalid support geometry return typed obstruction/conflict posture. +- Successful operations emit tick/receipt evidence. + +## Non-goals + +- Do not implement full braid settlement here. +- Do not add editor-specific strand nouns. +- Do not delete internal `StrandRegistry` or `ProvenanceService` APIs. + +## Test expectations + +- Creating a strand through the Intent path records causal evidence. +- Direct external mutation is not required by tests. +- Stale or missing basis does not silently create a strand. +- Support pin/unpin requires an Intent path when used externally. diff --git a/docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md b/docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md new file mode 100644 index 00000000..789c6f46 --- /dev/null +++ b/docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md @@ -0,0 +1,135 @@ + + + +# Wesley Footprint Honesty Artifact Attestation + +Status: planned generated-artifact hardening. 
+ +Depends on: + +- [Continuum proof family runtime cutover](./PLATFORM_continuum-proof-family-runtime-cutover.md) +- [Contract-aware receipts and readings](./KERNEL_contract-aware-receipts-and-readings.md) +- [Authenticated Wesley intent admission posture](./PLATFORM_authenticated-wesley-intent-admission-posture.md) + +## Why now + +`warp-core` already has runtime footprint guards that catch executor reads and +writes outside a declared [`Footprint`](../../../crates/warp-core/src/footprint.rs). +Those guards are the right safety net for debug builds, development, and +untrusted code paths. + +For Wesley-compiled GraphQL contracts, Echo also needs a stronger generated +artifact story: + +```text +GraphQL footprint declaration + -> Wesley compile-time access surface + -> Rust/TypeScript generated artifacts + -> artifact hash / certificate + -> Echo load-time attestation + -> release runtime may skip per-access footprint guards for trusted artifacts +``` + +The point is not to remove footprint honesty. The point is to move the primary +enforcement for trusted Wesley artifacts to compile time, then verify the +compiled artifact identity once when it is loaded. + +## What it should look like + +Wesley should lower authored footprint directives into an artifact-level +footprint contract. Generated Rust should expose only the declared read/write +capabilities to the operation implementation. If the implementation reaches +outside its declared footprint, it should fail to compile. + +Generated TypeScript should carry the same footprint metadata and artifact +fingerprint for toolchain and cross-runtime agreement. TypeScript cannot provide +the same hard boundary as Rust in every case, but it can still participate in +schema/hash parity, generated metadata checks, and fixture-level compile +verification. 
+ +The generated artifact should include a stable footprint certificate naming: + +- contract family +- schema hash +- operation id and operation name +- declared read/write footprint +- generator identity and version +- Echo ABI or registry version +- generated Rust artifact hash +- generated TypeScript artifact hash, when present +- footprint certificate hash + +At runtime, Echo or the host should compare the loaded artifact's certificate +hash and generated artifact hash with the trusted registry metadata on first +load. After that: + +- trusted compile-time-certified artifacts may use the optimized release path + without per-access footprint guards; +- debug builds and `footprint_enforce_release` may still run guards as a safety + net; +- missing, mismatched, or unsupported certificates reject or fall back to an + explicitly runtime-guarded posture according to host policy; +- no path silently treats an uncertified artifact as footprint-honest. + +## Acceptance criteria + +- One Wesley-generated Rust proof slice exposes a declared footprint as typed + capabilities. +- One valid generated Rust implementation compiles and runs through the Echo + host path. +- One invalid implementation that reads or writes outside the declared + footprint fails to compile. +- The generated Rust and TypeScript artifacts name the same schema hash, + footprint certificate hash, and operation footprint metadata. +- Echo load-time registration compares the artifact hash or certificate hash + before enabling the trusted optimized posture. +- A hash mismatch returns a typed rejection or obstruction, not a silent + runtime downgrade. +- Runtime footprint guards remain available as debug / opt-in / untrusted-path + enforcement. + +## Non-goals + +- Do not remove `FootprintGuard`. +- Do not make TypeScript the sole hard enforcement boundary. +- Do not add app-specific footprint nouns to Echo core. +- Do not require IPA, Verkle, SNARK, STARK, or proof-carrying apertures. 
+- Do not skip runtime checks for artifacts whose hash or certificate was not
+  verified.
+- Do not trust generated source text without a stable compiled-artifact
+  identity.
+
+## Notes
+
+Initial Echo-side runway landed:
+
+- `echo_registry_api::OpDef` can carry an optional no-std
+  `FootprintCertificate`.
+- `echo-wesley-gen` emits deterministic per-operation footprint artifact and
+  certificate hashes from `@wes_footprint` metadata, operation argument shape,
+  and the generated Rust artifact manifest hash.
+- Generated registry consumers can compare a certificate hash at load time via
+  `OpDef::footprint_certificate_matches(...)`.
+- Hosts can verify a generated registry artifact with
+  `echo_registry_api::verify_contract_artifact(...)`, which checks schema,
+  codec, registry layout, expected footprint certificate hashes, optional
+  generated artifact hashes, and a policy requiring mutation operations to be
+  backed by expected certificates.
+
+This is not the full closeout. The remaining hardening is the real
+compile-time capability boundary and cross-artifact Rust/TypeScript compiled
+artifact identity.
+
+The expected posture vocabulary is:
+
+```text
+CompileTimeCertified
+RuntimeGuarded
+UntrustedRejected
+UnsupportedObstructed
+```
+
+This keeps release performance honest without weakening the safety model:
+runtime checks become the fallback and development guardrail, while trusted
+Wesley-compiled artifacts carry a load-time certificate that proves the
+compile-time footprint boundary was the one Echo is about to execute.
diff --git a/docs/method/backlog/up-next/PLATFORM_wesley-to-echo-toy-contract-proof.md b/docs/method/backlog/up-next/PLATFORM_wesley-to-echo-toy-contract-proof.md
deleted file mode 100644
index 80b9c342..00000000
--- a/docs/method/backlog/up-next/PLATFORM_wesley-to-echo-toy-contract-proof.md
+++ /dev/null
@@ -1,112 +0,0 @@
-
-
-
-# Wesley To Echo Toy Contract Proof
-
-Status: GREEN 4.
- -Depends on: - -- [Registry provider wiring and host boundary decision](../asap/PLATFORM_static-contract-registry-and-host-boundary.md) -- [0016 - Wesley To Echo Toy Contract Proof](../../../design/0016-wesley-to-echo-toy-contract-proof/design.md) -- [echo-wesley-gen v2 Update](./PLATFORM_echo-wesley-gen-v2.md) -- [WESLEY Protocol Consumer Cutover](../asap/PLATFORM_WESLEY_protocol-consumer-cutover.md) - -## Why now - -Before `jedit` becomes the first serious application consumer, Echo and Wesley -need one tiny contract that proves the full authoring and hosting path. - -This should be deliberately boring. The value is the path: - -```text -GraphQL -> Wesley IR -> echo-wesley-gen Rust -> EINT -> dispatch -> observe -``` - -This proof should reuse existing pieces: EINT v1, `dispatch_intent(...)`, -`RegistryInfo`, `echo-registry-api::RegistryProvider`, `GeneratedRegistry`, and -the current observation/read-envelope boundary. - -## What it should look like - -Use a tiny toy contract, such as a counter, with one intent and one observer. - -Example domain: - -- `Increment` -- `CounterValue` - -The exact schema is not important. The proof must exercise generated identity, -op ids, vars encoding, EINT packing, dispatch, registry metadata, and one -read/observation path. - -## Current RED - -The original RED is documented in -[0016 - Wesley To Echo Toy Contract Proof](../../../design/0016-wesley-to-echo-toy-contract-proof/design.md). - -`echo-wesley-gen` already emits op constants, `OPS`, `GeneratedRegistry`, and -`REGISTRY`. It does not yet emit the first-consumer app-level helper that -validates/encodes operation vars, packs EINT v1, and maps a generated query/read -helper to `observe(...)` / `ReadingEnvelope`. - -## Current GREEN - -GREEN 1 emitted raw-vars helpers from `echo-wesley-gen`: - -- mutation helpers pack EINT v1 with `pack_intent_v1(...)`; -- query helpers construct `ObservationRequest` using the existing query-view - projection. 
- -GREEN 2 compiled generated output in a temporary standalone consumer crate and -exercises: - -- generated registry metadata; -- generated EINT packing; -- EINT unpacking; -- `KernelPort::dispatch_intent(...)`; -- generated query `ObservationRequest`; -- `KernelPort::observe(...)`. - -GREEN 3 hardens the generated app-facing helper surface: - -- generated operations now receive typed vars structs; -- generated vars are encoded through Echo canonical CBOR before EINT packing or - observation request construction; -- raw-vars helpers remain available only through explicit `_raw_vars` names for - callers that already hold canonical vars bytes; -- the consumer smoke kernel decodes generated vars before asserting app-level - values. - -GREEN 4 extracts the toy counter IR into a named fixture: - -- `crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json`; -- `crates/echo-wesley-gen/tests/fixtures/toy-counter/README.md`; -- both toy generator tests consume the fixture through `include_str!(...)` - instead of copying inline JSON. - -The next proof should wire the same generated surface into an installed Echo or -application-owned kernel path instead of a toy `KernelPort` implementation. - -## Acceptance criteria - -- Wesley compiles the toy GraphQL contract to Echo-consumable Rust artifacts. -- `echo-wesley-gen` emits op ids, op catalog metadata, and a generated - `RegistryProvider`. -- The consumer proof uses a generated typed vars helper that canonicalizes app - vars before calling `pack_intent_v1(...)` with a generated op id. -- `dispatch_intent(...)` admits one valid toy intent. -- Registry metadata from the installed kernel or app bundle matches the - generated schema and codec metadata. -- One read path proves how generated query/observer operations relate to - `observe(...)` and `ReadingEnvelope`. -- Golden ABI vectors are stable. -- The toy counter contract is a shared fixture, not duplicated inline JSON. 
- -## Non-goals - -- Do not use a text editing contract. -- Do not add dynamic contract loading. -- Do not require browser packaging. -- Do not add Continuum transport. -- Do not create a second registry or intent envelope. diff --git a/docs/method/dependency-dags.md b/docs/method/dependency-dags.md index a949291e..4595f04f 100644 --- a/docs/method/dependency-dags.md +++ b/docs/method/dependency-dags.md @@ -79,24 +79,19 @@ cargo xtask dags --snapshot-label 2026-01-02 --- -## Tasks DAG (derived from `docs/assets/dags/tasks-dag-source.md`) +## METHOD Backlog DAG -![Tasks DAG](../assets/dags/tasks-dag.svg) +The old `docs/assets/dags/tasks-dag-source.md` issue sketch is retired. Current +task scheduling lives in the METHOD backlog artifacts: -Sources: - -- Source data: `docs/assets/dags/tasks-dag-source.md` -- Generator: `scripts/generate-tasks-dag.js` (scheduled by the GitHub workflow `.github/workflows/refresh-dependency-dags.yml` to keep the rendered output aligned with `docs/assets/dags/tasks-dag-source.md`) -- DOT: `docs/assets/dags/tasks-dag.dot` -- SVG: `docs/assets/dags/tasks-dag.svg` +- DOT: `docs/method/task-dag.dot` +- SVG: `docs/method/task-dag.svg` +- Matrix: `docs/method/task-matrix.md` +- CSV: `docs/method/task-matrix.csv` -This DAG visualizes inferred issue dependencies that contributors log in `docs/assets/dags/tasks-dag-source.md`, offering a quick comparison point against the curated milestone/issue graphs above. -By design, isolated nodes (no incoming/outgoing edges) are filtered out to reduce clutter; the generator computes `connectedNodeIds` / `filteredNodes` and logs the drop counts during render. - -## Regenerating the Tasks DAG +Regenerate the current DAG with: ```sh -node scripts/generate-tasks-dag.js +cargo xtask method matrix +cargo xtask method dag ``` - -Ensure Node.js and Graphviz (`dot`) are installed before running manually, or trigger the scheduled workflow (which now includes this script) to refresh the assets automatically. 
diff --git a/docs/method/graveyard/KERNEL_stream-merge-semantics.md b/docs/method/graveyard/KERNEL_stream-merge-semantics.md index 1a2e362c..e1b015df 100644 --- a/docs/method/graveyard/KERNEL_stream-merge-semantics.md +++ b/docs/method/graveyard/KERNEL_stream-merge-semantics.md @@ -1,13 +1,16 @@ -# Merge semantics for admitted stream facts across worldlines +# Superseded merge semantics note -Ref: #245 +Folded from: #245 -When two forked worldlines are merged, how do admitted stream facts -(channel emissions, provenance entries) combine? This is the hardest -design question in Echo's strand/braiding story. +This old issue asked how debugger-era per-source admission records combine when +two forked worldlines merge. The old noun framing is obsolete. The remaining +valid concern is folded into +`docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md`: +settlement is a generic worldline/strand/braid admission law with typed +admitted, staged, plural, conflict, or obstructed outcomes. git-warp uses CRDT convergence (OR-Set + LWW). Echo needs canonical merge — one deterministic result, not eventual convergence. The diff --git a/docs/method/legends/DOCS.md b/docs/method/legends/DOCS.md index 56f7f6d4..0502348b 100644 --- a/docs/method/legends/DOCS.md +++ b/docs/method/legends/DOCS.md @@ -57,7 +57,6 @@ actually shipped. ## Current cycle and backlog -- latest completed cycle: (none under METHOD yet) -- live backlog: - - `asap/DOCS_docs-cleanup.md` - - `asap/DOCS_cli-man-pages.md` +- Use `cargo xtask method status` for the current live backlog. +- Completed backlog cards are removed from `docs/method/backlog/**`; git + history and retros are the archive. diff --git a/docs/method/legends/KERNEL.md b/docs/method/legends/KERNEL.md index 7d8db155..c57daceb 100644 --- a/docs/method/legends/KERNEL.md +++ b/docs/method/legends/KERNEL.md @@ -59,9 +59,6 @@ declared rewrite rules and scheduling policy. 
## Current cycle and backlog -- latest completed cycle: (none under METHOD yet) -- live backlog: - - `asap/KERNEL_determinism-torture.md` - - `asap/KERNEL_domain-separated-hashes.md` - - `up-next/KERNEL_sha256-blake3.md` - - `up-next/KERNEL_time-model-spec.md` +- Use `cargo xtask method status` for the current live backlog. +- Completed backlog cards are removed from `docs/method/backlog/**`; git + history and retros are the archive. diff --git a/docs/method/legends/PLATFORM.md b/docs/method/legends/PLATFORM.md index 07b37aca..635ba703 100644 --- a/docs/method/legends/PLATFORM.md +++ b/docs/method/legends/PLATFORM.md @@ -56,14 +56,6 @@ failing, and what to work on next. ## Current cycle and backlog -- active cycle: - - `0002-xtask-method-status` (in design) -- live backlog: - - `asap/PLATFORM_benchmarks-cleanup.md` - - `asap/PLATFORM_cli-bench.md` - - `asap/PLATFORM_cli-inspect.md` - - `asap/PLATFORM_cli-scaffold.md` - - `asap/PLATFORM_cli-verify.md` - - `asap/PLATFORM_xtask-method-close.md` - - `asap/PLATFORM_xtask-method-drift.md` - - `asap/PLATFORM_xtask-method-pull.md` +- Use `cargo xtask method status` for the current live backlog. +- Completed backlog cards are removed from `docs/method/backlog/**`; git + history and retros are the archive. diff --git a/docs/method/retro/0016-wesley-to-echo-toy-contract-proof/retro.md b/docs/method/retro/0016-wesley-to-echo-toy-contract-proof/retro.md new file mode 100644 index 00000000..f3f19dee --- /dev/null +++ b/docs/method/retro/0016-wesley-to-echo-toy-contract-proof/retro.md @@ -0,0 +1,56 @@ + + + +# Retro: 0016-wesley-to-echo-toy-contract-proof + +Cycle: `0016-wesley-to-echo-toy-contract-proof` +Design: [`docs/design/0016-wesley-to-echo-toy-contract-proof/`](../../../design/0016-wesley-to-echo-toy-contract-proof/) +Witness: generated toy counter smoke crate under `target/` during +`echo-wesley-gen` tests. + +## Outcome + +- Status: Accepted. 
+- Summary: Closed the `M044` backlog item by proving a Wesley-generated toy + contract can use generated op metadata, typed vars helpers, EINT packing, + installed `warp-wasm` kernel dispatch, installed-kernel observation, and + installed registry metadata without adding application-specific Echo APIs. + +## Evidence + +- `crates/echo-wesley-gen/tests/fixtures/toy-counter/echo-ir-v1.json` is the + shared toy counter IR fixture. +- `crates/echo-wesley-gen/tests/generation.rs` generates Rust from that fixture + and compiles a standalone consumer smoke crate. +- The smoke crate installs an application-owned `ToyKernel` through + `warp_wasm::install_kernel(...)`. +- `crates/warp-wasm/src/lib.rs` exposes native CBOR-envelope helpers for the + installed-kernel boundary: + - `dispatch_intent_cbor(...)`; + - `observe_cbor(...)`; + - `get_registry_info_cbor()`. +- Those helpers do not change the `wasm_bindgen` exports; they let native tests + exercise the same success/error envelope contract without JavaScript + `Uint8Array` bindings. + +## Verification + +- `cargo test -p echo-wesley-gen test_toy_contract_generated_output_compiles_in_consumer_crate` + +## Drift Check + +- Echo core still does not learn text, editor, or `jedit` nouns. +- No new intent envelope, registry model, dynamic loader, or app payload + validator was introduced. +- The toy contract is still a consumer fixture. It proves the host shape; it is + not a privileged domain in Echo. +- Query reads are carried through the generic `QueryView` / + `ObservationProjection::Query` and `OpticApertureShape::QueryBytes` shapes. + +## Follow-Up + +- Pull `M012` for contract-aware receipt and reading identity. +- Pull `M023` for retained contract artifacts and cached bounded readings in + `echo-cas`. +- Use generated `jedit` Wesley output as a fixture only after identity and + retention rules are honest enough for a serious consumer. 
diff --git a/docs/method/retro/0019-reading-envelope-family-boundary/retro.md b/docs/method/retro/0019-reading-envelope-family-boundary/retro.md new file mode 100644 index 00000000..8a3bd837 --- /dev/null +++ b/docs/method/retro/0019-reading-envelope-family-boundary/retro.md @@ -0,0 +1,50 @@ + + + +# Retro: 0019-reading-envelope-family-boundary + +Cycle: `0019-reading-envelope-family-boundary` +Design: [`docs/design/0019-reading-envelope-family-boundary/`](../../../design/0019-reading-envelope-family-boundary/) +Witness: [`witness/`](./witness/) + +## Outcome + +- Status: Accepted. +- Summary: Closed the `M032` backlog item by naming the generic Echo + reading-envelope family boundary and adding a regression test that proves + reading envelope posture participates in observation artifact identity. + +## Evidence + +- `docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md` + distinguishes authored observer families, compiled or installed artifacts, + and runtime-emitted reading values. +- `docs/BEARING.md` points current direction at the accepted boundary. +- `docs/architecture/application-contract-hosting.md` cites the boundary as the + generic read-side target for hosted contracts. +- `crates/warp-core/src/observation.rs` includes + `reading_envelope_posture_participates_in_artifact_identity`. 
+- Verification: + - `cargo fmt --all -- --check` + - `cargo test -p warp-core reading_envelope_posture_participates_in_artifact_identity` + - `cargo test -p warp-core --lib reading_envelope_posture_participates_in_artifact_identity` + - `pnpm docs:build` + - `pnpm exec prettier --check docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md docs/BEARING.md docs/architecture/application-contract-hosting.md` + - `pnpm exec markdownlint-cli2 docs/design/0019-reading-envelope-family-boundary/reading-envelope-family-boundary.md docs/BEARING.md docs/architecture/application-contract-hosting.md` + +## Drift Check + +- The boundary stays generic: no `jedit` nouns, no Graft/editor/rope API, and + no GraphQL-first runtime API were added to Echo core. +- The design explicitly treats CAS bytes as retention, while `ReadIdentity` + names the semantic question answered by those bytes. +- The new test guards against treating `ReadingEnvelope` as decorative metadata + by proving budget posture changes artifact identity even when coordinate and + payload are unchanged. + +## Follow-Up + +- Implement QueryView observers against this boundary. +- Add authored observer installation only through generic plan/artifact + identity, not application nouns. +- Keep retained-reading work keyed by `ReadIdentity` plus byte identity. diff --git a/docs/method/retro/0020-echo-cas-browser/retro.md b/docs/method/retro/0020-echo-cas-browser/retro.md new file mode 100644 index 00000000..e834e631 --- /dev/null +++ b/docs/method/retro/0020-echo-cas-browser/retro.md @@ -0,0 +1,43 @@ + + + +# Retro: 0020-echo-cas-browser + +Cycle: `0020-echo-cas-browser` +Design: [`docs/design/0020-echo-cas-browser/`](../../../design/0020-echo-cas-browser/) +Witness: [`witness/`](./witness/) + +## Outcome + +- Status: Accepted. 
+- Summary: Closed the `T-4-3-1` MemoryTier WASM compilation gate by proving + `echo-cas` builds for `wasm32-unknown-unknown` and adding a CI job that runs + the same target build on every PR. + +## Evidence + +- `.github/workflows/ci.yml` includes `Build echo-cas (wasm32)`, which runs + `cargo build --target wasm32-unknown-unknown -p echo-cas`. +- `docs/design/0020-echo-cas-browser/echo-cas-browser.md` records the accepted + WASM compilation gate and local witnesses. +- `docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md` preserves the + deferred JavaScript binding follow-up as a separate visible backlog item. +- Verification: + - `cargo build --target wasm32-unknown-unknown -p echo-cas` + - `cargo test -p echo-cas` + +## Drift Check + +- This cycle did not add JavaScript bindings, persistence, DiskTier, async CAS, + or browser-specific mutation semantics. +- `echo-cas` remains content-addressed storage only. CAS hashes still name + bytes, not semantic read identity or Echo ontology. +- The follow-up JS binding task is visible in the backlog instead of hidden + inside this completed cycle. + +## Follow-Up + +- Implement `WasmBlobStore` bindings in the separate + `PLATFORM_echo-cas-js-bindings.md` backlog item. +- Decide later whether local pre-push verification should include a narrow + `echo-cas` WASM smoke lane or leave this as CI-only. 
diff --git a/docs/method/retro/0021-parent-drift-owned-footprint-revalidation/retro.md b/docs/method/retro/0021-parent-drift-owned-footprint-revalidation/retro.md new file mode 100644 index 00000000..a4625fc0 --- /dev/null +++ b/docs/method/retro/0021-parent-drift-owned-footprint-revalidation/retro.md @@ -0,0 +1,49 @@ + + + +# Retro: 0021-parent-drift-owned-footprint-revalidation + +Cycle: `0021-parent-drift-owned-footprint-revalidation` +Design: [`docs/design/0021-parent-drift-owned-footprint-revalidation/`](../../../design/0021-parent-drift-owned-footprint-revalidation/) +Witness: [`witness/`](./witness/) + +## Outcome + +- Status: Accepted. +- Summary: Pulled `M014` and closed it as existing implementation/evidence + consolidation. The runtime, settlement planner, witnessed-suffix ABI, and + observation artifacts already carry the parent-drift/owned-footprint + revalidation law required by the backlog card. + +## Evidence + +- `crates/warp-core/src/strand.rs` defines `Strand::live_basis_report(...)`, + `StrandRevalidationState`, and `StrandOverlapRevalidation`. +- `crates/warp-core/src/settlement.rs` maps parent movement inside the + strand-owned closed footprint to explicit clean, obstructed, or conflict + revalidation posture. +- `crates/warp-core/src/witnessed_suffix.rs` preserves settlement basis reports + and overlap revalidation through ABI conversion. +- `crates/warp-core/src/observation.rs` carries `ObservationBasisPosture` on + reading artifacts, including `StrandParentAdvancedDisjoint` and + `StrandRevalidationRequired`. +- Verification: + - `cargo test -p warp-core live_basis_report` + - `cargo test -p warp-core strand_frontier_observation_reports_overlap_revalidation_posture` + +## Drift Check + +- No new runtime code was needed for this cycle. +- The accepted shape stays generic: no jedit/editor/Graft/rope nouns were added + to Echo core. +- The live-strand read surface does not pretend overlapped parent movement is + clean. 
It names the revalidation posture in the reading artifact instead. +- Settlement remains append/admission oriented; parent drift revalidation does + not rewrite old provenance. + +## Follow-Up + +- Add an obstruction-specific overlap revalidation fixture when a natural + patch-level obstruction case is available. +- Keep future optic/read APIs consuming `ObservationBasisPosture` instead of + inventing a separate parent-drift vocabulary. diff --git a/docs/method/retro/0023-import-transport-intent-admission-path/retro.md b/docs/method/retro/0023-import-transport-intent-admission-path/retro.md new file mode 100644 index 00000000..f34e80d0 --- /dev/null +++ b/docs/method/retro/0023-import-transport-intent-admission-path/retro.md @@ -0,0 +1,54 @@ + + + +# Retro: 0023-import-transport-intent-admission-path + +Cycle: `0023-import-transport-intent-admission-path` +Design: [`docs/design/0023-import-transport-intent-admission-path/`](../../../design/0023-import-transport-intent-admission-path/) +Witness: [`witness/`](./witness/) + +## Outcome + +- Status: Accepted. +- Summary: Added Echo's first executable import-transport Intent path. A + `CausalSuffixBundle` import proposal is now wrapped as an Echo-owned EINT v1 + payload, validated at the WASM/kernel boundary, admitted through + `dispatch_intent`, and handled during the scheduled tick as a typed staged + `ImportSuffixResult` graph artifact. + +## Evidence + +- `crates/echo-wasm-abi/src/lib.rs` defines + `IMPORT_SUFFIX_INTENT_V1_OP_ID`, + `pack_import_suffix_intent_v1(...)`, and + `unpack_import_suffix_intent_v1(...)`. +- `crates/warp-wasm/src/warp_kernel.rs` rejects malformed import-suffix EINT + payloads before ingress and registers the generic import command handler for + engine-backed kernels. +- `crates/warp-core/src/cmd.rs` defines `cmd/import_suffix_intent`, preserving + the ingress event and appending a deterministic result node with canonical + CBOR `ImportSuffixResult`. 
+- Verification: + - `cargo test -p echo-wasm-abi import_suffix_intent` + - `cargo test -p warp-wasm --features engine import_suffix_intent` + - `cargo test -p warp-core import_suffix` + +## Drift Check + +- Echo core still does not learn application nouns. The import command handles + generic witnessed suffix bundles and typed admission posture only. +- Transport arrival remains outside history until wrapped as an EINT intent and + selected by the scheduler. +- The first handler returns `Staged`, not `Admitted`, because full remote + basis-aware admission, novelty indexing, and settlement/collapse are later + slices. +- The original ingress event remains in the graph; the result is recorded as a + separate causal artifact rather than overwriting the proposal bytes. + +## Follow-Up + +- Implement basis-aware import outcome evaluation with local target-basis + evidence. +- Add retained shell-equivalence and novelty/loop-prevention indexes. +- Extend intent-driven settlement/braid/topology paths so staged imports can be + realized without direct mutation APIs. 
diff --git a/docs/method/task-dag.dot b/docs/method/task-dag.dot new file mode 100644 index 00000000..b5edc8cd --- /dev/null +++ b/docs/method/task-dag.dot @@ -0,0 +1,262 @@ +digraph method_task_dag { + graph [ + label="METHOD Backlog Task Dependency DAG\nopen tasks: 95 / 137; dependency edges: 57", + labelloc=t, + fontsize=24, + fontname="Inter, Helvetica, Arial", + rankdir=LR, + bgcolor="white", + splines=ortho, + overlap=false, + nodesep=0.35, + ranksep=0.75 + ]; + node [ + shape=box, + style="rounded,filled", + fontsize=10, + fontname="Inter, Helvetica, Arial", + margin="0.08,0.06", + penwidth=1.2 + ]; + edge [ + color="#dc2626", + arrowsize=0.8, + penwidth=2.6 + ]; + + subgraph "cluster_asap" { + label="asap"; + color="#cbd5e1"; + fontname="Inter, Helvetica, Arial"; + fontsize=16; + style="rounded"; + "M001" [label="M001\nOPEN\nDocs cleanup", tooltip="M001 [asap] open; blockers=0; Docs cleanup", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M002" [label="M002\nOPEN\nEcho and git-warp\ncompatibility sanity check", tooltip="M002 [asap] open; blockers=0; Echo and git-warp compatibility sanity check", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M003" [label="M003\nOPEN\nT-9-3-1\nVerify and integrate\ndeterministic trig oracle\ninto release gate", tooltip="M003 [asap] open; blockers=0; T-9-3-1 Verify and integrate deterministic trig oracle into release gate", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M004" [label="M004\nOPEN\nCI det-policy hardening", tooltip="M004 [asap] open; blockers=0; CI det-policy hardening", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M005" [label="M005\nOPEN\nT-6-1-2\nConfig file support and\nshell completions", tooltip="M005 [asap] open; blockers=0; T-6-1-2 Config file support and shell completions", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M006" [label="M006\nOPEN\nT-279-1\nMake decoder control\ncoverage auditable", tooltip="M006 [asap] open; blockers=0; T-279-1 Make decoder 
control coverage auditable", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M007" [label="M007\nOPEN\nEcho Contract Hosting\nRoadmap", tooltip="M007 [asap] open; blockers=0; Echo Contract Hosting Roadmap", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M008" [label="M008\nOPEN\nCommit-ordered rollback\nplaybooks for TTD\nintegration", tooltip="M008 [asap] open; blockers=0; Commit-ordered rollback playbooks for TTD integration", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M009" [label="M009\nOPEN\nReconcile TTD protocol\nschemas with warp-ttd", tooltip="M009 [asap] open; blockers=0; Reconcile TTD protocol schemas with warp-ttd", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M010" [label="M010\nOPEN\nWesley Compiled Contract\nHosting Doctrine", tooltip="M010 [asap] open; blockers=0; Wesley Compiled Contract Hosting Doctrine", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + } + + subgraph "cluster_up_next" { + label="up-next"; + color="#cbd5e1"; + fontname="Inter, Helvetica, Arial"; + fontsize=16; + style="rounded"; + "M011" [label="M011\nOPEN\nCompliance reporting as a\nTTD protocol extension", tooltip="M011 [up-next] open; blockers=0; Compliance reporting as a TTD protocol extension", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M012" [label="M012\nOPEN\nContract-Aware Receipts\nAnd Readings", tooltip="M012 [up-next] open; blockers=0; Contract-Aware Receipts And Readings", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M013" [label="M013\nContract Strands And\nCounterfactuals", tooltip="M013 [up-next] blocked; blockers=1; Contract Strands And Counterfactuals", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M014" [label="M014\nOPEN\nT-2-5-1\nSHA-256 to BLAKE3\nmigration spec", tooltip="M014 [up-next] open; blockers=0; T-2-5-1 SHA-256 to BLAKE3 migration spec", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M015" [label="M015\nOPEN\nSecurity/capabilities for\nfork/rewind/merge", 
tooltip="M015 [up-next] open; blockers=0; Security/capabilities for fork/rewind/merge", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M016" [label="M016\nOPEN\nWARP optic boundary audit\nfor topology and history\noperations", tooltip="M016 [up-next] open; blockers=0; WARP optic boundary audit for topology and history operations", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M017" [label="M017\nOPEN\nAuthenticated Wesley\nIntent Admission Posture", tooltip="M017 [up-next] open; blockers=0; Authenticated Wesley Intent Admission Posture", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M018" [label="M018\nBraid and settlement\nIntent paths", tooltip="M018 [up-next] blocked; blockers=2; Braid and settlement Intent paths", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M019" [label="M019\nT-4-2-1\nCanvas graph renderer\n(static materialized\nreading)", tooltip="M019 [up-next] blocked; blockers=2; T-4-2-1 Canvas graph renderer (static materialized reading)", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M020" [label="M020\nT-4-2-2\nLive tick playback and\nrewrite animation", tooltip="M020 [up-next] blocked; blockers=2; T-4-2-2 Live tick playback and rewrite animation", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M021" [label="M021\nT-4-2-3\nNode inspection panel", tooltip="M021 [up-next] blocked; blockers=1; T-4-2-3 Node inspection panel", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M022" [label="M022\nOPEN\nContinuum Proof Family\nRuntime Cutover", tooltip="M022 [up-next] open; blockers=0; Continuum Proof Family Runtime Cutover", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M023" [label="M023\nContract Artifact\nRetention In echo-cas", tooltip="M023 [up-next] blocked; blockers=1; Contract Artifact Retention In echo-cas", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M024" [label="M024\nOPEN\nAdd an explicit Echo CLI\nand MCP agent surface", tooltip="M024 [up-next] open; 
blockers=0; Add an explicit Echo CLI and MCP agent surface", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M025" [label="M025\nOPEN\nT-4-3-2\nJS bindings for CAS\nstore/retrieve", tooltip="M025 [up-next] open; blockers=0; T-4-3-2 JS bindings for CAS store/retrieve", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M026" [label="M026\nOPEN\nEcho / git-warp witnessed\nsuffix sync", tooltip="M026 [up-next] open; blockers=0; Echo / git-warp witnessed suffix sync", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M027" [label="M027\nOPEN\nSplit echo-session-proto\ninto retained bridge\ncontracts vs legacy...", tooltip="M027 [up-next] open; blockers=0; Split echo-session-proto into retained bridge contracts vs legacy transport residue", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M028" [label="M028\nGraft Live Frontier\nStructural Readings", tooltip="M028 [up-next] blocked; blockers=1; Graft Live Frontier Structural Readings", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M029" [label="M029\nOPEN\nImport outcome idempotence\nand loop law", tooltip="M029 [up-next] open; blockers=0; Import outcome idempotence and loop law", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M030" [label="M030\nOPEN\nImport outcome retention\nand novelty index", tooltip="M030 [up-next] open; blockers=0; Import outcome retention and novelty index", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M031" [label="M031\nInverse operation Intent\npath", tooltip="M031 [up-next] blocked; blockers=1; Inverse operation Intent path", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M032" [label="M032\njedit Text Contract\nHosting MVP", tooltip="M032 [up-next] blocked; blockers=2; jedit Text Contract Hosting MVP", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M033" [label="M033\nOPEN\nTriage METHOD drift\nagainst ~/git/method", tooltip="M033 [up-next] open; blockers=0; Triage METHOD drift against ~/git/method", 
fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M034" [label="M034\nStrand and support Intent\npaths", tooltip="M034 [up-next] blocked; blockers=2; Strand and support Intent paths", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M035" [label="M035\nOPEN\nNarrow ttd-browser into an\nEcho browser host bridge", tooltip="M035 [up-next] open; blockers=0; Narrow ttd-browser into an Echo browser host bridge", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M036" [label="M036\nOPEN\nT-4-1-1\nWire Engine lifecycle\nbehind wasm-bindgen\nexports", tooltip="M036 [up-next] open; blockers=0; T-4-1-1 Wire Engine lifecycle behind wasm-bindgen exports", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M037" [label="M037\nT-4-1-2\nSnapshot and ViewOp drain\nexports", tooltip="M037 [up-next] blocked; blockers=1; T-4-1-2 Snapshot and ViewOp drain exports", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M038" [label="M038\nT-4-1-3\nJS/WASM memory bridge and\nerror protocol", tooltip="M038 [up-next] blocked; blockers=1; T-4-1-3 JS/WASM memory bridge and error protocol", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M039" [label="M039\nWesley Footprint Honesty\nArtifact Attestation", tooltip="M039 [up-next] blocked; blockers=3; Wesley Footprint Honesty Artifact Attestation", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M040" [label="M040\nOPEN\nT-2-3-1\nREADME, contributor guide,\nand CI hardening", tooltip="M040 [up-next] open; blockers=0; T-2-3-1 README, contributor guide, and CI hardening", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M041" [label="M041\nOPEN\nT-2-2-1\nBackfill script generation\nfor schema migrations", tooltip="M041 [up-next] open; blockers=0; T-2-2-1 Backfill script generation for schema migrations", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M042" [label="M042\nT-2-2-2\nSwitch-over plan and\ncontract validation", tooltip="M042 [up-next] blocked; blockers=1; T-2-2-2 Switch-over 
plan and contract validation", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M043" [label="M043\nOPEN\nT-2-1-1\nGraphQL operation parser\nfor QIR", tooltip="M043 [up-next] open; blockers=0; T-2-1-1 GraphQL operation parser for QIR", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M044" [label="M044\nT-2-1-2\nSQL query plan generation\nfrom QIR", tooltip="M044 [up-next] blocked; blockers=1; T-2-1-2 SQL query plan generation from QIR", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M045" [label="M045\nOPEN\nT-4-4-1\nTypeScript type generation\nfrom Wesley IR", tooltip="M045 [up-next] open; blockers=0; T-4-4-1 TypeScript type generation from Wesley IR", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M046" [label="M046\nT-4-4-2\nZod runtime validators\nfrom Wesley IR", tooltip="M046 [up-next] blocked; blockers=1; T-4-4-2 Zod runtime validators from Wesley IR", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + "M047" [label="M047\nT-4-4-3\nCBOR serialization bridge\n(TS types to WASM Rust)", tooltip="M047 [up-next] blocked; blockers=2; T-4-4-3 CBOR serialization bridge (TS types to WASM Rust)", fillcolor="#dbeafe", color="#1d4ed8", penwidth=1.2]; + } + + subgraph "cluster_inbox" { + label="inbox"; + color="#cbd5e1"; + fontname="Inter, Helvetica, Arial"; + fontsize=16; + style="rounded"; + "M048" [label="M048\nOPEN\nT-10-10-1\nInformation Architecture\nConsolidation", tooltip="M048 [inbox] open; blockers=0; T-10-10-1 Information Architecture Consolidation", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M049" [label="M049\nT-10-10-2\nTutorial Series + API\nReference", tooltip="M049 [inbox] blocked; blockers=1; T-10-10-2 Tutorial Series + API Reference", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M050" [label="M050\nOPEN\nT-10-6-1a\nRhai Sandbox Configuration\n(#173, part a)", tooltip="M050 [inbox] open; blockers=0; T-10-6-1a Rhai Sandbox Configuration (#173, part a)", fillcolor="#bbf7d0", color="#15803d", 
penwidth=2.8]; + "M051" [label="M051\nT-10-6-1b\nViewClaim / EffectClaim\nReceipts (#173, part b)", tooltip="M051 [inbox] blocked; blockers=1; T-10-6-1b ViewClaim / EffectClaim Receipts (#173, part b)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M052" [label="M052\nOPEN\nFirst-class invariant\ndocuments", tooltip="M052 [inbox] open; blockers=0; First-class invariant documents", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M053" [label="M053\nOPEN\nT-10-2-1\nSpec — Commit/Manifest\nSigning (#20)", tooltip="M053 [inbox] open; blockers=0; T-10-2-1 Spec — Commit/Manifest Signing (#20)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M054" [label="M054\nOPEN\nT-10-2-2\nSpec — Security Contexts\n(#21)", tooltip="M054 [inbox] open; blockers=0; T-10-2-2 Spec — Security Contexts (#21)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M055" [label="M055\nT-10-2-3\nFFI Limits and Validation\n(#38)", tooltip="M055 [inbox] blocked; blockers=1; T-10-2-3 FFI Limits and Validation (#38)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M056" [label="M056\nOPEN\nT-10-2-4\nJS-ABI Packet Checksum v2\n(#195)", tooltip="M056 [inbox] open; blockers=0; T-10-2-4 JS-ABI Packet Checksum v2 (#195)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M057" [label="M057\nOPEN\nT-10-2-5\nSpec — Provenance\nPayload v1 (#202)", tooltip="M057 [inbox] open; blockers=0; T-10-2-5 Spec — Provenance Payload v1 (#202)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M058" [label="M058\nOPEN\nABI nested evidence\nstrictness", tooltip="M058 [inbox] open; blockers=0; ABI nested evidence strictness", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M059" [label="M059\nOPEN\nT-10-4-1\nDraft Hot-Reload Spec\n(#75)", tooltip="M059 [inbox] open; blockers=0; T-10-4-1 Draft Hot-Reload Spec (#75)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M060" [label="M060\nT-10-4-2\nFile Watcher / Debounce\n(#76)", tooltip="M060 [inbox] blocked; 
blockers=1; T-10-4-2 File Watcher / Debounce (#76)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M061" [label="M061\nT-10-4-3\nHot-Reload Implementation\n(#24)", tooltip="M061 [inbox] blocked; blockers=1; T-10-4-3 Hot-Reload Implementation (#24)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M062" [label="M062\nOPEN\ngit-mind NEXUS", tooltip="M062 [inbox] open; blockers=0; git-mind NEXUS", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M063" [label="M063\nOPEN\nT-10-5-1\nImporter Umbrella Audit +\nClose (#25)", tooltip="M063 [inbox] open; blockers=0; T-10-5-1 Importer Umbrella Audit + Close (#25)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M064" [label="M064\nOPEN\nLegend progress in method\nstatus", tooltip="M064 [inbox] open; blockers=0; Legend progress in method status", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M065" [label="M065\nOPEN\nReconcile Relocated Wesley\nEcho Schemas", tooltip="M065 [inbox] open; blockers=0; Reconcile Relocated Wesley Echo Schemas", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M066" [label="M066\nT-10-3-1\nKey Management Doc (#35)", tooltip="M066 [inbox] blocked; blockers=1; T-10-3-1 Key Management Doc (#35)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M067" [label="M067\nT-10-3-2\nCI — Sign Release\nArtifacts (Dry Run) (#33)", tooltip="M067 [inbox] blocked; blockers=1; T-10-3-2 CI — Sign Release Artifacts (Dry Run) (#33)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M068" [label="M068\nT-10-3-3\nCLI Verify Path (#34)", tooltip="M068 [inbox] blocked; blockers=1; T-10-3-3 CLI Verify Path (#34)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M069" [label="M069\nT-10-3-4\nCI — Verify Signatures\n(#36)", tooltip="M069 [inbox] blocked; blockers=1; T-10-3-4 CI — Verify Signatures (#36)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M070" [label="M070\nOPEN\nT-10-8-1\nDocs / Logging\nImprovements (#79)", tooltip="M070 [inbox] 
open; blockers=0; T-10-8-1 Docs / Logging Improvements (#79)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M071" [label="M071\nOPEN\nT-10-8-2\nNaming Consistency Audit\n(#207)", tooltip="M071 [inbox] open; blockers=0; T-10-8-2 Naming Consistency Audit (#207)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M072" [label="M072\nOPEN\nT-10-8-3\nReliving Debugger UX\nDesign (#239)", tooltip="M072 [inbox] open; blockers=0; T-10-8-3 Reliving Debugger UX Design (#239)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M073" [label="M073\nOPEN\nT-10-8-4\nLocal Rustdoc Warning Gate", tooltip="M073 [inbox] open; blockers=0; T-10-8-4 Local Rustdoc Warning Gate", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M074" [label="M074\nOPEN\nT-10-8-5\nDeterministic Test Engine\nHelper", tooltip="M074 [inbox] open; blockers=0; T-10-8-5 Deterministic Test Engine Helper", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M075" [label="M075\nOPEN\nT-10-8-6\nCurrent-Head PR Review /\nMerge Summary Tool", tooltip="M075 [inbox] open; blockers=0; T-10-8-6 Current-Head PR Review / Merge Summary Tool", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M076" [label="M076\nOPEN\nT-10-8-7\nCI Trigger Rationalization", tooltip="M076 [inbox] open; blockers=0; T-10-8-7 CI Trigger Rationalization", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M077" [label="M077\nOPEN\nT-10-8-8\nBackground Cargo Lock\nIsolation", tooltip="M077 [inbox] open; blockers=0; T-10-8-8 Background Cargo Lock Isolation", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M078" [label="M078\nOPEN\nT-10-8-9\nSmall-Commit Pre-Commit\nLatency Reduction", tooltip="M078 [inbox] open; blockers=0; T-10-8-9 Small-Commit Pre-Commit Latency Reduction", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M079" [label="M079\nOPEN\nT-10-8-10\nFeature-Gate Contract\nVerification", tooltip="M079 [inbox] open; blockers=0; T-10-8-10 Feature-Gate Contract Verification", 
fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M080" [label="M080\nOPEN\nT-10-8-11\nPR Review Thread Reply /\nResolution Helper", tooltip="M080 [inbox] open; blockers=0; T-10-8-11 PR Review Thread Reply / Resolution Helper", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M081" [label="M081\nOPEN\nT-10-8-12\nShell Script Style /\nFormat Lane", tooltip="M081 [inbox] open; blockers=0; T-10-8-12 Shell Script Style / Format Lane", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M082" [label="M082\nOPEN\nT-10-8-13\nReview-Fix Fast Path for\nStaged Verification", tooltip="M082 [inbox] open; blockers=0; T-10-8-13 Review-Fix Fast Path for Staged Verification", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M083" [label="M083\nOPEN\nT-10-8-14\nPre-PR Preflight Gate", tooltip="M083 [inbox] open; blockers=0; T-10-8-14 Pre-PR Preflight Gate", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M084" [label="M084\nOPEN\nT-10-8-15\nSelf-Review Command", tooltip="M084 [inbox] open; blockers=0; T-10-8-15 Self-Review Command", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M085" [label="M085\nT-10-8-16\nPre-PR Checklist and\nBoundary-Change Policy", tooltip="M085 [inbox] blocked; blockers=2; T-10-8-16 Pre-PR Checklist and Boundary-Change Policy", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M086" [label="M086\nOPEN\nT-10-8-17\nDocs Validation Beyond\nMarkdown", tooltip="M086 [inbox] open; blockers=0; T-10-8-17 Docs Validation Beyond Markdown", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M087" [label="M087\nOPEN\nT-10-8-18\nImplementation-Backed Docs\nClaims Policy", tooltip="M087 [inbox] open; blockers=0; T-10-8-18 Implementation-Backed Docs Claims Policy", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M088" [label="M088\nOPEN\nT-10-8-19\nRemove Committed Generated\nDAG Artifacts", tooltip="M088 [inbox] open; blockers=0; T-10-8-19 Remove Committed Generated DAG Artifacts", fillcolor="#bbf7d0", 
color="#15803d", penwidth=2.8]; + "M089" [label="M089\nOPEN\nT-10-9-1\nFuzzing the Port", tooltip="M089 [inbox] open; blockers=0; T-10-9-1 Fuzzing the Port", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M090" [label="M090\nOPEN\nT-10-9-2\nSIMD Canonicalization", tooltip="M090 [inbox] open; blockers=0; T-10-9-2 SIMD Canonicalization", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M091" [label="M091\nOPEN\nT-10-9-3\nCausal Visualizer", tooltip="M091 [inbox] open; blockers=0; T-10-9-3 Causal Visualizer", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M092" [label="M092\nOPEN\nT-10-7-1\nHashable View Artifacts\n(#174)", tooltip="M092 [inbox] open; blockers=0; T-10-7-1 Hashable View Artifacts (#174)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M093" [label="M093\nT-10-7-2\nSchema Hash Chain Pinning\n(#193)", tooltip="M093 [inbox] blocked; blockers=1; T-10-7-2 Schema Hash Chain Pinning (#193)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M094" [label="M094\nOPEN\nT-10-7-3\nSchemaDelta Vocabulary\n(#194)", tooltip="M094 [inbox] open; blockers=0; T-10-7-3 SchemaDelta Vocabulary (#194)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M095" [label="M095\nT-10-7-4\nProvenance as Query\nSemantics (#198)", tooltip="M095 [inbox] blocked; blockers=1; T-10-7-4 Provenance as Query Semantics (#198)", fillcolor="#e5e7eb", color="#4b5563", penwidth=1.2]; + "M096" [label="M096\nOPEN\nT-10-9-1\nShadow REALM Investigation", tooltip="M096 [inbox] open; blockers=0; T-10-9-1 Shadow REALM Investigation", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M097" [label="M097\nOPEN\nT-10-9-2\nMulti-Language Generator\nSurvey", tooltip="M097 [inbox] open; blockers=0; T-10-9-2 Multi-Language Generator Survey", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + } + + subgraph "cluster_cool_ideas" { + label="cool-ideas"; + color="#cbd5e1"; + fontname="Inter, Helvetica, Arial"; + fontsize=16; + style="rounded"; + "M098" 
[label="M098\nOPEN\nEnforce Echo design\nvocabulary", tooltip="M098 [cool-ideas] open; blockers=0; Enforce Echo design vocabulary", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M099" [label="M099\nOPEN\nCourse Material", tooltip="M099 [cool-ideas] open; blockers=0; Course Material", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M100" [label="M100\nOPEN\nCourse Material", tooltip="M100 [cool-ideas] open; blockers=0; Course Material", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M101" [label="M101\nOPEN\nExpose parallel execution\ncounterfactuals", tooltip="M101 [cool-ideas] open; blockers=0; Expose parallel execution counterfactuals", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M102" [label="M102\nT-7-4-1\nImplement rulial diff /\nworldline compare MVP\n(#172)", tooltip="M102 [cool-ideas] blocked; blockers=2; T-7-4-1 Implement rulial diff / worldline compare MVP (#172)", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M103" [label="M103\nT-7-4-2\nImplement Wesley worldline\ndiff — compare query\noutputs/proofs across...", tooltip="M103 [cool-ideas] blocked; blockers=1; T-7-4-2 Implement Wesley worldline diff — compare query outputs/proofs across ticks (#199)", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M104" [label="M104\nT-7-4-3\nImplement provenance\nheatmap — blast radius /\ncohesion over time (#204)", tooltip="M104 [cool-ideas] blocked; blockers=2; T-7-4-3 Implement provenance heatmap — blast radius / cohesion over time (#204)", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M105" [label="M105\nOPEN\nControlled Desync", tooltip="M105 [cool-ideas] open; blockers=0; Controlled Desync", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M106" [label="M106\nOPEN\nLockstep Protocol", tooltip="M106 [cool-ideas] open; blockers=0; Lockstep Protocol", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M107" [label="M107\nOPEN\nRules & State Model", tooltip="M107 [cool-ideas] open; 
blockers=0; Rules & State Model", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M108" [label="M108\nOPEN\nT-7-3-1\nImplement time travel core\n—\npause/rewind/buffer/catch-up...", tooltip="M108 [cool-ideas] open; blockers=0; T-7-3-1 Implement time travel core — pause/rewind/buffer/catch-up (#171)", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M109" [label="M109\nT-7-3-2\nImplement Reliving\ndebugger MVP — scrub\ntimeline + causal slice +...", tooltip="M109 [cool-ideas] blocked; blockers=1; T-7-3-2 Implement Reliving debugger MVP — scrub timeline + causal slice + fork branch (#205)", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M110" [label="M110\nOPEN\nDesync Breakers", tooltip="M110 [cool-ideas] open; blockers=0; Desync Breakers", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M111" [label="M111\nOPEN\nLockstep Harness", tooltip="M111 [cool-ideas] open; blockers=0; Lockstep Harness", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M112" [label="M112\nOPEN\nT-9-2-1\nImplement\nreplay-from-checkpoint\nconvergence tests", tooltip="M112 [cool-ideas] open; blockers=0; T-9-2-1 Implement replay-from-checkpoint convergence tests", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M113" [label="M113\nT-9-2-2\nImplement\nreplay-from-patches\nconvergence property tests", tooltip="M113 [cool-ideas] blocked; blockers=1; T-9-2-2 Implement replay-from-patches convergence property tests", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M114" [label="M114\nOPEN\nStage 0: AABB", tooltip="M114 [cool-ideas] open; blockers=0; Stage 0: AABB", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M115" [label="M115\nOPEN\nStage 1: Rotation", tooltip="M115 [cool-ideas] open; blockers=0; Stage 1: Rotation", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M116" [label="M116\nOPEN\nStage 2: Friction", tooltip="M116 [cool-ideas] open; blockers=0; Stage 2: Friction", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + 
"M117" [label="M117\nOPEN\nStage 3: Sleeping", tooltip="M117 [cool-ideas] open; blockers=0; Stage 3: Sleeping", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M118" [label="M118\nContinuum Contract\nArtifact Interchange", tooltip="M118 [cool-ideas] blocked; blockers=1; Continuum Contract Artifact Interchange", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M119" [label="M119\nOPEN\nCross-repo METHOD\ndashboard", tooltip="M119 [cool-ideas] open; blockers=0; Cross-repo METHOD dashboard", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M120" [label="M120\nOPEN\nT-5-4-1\nArc<[u8]> to bytes::Bytes\nmigration", tooltip="M120 [cool-ideas] open; blockers=0; T-5-4-1 Arc<[u8]> to bytes::Bytes migration", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M121" [label="M121\nT-5-4-2\nAsyncBlobStore trait", tooltip="M121 [cool-ideas] blocked; blockers=2; T-5-4-2 AsyncBlobStore trait", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M122" [label="M122\nT-5-4-3\nEnumeration and metadata\nAPI", tooltip="M122 [cool-ideas] blocked; blockers=1; T-5-4-3 Enumeration and metadata API", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M123" [label="M123\nOPEN\nT-5-1-1\nFile-per-blob DiskTier\nimplementation", tooltip="M123 [cool-ideas] open; blockers=0; T-5-1-1 File-per-blob DiskTier implementation", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M124" [label="M124\nT-5-1-2\nTiered promotion/demotion\n(Memory <-> Disk)", tooltip="M124 [cool-ideas] blocked; blockers=1; T-5-1-2 Tiered promotion/demotion (Memory <-> Disk)", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M125" [label="M125\nT-5-2-1\nMark-sweep reachability\nanalysis", tooltip="M125 [cool-ideas] blocked; blockers=1; T-5-2-1 Mark-sweep reachability analysis", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M126" [label="M126\nT-5-2-2\nEviction policy and\nbackground sweep task", tooltip="M126 [cool-ideas] blocked; blockers=2; T-5-2-2 Eviction policy and 
background sweep task", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M127" [label="M127\nOPEN\nT-5-3-1\nMessage type definitions\nand binary encoding", tooltip="M127 [cool-ideas] open; blockers=0; T-5-3-1 Message type definitions and binary encoding", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M128" [label="M128\nT-5-3-2\nRequest/response protocol\nand backpressure", tooltip="M128 [cool-ideas] blocked; blockers=1; T-5-3-2 Request/response protocol and backpressure", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M129" [label="M129\nOPEN\nExtract method crate to\nits own repo", tooltip="M129 [cool-ideas] open; blockers=0; Extract method crate to its own repo", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M130" [label="M130\nOPEN\nMethod drift check as\npre-push hook", tooltip="M130 [cool-ideas] open; blockers=0; Method drift check as pre-push hook", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M131" [label="M131\nProof-Carrying Apertures", tooltip="M131 [cool-ideas] blocked; blockers=3; Proof-Carrying Apertures", fillcolor="#ede9fe", color="#7c3aed", penwidth=1.2]; + "M132" [label="M132\nOPEN\nReading envelope inspector", tooltip="M132 [cool-ideas] open; blockers=0; Reading envelope inspector", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M133" [label="M133\nOPEN\nVisualization", tooltip="M133 [cool-ideas] open; blockers=0; Visualization", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M134" [label="M134\nOPEN\nVisualization", tooltip="M134 [cool-ideas] open; blockers=0; Visualization", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M135" [label="M135\nOPEN\nWARPDrive POSIX\nMaterialization Optic", tooltip="M135 [cool-ideas] open; blockers=0; WARPDrive POSIX Materialization Optic", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + } + + subgraph "cluster_bad_code" { + label="bad-code"; + color="#cbd5e1"; + fontname="Inter, Helvetica, Arial"; + fontsize=16; + style="rounded"; + 
"M136" [label="M136\nOPEN\nRED/GREEN can't be\nseparate commits", tooltip="M136 [bad-code] open; blockers=0; RED/GREEN can't be separate commits", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + "M137" [label="M137\nOPEN\nxtask main.rs is a god\nfile", tooltip="M137 [bad-code] open; blockers=0; xtask main.rs is a god file", fillcolor="#bbf7d0", color="#15803d", penwidth=2.8]; + } + + "M012" -> "M023"; + "M012" -> "M039"; + "M012" -> "M131"; + "M013" -> "M118"; + "M015" -> "M034"; + "M016" -> "M018"; + "M016" -> "M031"; + "M016" -> "M034"; + "M017" -> "M039"; + "M019" -> "M020"; + "M020" -> "M021"; + "M022" -> "M039"; + "M023" -> "M032"; + "M023" -> "M131"; + "M028" -> "M013"; + "M032" -> "M028"; + "M034" -> "M018"; + "M036" -> "M019"; + "M036" -> "M037"; + "M036" -> "M038"; + "M037" -> "M020"; + "M038" -> "M019"; + "M038" -> "M047"; + "M039" -> "M032"; + "M041" -> "M042"; + "M043" -> "M044"; + "M045" -> "M046"; + "M046" -> "M047"; + "M048" -> "M049"; + "M050" -> "M051"; + "M053" -> "M066"; + "M054" -> "M055"; + "M059" -> "M060"; + "M060" -> "M061"; + "M066" -> "M067"; + "M067" -> "M068"; + "M068" -> "M069"; + "M083" -> "M085"; + "M084" -> "M085"; + "M092" -> "M093"; + "M093" -> "M095"; + "M102" -> "M103"; + "M102" -> "M104"; + "M103" -> "M104"; + "M108" -> "M102"; + "M108" -> "M109"; + "M109" -> "M102"; + "M112" -> "M113"; + "M120" -> "M121"; + "M121" -> "M122"; + "M123" -> "M124"; + "M123" -> "M125"; + "M124" -> "M121"; + "M124" -> "M126"; + "M125" -> "M126"; + "M127" -> "M128"; + "M135" -> "M131"; +} diff --git a/docs/method/task-dag.svg b/docs/method/task-dag.svg new file mode 100644 index 00000000..ae97d8b9 --- /dev/null +++ b/docs/method/task-dag.svg @@ -0,0 +1,2047 @@ + + + + + + +method_task_dag + +METHOD Backlog Task Dependency DAG +open tasks: 95 / 137; dependency edges: 57 + +cluster_asap + +asap + + +cluster_up_next + +up-next + + +cluster_inbox + +inbox + + +cluster_cool_ideas + +cool-ideas + + +cluster_bad_code + +bad-code + + + +M001 + + +M001 
+OPEN +Docs cleanup + + + + + +M002 + + +M002 +OPEN +Echo and git-warp +compatibility sanity check + + + + + +M003 + + +M003 +OPEN +T-9-3-1 +Verify and integrate +deterministic trig oracle +into release gate + + + + + +M004 + + +M004 +OPEN +CI det-policy hardening + + + + + +M005 + + +M005 +OPEN +T-6-1-2 +Config file support and +shell completions + + + + + +M006 + + +M006 +OPEN +T-279-1 +Make decoder control +coverage auditable + + + + + +M007 + + +M007 +OPEN +Echo Contract Hosting +Roadmap + + + + + +M008 + + +M008 +OPEN +Commit-ordered rollback +playbooks for TTD +integration + + + + + +M009 + + +M009 +OPEN +Reconcile TTD protocol +schemas with warp-ttd + + + + + +M010 + + +M010 +OPEN +Wesley Compiled Contract +Hosting Doctrine + + + + + +M011 + + +M011 +OPEN +Compliance reporting as a +TTD protocol extension + + + + + +M012 + + +M012 +OPEN +Contract-Aware Receipts +And Readings + + + + + +M023 + + +M023 +Contract Artifact +Retention In echo-cas + + + + + +M012->M023 + + + + + +M039 + + +M039 +Wesley Footprint Honesty +Artifact Attestation + + + + + +M012->M039 + + + + + +M131 + + +M131 +Proof-Carrying Apertures + + + + + +M012->M131 + + + + + +M013 + + +M013 +Contract Strands And +Counterfactuals + + + + + +M118 + + +M118 +Continuum Contract +Artifact Interchange + + + + + +M013->M118 + + + + + +M014 + + +M014 +OPEN +T-2-5-1 +SHA-256 to BLAKE3 +migration spec + + + + + +M015 + + +M015 +OPEN +Security/capabilities for +fork/rewind/merge + + + + + +M034 + + +M034 +Strand and support Intent +paths + + + + + +M015->M034 + + + + + +M016 + + +M016 +OPEN +WARP optic boundary audit +for topology and history +operations + + + + + +M018 + + +M018 +Braid and settlement +Intent paths + + + + + +M016->M018 + + + + + +M031 + + +M031 +Inverse operation Intent +path + + + + + +M016->M031 + + + + + +M016->M034 + + + + + +M017 + + +M017 +OPEN +Authenticated Wesley +Intent Admission Posture + + + + + +M017->M039 + + + + + +M019 + + +M019 +T-4-2-1 +Canvas graph renderer +(static 
materialized +reading) + + + + + +M020 + + +M020 +T-4-2-2 +Live tick playback and +rewrite animation + + + + + +M019->M020 + + + + + +M021 + + +M021 +T-4-2-3 +Node inspection panel + + + + + +M020->M021 + + + + + +M022 + + +M022 +OPEN +Continuum Proof Family +Runtime Cutover + + + + + +M022->M039 + + + + + +M032 + + +M032 +jedit Text Contract +Hosting MVP + + + + + +M023->M032 + + + + + +M023->M131 + + + + + +M024 + + +M024 +OPEN +Add an explicit Echo CLI +and MCP agent surface + + + + + +M025 + + +M025 +OPEN +T-4-3-2 +JS bindings for CAS +store/retrieve + + + + + +M026 + + +M026 +OPEN +Echo / git-warp witnessed +suffix sync + + + + + +M027 + + +M027 +OPEN +Split echo-session-proto +into retained bridge +contracts vs legacy... + + + + + +M028 + + +M028 +Graft Live Frontier +Structural Readings + + + + + +M028->M013 + + + + + +M029 + + +M029 +OPEN +Import outcome idempotence +and loop law + + + + + +M030 + + +M030 +OPEN +Import outcome retention +and novelty index + + + + + +M032->M028 + + + + + +M033 + + +M033 +OPEN +Triage METHOD drift +against ~/git/method + + + + + +M034->M018 + + + + + +M035 + + +M035 +OPEN +Narrow ttd-browser into an +Echo browser host bridge + + + + + +M036 + + +M036 +OPEN +T-4-1-1 +Wire Engine lifecycle +behind wasm-bindgen +exports + + + + + +M036->M019 + + + + + +M037 + + +M037 +T-4-1-2 +Snapshot and ViewOp drain +exports + + + + + +M036->M037 + + + + + +M038 + + +M038 +T-4-1-3 +JS/WASM memory bridge and +error protocol + + + + + +M036->M038 + + + + + +M037->M020 + + + + + +M038->M019 + + + + + +M047 + + +M047 +T-4-4-3 +CBOR serialization bridge +(TS types to WASM Rust) + + + + + +M038->M047 + + + + + +M039->M032 + + + + + +M040 + + +M040 +OPEN +T-2-3-1 +README, contributor guide, +and CI hardening + + + + + +M041 + + +M041 +OPEN +T-2-2-1 +Backfill script generation +for schema migrations + + + + + +M042 + + +M042 +T-2-2-2 +Switch-over plan and +contract validation + + + + + +M041->M042 + + + + + +M043 + + +M043 +OPEN +T-2-1-1 +GraphQL 
operation parser +for QIR + + + + + +M044 + + +M044 +T-2-1-2 +SQL query plan generation +from QIR + + + + + +M043->M044 + + + + + +M045 + + +M045 +OPEN +T-4-4-1 +TypeScript type generation +from Wesley IR + + + + + +M046 + + +M046 +T-4-4-2 +Zod runtime validators +from Wesley IR + + + + + +M045->M046 + + + + + +M046->M047 + + + + + +M048 + + +M048 +OPEN +T-10-10-1 +Information Architecture +Consolidation + + + + + +M049 + + +M049 +T-10-10-2 +Tutorial Series + API +Reference + + + + + +M048->M049 + + + + + +M050 + + +M050 +OPEN +T-10-6-1a +Rhai Sandbox Configuration +(#173, part a) + + + + + +M051 + + +M051 +T-10-6-1b +ViewClaim / EffectClaim +Receipts (#173, part b) + + + + + +M050->M051 + + + + + +M052 + + +M052 +OPEN +First-class invariant +documents + + + + + +M053 + + +M053 +OPEN +T-10-2-1 +Spec — Commit/Manifest +Signing (#20) + + + + + +M066 + + +M066 +T-10-3-1 +Key Management Doc (#35) + + + + + +M053->M066 + + + + + +M054 + + +M054 +OPEN +T-10-2-2 +Spec — Security Contexts +(#21) + + + + + +M055 + + +M055 +T-10-2-3 +FFI Limits and Validation +(#38) + + + + + +M054->M055 + + + + + +M056 + + +M056 +OPEN +T-10-2-4 +JS-ABI Packet Checksum v2 +(#195) + + + + + +M057 + + +M057 +OPEN +T-10-2-5 +Spec — Provenance +Payload v1 (#202) + + + + + +M058 + + +M058 +OPEN +ABI nested evidence +strictness + + + + + +M059 + + +M059 +OPEN +T-10-4-1 +Draft Hot-Reload Spec +(#75) + + + + + +M060 + + +M060 +T-10-4-2 +File Watcher / Debounce +(#76) + + + + + +M059->M060 + + + + + +M061 + + +M061 +T-10-4-3 +Hot-Reload Implementation +(#24) + + + + + +M060->M061 + + + + + +M062 + + +M062 +OPEN +git-mind NEXUS + + + + + +M063 + + +M063 +OPEN +T-10-5-1 +Importer Umbrella Audit + +Close (#25) + + + + + +M064 + + +M064 +OPEN +Legend progress in method +status + + + + + +M065 + + +M065 +OPEN +Reconcile Relocated Wesley +Echo Schemas + + + + + +M067 + + +M067 +T-10-3-2 +CI — Sign Release +Artifacts (Dry Run) (#33) + + + + + +M066->M067 + + + + + +M068 + + +M068 +T-10-3-3 +CLI Verify Path 
(#34) + + + + + +M067->M068 + + + + + +M069 + + +M069 +T-10-3-4 +CI — Verify Signatures +(#36) + + + + + +M068->M069 + + + + + +M070 + + +M070 +OPEN +T-10-8-1 +Docs / Logging +Improvements (#79) + + + + + +M071 + + +M071 +OPEN +T-10-8-2 +Naming Consistency Audit +(#207) + + + + + +M072 + + +M072 +OPEN +T-10-8-3 +Reliving Debugger UX +Design (#239) + + + + + +M073 + + +M073 +OPEN +T-10-8-4 +Local Rustdoc Warning Gate + + + + + +M074 + + +M074 +OPEN +T-10-8-5 +Deterministic Test Engine +Helper + + + + + +M075 + + +M075 +OPEN +T-10-8-6 +Current-Head PR Review / +Merge Summary Tool + + + + + +M076 + + +M076 +OPEN +T-10-8-7 +CI Trigger Rationalization + + + + + +M077 + + +M077 +OPEN +T-10-8-8 +Background Cargo Lock +Isolation + + + + + +M078 + + +M078 +OPEN +T-10-8-9 +Small-Commit Pre-Commit +Latency Reduction + + + + + +M079 + + +M079 +OPEN +T-10-8-10 +Feature-Gate Contract +Verification + + + + + +M080 + + +M080 +OPEN +T-10-8-11 +PR Review Thread Reply / +Resolution Helper + + + + + +M081 + + +M081 +OPEN +T-10-8-12 +Shell Script Style / +Format Lane + + + + + +M082 + + +M082 +OPEN +T-10-8-13 +Review-Fix Fast Path for +Staged Verification + + + + + +M083 + + +M083 +OPEN +T-10-8-14 +Pre-PR Preflight Gate + + + + + +M085 + + +M085 +T-10-8-16 +Pre-PR Checklist and +Boundary-Change Policy + + + + + +M083->M085 + + + + + +M084 + + +M084 +OPEN +T-10-8-15 +Self-Review Command + + + + + +M084->M085 + + + + + +M086 + + +M086 +OPEN +T-10-8-17 +Docs Validation Beyond +Markdown + + + + + +M087 + + +M087 +OPEN +T-10-8-18 +Implementation-Backed Docs +Claims Policy + + + + + +M088 + + +M088 +OPEN +T-10-8-19 +Remove Committed Generated +DAG Artifacts + + + + + +M089 + + +M089 +OPEN +T-10-9-1 +Fuzzing the Port + + + + + +M090 + + +M090 +OPEN +T-10-9-2 +SIMD Canonicalization + + + + + +M091 + + +M091 +OPEN +T-10-9-3 +Causal Visualizer + + + + + +M092 + + +M092 +OPEN +T-10-7-1 +Hashable View Artifacts +(#174) + + + + + +M093 + + +M093 +T-10-7-2 +Schema Hash Chain Pinning +(#193) + + + + 
+ +M092->M093 + + + + + +M095 + + +M095 +T-10-7-4 +Provenance as Query +Semantics (#198) + + + + + +M093->M095 + + + + + +M094 + + +M094 +OPEN +T-10-7-3 +SchemaDelta Vocabulary +(#194) + + + + + +M096 + + +M096 +OPEN +T-10-9-1 +Shadow REALM Investigation + + + + + +M097 + + +M097 +OPEN +T-10-9-2 +Multi-Language Generator +Survey + + + + + +M098 + + +M098 +OPEN +Enforce Echo design +vocabulary + + + + + +M099 + + +M099 +OPEN +Course Material + + + + + +M100 + + +M100 +OPEN +Course Material + + + + + +M101 + + +M101 +OPEN +Expose parallel execution +counterfactuals + + + + + +M102 + + +M102 +T-7-4-1 +Implement rulial diff / +worldline compare MVP +(#172) + + + + + +M103 + + +M103 +T-7-4-2 +Implement Wesley worldline +diff — compare query +outputs/proofs across... + + + + + +M102->M103 + + + + + +M104 + + +M104 +T-7-4-3 +Implement provenance +heatmap — blast radius / +cohesion over time (#204) + + + + + +M102->M104 + + + + + +M103->M104 + + + + + +M105 + + +M105 +OPEN +Controlled Desync + + + + + +M106 + + +M106 +OPEN +Lockstep Protocol + + + + + +M107 + + +M107 +OPEN +Rules & State Model + + + + + +M108 + + +M108 +OPEN +T-7-3-1 +Implement time travel core + +pause/rewind/buffer/catch-up... + + + + + +M108->M102 + + + + + +M109 + + +M109 +T-7-3-2 +Implement Reliving +debugger MVP — scrub +timeline + causal slice +... 
+ + + + + +M108->M109 + + + + + +M109->M102 + + + + + +M110 + + +M110 +OPEN +Desync Breakers + + + + + +M111 + + +M111 +OPEN +Lockstep Harness + + + + + +M112 + + +M112 +OPEN +T-9-2-1 +Implement +replay-from-checkpoint +convergence tests + + + + + +M113 + + +M113 +T-9-2-2 +Implement +replay-from-patches +convergence property tests + + + + + +M112->M113 + + + + + +M114 + + +M114 +OPEN +Stage 0: AABB + + + + + +M115 + + +M115 +OPEN +Stage 1: Rotation + + + + + +M116 + + +M116 +OPEN +Stage 2: Friction + + + + + +M117 + + +M117 +OPEN +Stage 3: Sleeping + + + + + +M119 + + +M119 +OPEN +Cross-repo METHOD +dashboard + + + + + +M120 + + +M120 +OPEN +T-5-4-1 +Arc<[u8]> to bytes::Bytes +migration + + + + + +M121 + + +M121 +T-5-4-2 +AsyncBlobStore trait + + + + + +M120->M121 + + + + + +M122 + + +M122 +T-5-4-3 +Enumeration and metadata +API + + + + + +M121->M122 + + + + + +M123 + + +M123 +OPEN +T-5-1-1 +File-per-blob DiskTier +implementation + + + + + +M124 + + +M124 +T-5-1-2 +Tiered promotion/demotion +(Memory <-> Disk) + + + + + +M123->M124 + + + + + +M125 + + +M125 +T-5-2-1 +Mark-sweep reachability +analysis + + + + + +M123->M125 + + + + + +M124->M121 + + + + + +M126 + + +M126 +T-5-2-2 +Eviction policy and +background sweep task + + + + + +M124->M126 + + + + + +M125->M126 + + + + + +M127 + + +M127 +OPEN +T-5-3-1 +Message type definitions +and binary encoding + + + + + +M128 + + +M128 +T-5-3-2 +Request/response protocol +and backpressure + + + + + +M127->M128 + + + + + +M129 + + +M129 +OPEN +Extract method crate to +its own repo + + + + + +M130 + + +M130 +OPEN +Method drift check as +pre-push hook + + + + + +M132 + + +M132 +OPEN +Reading envelope inspector + + + + + +M133 + + +M133 +OPEN +Visualization + + + + + +M134 + + +M134 +OPEN +Visualization + + + + + +M135 + + +M135 +OPEN +WARPDrive POSIX +Materialization Optic + + + + + +M135->M131 + + + + + +M136 + + +M136 +OPEN +RED/GREEN can't be +separate commits + + + + + +M137 + + +M137 +OPEN +xtask main.rs is a god +file + + 
+ + + diff --git a/docs/method/task-matrix.csv b/docs/method/task-matrix.csv new file mode 100644 index 00000000..7bc0a587 --- /dev/null +++ b/docs/method/task-matrix.csv @@ -0,0 +1,138 @@ +task,M001,M002,M003,M004,M005,M006,M007,M008,M009,M010,M011,M012,M013,M014,M015,M016,M017,M018,M019,M020,M021,M022,M023,M024,M025,M026,M027,M028,M029,M030,M031,M032,M033,M034,M035,M036,M037,M038,M039,M040,M041,M042,M043,M044,M045,M046,M047,M048,M049,M050,M051,M052,M053,M054,M055,M056,M057,M058,M059,M060,M061,M062,M063,M064,M065,M066,M067,M068,M069,M070,M071,M072,M073,M074,M075,M076,M077,M078,M079,M080,M081,M082,M083,M084,M085,M086,M087,M088,M089,M090,M091,M092,M093,M094,M095,M096,M097,M098,M099,M100,M101,M102,M103,M104,M105,M106,M107,M108,M109,M110,M111,M112,M113,M114,M115,M116,M117,M118,M119,M120,M121,M122,M123,M124,M125,M126,M127,M128,M129,M130,M131,M132,M133,M134,M135,M136,M137 +M001,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M002,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M003,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M004,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M005,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M006,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M007,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M008,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M009,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M010,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M011,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M012,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M013,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M014,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M015,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M016,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M017,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M018,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M019,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M020,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M021,,,,,,,,,,,,,,,,,,,,depends 
on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M022,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M023,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M024,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M025,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M026,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M027,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M028,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M029,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M030,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M031,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M032,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M033,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M034,,,,,,,,,,,,,,,depends on,depends 
on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M035,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M036,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M037,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M038,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M039,,,,,,,,,,,,depends on,,,,,depends on,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M040,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M041,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M042,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M043,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M044,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M045,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M046,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M047,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,depends 
on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M048,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M049,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M050,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M051,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M052,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M053,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M054,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M055,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M056,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M057,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M058,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M059,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M060,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M061,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M062,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M063,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M064,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M065,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M066,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M067,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M068,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M069,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M070,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M071,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M072,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M073,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M074,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M075,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M076,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M077,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M078,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M079,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M080,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M081,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M082,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M083,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M084,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M085,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M086,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M087,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M088,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M089,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M090,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M091,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M092,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M093,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M094,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M095,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M096,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M097,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M098,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M099,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M100,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M101,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M102,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M103,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M104,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M105,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M106,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M107,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M108,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M109,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M110,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M111,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M112,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M113,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,, +M114,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M115,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M116,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M117,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M118,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M119,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M120,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M121,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,depends on,,,,,,,,,,,,, +M122,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,, +M123,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M124,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,, +M125,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,, 
+M126,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,, +M127,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M128,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,, +M129,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M130,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M131,,,,,,,,,,,,depends on,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,, +M132,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M133,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M134,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M135,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M136,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M137,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, diff --git a/docs/method/task-matrix.md b/docs/method/task-matrix.md new file mode 100644 index 00000000..2d4fd1ea --- /dev/null +++ b/docs/method/task-matrix.md @@ -0,0 +1,331 @@ + + + +# METHOD Task Matrix + +Rows are 
dependent tasks. Columns are prerequisite tasks. A cell contains +`depends on` when the row task directly depends on the column task. + +This matrix is generated from `docs/method/backlog/**`. If a backlog file +contains `## T-...` task sections, each section is a task row. Otherwise, +the backlog file itself is one task row. File-level `Depends on:` links are +included when they resolve to another backlog task. Section-level +`Blocked By:` / `Blocking:` task IDs are included when they resolve to a +task row. + +Blank cells mean no direct dependency was found. Transitive dependencies are +not expanded. + +## Summary + +- Matrix rows/columns: 137 +- Direct in-matrix dependency edges: 57 +- Completed backlog tasks: 0 +- `asap` tasks: 10 +- `up-next` tasks: 37 +- `inbox` tasks: 50 +- `cool-ideas` tasks: 38 +- `bad-code` tasks: 2 + +## Task IDs + +- `M001` `asap`: [Docs cleanup](docs/method/backlog/asap/DOCS_docs-cleanup.md) (source: [`docs/method/backlog/asap/DOCS_docs-cleanup.md`](docs/method/backlog/asap/DOCS_docs-cleanup.md)) +- `M002` `asap`: [Echo and git-warp compatibility sanity check](docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md) (source: [`docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md`](docs/method/backlog/asap/KERNEL_echo-git-warp-compatibility-sanity-check.md)) +- `M003` `asap` `T-9-3-1`: [Verify and integrate deterministic trig oracle into release gate](docs/method/backlog/asap/MATH_deterministic-trig.md#t-9-3-1-verify-and-integrate-deterministic-trig-oracle-into-release-gate) (source: [`docs/method/backlog/asap/MATH_deterministic-trig.md`](docs/method/backlog/asap/MATH_deterministic-trig.md)) +- `M004` `asap`: [CI det-policy hardening](docs/method/backlog/asap/PLATFORM_ci-det-policy-hardening.md) (source: [`docs/method/backlog/asap/PLATFORM_ci-det-policy-hardening.md`](docs/method/backlog/asap/PLATFORM_ci-det-policy-hardening.md)) +- `M005` `asap` `T-6-1-2`: [Config file support and shell 
completions](docs/method/backlog/asap/PLATFORM_cli-scaffold.md#t-6-1-2-config-file-support-and-shell-completions) (source: [`docs/method/backlog/asap/PLATFORM_cli-scaffold.md`](docs/method/backlog/asap/PLATFORM_cli-scaffold.md)) +- `M006` `asap` `T-279-1`: [Make decoder control coverage auditable](docs/method/backlog/asap/PLATFORM_decoder-negative-test-map.md#t-279-1-make-decoder-control-coverage-auditable) (source: [`docs/method/backlog/asap/PLATFORM_decoder-negative-test-map.md`](docs/method/backlog/asap/PLATFORM_decoder-negative-test-map.md)) +- `M007` `asap`: [Echo Contract Hosting Roadmap](docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md) (source: [`docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md`](docs/method/backlog/asap/PLATFORM_echo-contract-hosting-roadmap.md)) +- `M008` `asap`: [Commit-ordered rollback playbooks for TTD integration](docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md) (source: [`docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md`](docs/method/backlog/asap/PLATFORM_ttd-rollback-playbooks.md)) +- `M009` `asap`: [Reconcile TTD protocol schemas with warp-ttd](docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md) (source: [`docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md`](docs/method/backlog/asap/PLATFORM_ttd-schema-reconciliation.md)) +- `M010` `asap`: [Wesley Compiled Contract Hosting Doctrine](docs/method/backlog/asap/PLATFORM_wesley-compiled-contract-hosting-doctrine.md) (source: [`docs/method/backlog/asap/PLATFORM_wesley-compiled-contract-hosting-doctrine.md`](docs/method/backlog/asap/PLATFORM_wesley-compiled-contract-hosting-doctrine.md)) +- `M011` `up-next`: [Compliance reporting as a TTD protocol extension](docs/method/backlog/up-next/KERNEL_compliance-protocol-envelope.md) (source: [`docs/method/backlog/up-next/KERNEL_compliance-protocol-envelope.md`](docs/method/backlog/up-next/KERNEL_compliance-protocol-envelope.md)) +- `M012` `up-next`: [Contract-Aware 
Receipts And Readings](docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md) (source: [`docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md`](docs/method/backlog/up-next/KERNEL_contract-aware-receipts-and-readings.md)) +- `M013` `up-next`: [Contract Strands And Counterfactuals](docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md) (source: [`docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md`](docs/method/backlog/up-next/KERNEL_contract-strands-and-counterfactuals.md)) +- `M014` `up-next` `T-2-5-1`: [SHA-256 to BLAKE3 migration spec](docs/method/backlog/up-next/KERNEL_sha256-blake3.md#t-2-5-1-sha-256-to-blake3-migration-spec) (source: [`docs/method/backlog/up-next/KERNEL_sha256-blake3.md`](docs/method/backlog/up-next/KERNEL_sha256-blake3.md)) +- `M015` `up-next`: [Security/capabilities for fork/rewind/merge](docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md) (source: [`docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md`](docs/method/backlog/up-next/KERNEL_time-travel-capabilities.md)) +- `M016` `up-next`: [WARP optic boundary audit for topology and history operations](docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md) (source: [`docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md`](docs/method/backlog/up-next/KERNEL_topology-mutation-intent-boundary-audit.md)) +- `M017` `up-next`: [Authenticated Wesley Intent Admission Posture](docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md) (source: [`docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md`](docs/method/backlog/up-next/PLATFORM_authenticated-wesley-intent-admission-posture.md)) +- `M018` `up-next`: [Braid and settlement Intent paths](docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md) (source: 
[`docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md`](docs/method/backlog/up-next/PLATFORM_braid-settlement-intent-paths.md)) +- `M019` `up-next` `T-4-2-1`: [Canvas graph renderer (static materialized reading)](docs/method/backlog/up-next/PLATFORM_browser-visualization.md#t-4-2-1-canvas-graph-renderer-static-materialized-reading) (source: [`docs/method/backlog/up-next/PLATFORM_browser-visualization.md`](docs/method/backlog/up-next/PLATFORM_browser-visualization.md)) +- `M020` `up-next` `T-4-2-2`: [Live tick playback and rewrite animation](docs/method/backlog/up-next/PLATFORM_browser-visualization.md#t-4-2-2-live-tick-playback-and-rewrite-animation) (source: [`docs/method/backlog/up-next/PLATFORM_browser-visualization.md`](docs/method/backlog/up-next/PLATFORM_browser-visualization.md)) +- `M021` `up-next` `T-4-2-3`: [Node inspection panel](docs/method/backlog/up-next/PLATFORM_browser-visualization.md#t-4-2-3-node-inspection-panel) (source: [`docs/method/backlog/up-next/PLATFORM_browser-visualization.md`](docs/method/backlog/up-next/PLATFORM_browser-visualization.md)) +- `M022` `up-next`: [Continuum Proof Family Runtime Cutover](docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md) (source: [`docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md`](docs/method/backlog/up-next/PLATFORM_continuum-proof-family-runtime-cutover.md)) +- `M023` `up-next`: [Contract Artifact Retention In echo-cas](docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md) (source: [`docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md`](docs/method/backlog/up-next/PLATFORM_contract-artifact-retention-in-echo-cas.md)) +- `M024` `up-next`: [Add an explicit Echo CLI and MCP agent surface](docs/method/backlog/up-next/PLATFORM_echo-agent-surface-cli-and-mcp.md) (source: 
[`docs/method/backlog/up-next/PLATFORM_echo-agent-surface-cli-and-mcp.md`](docs/method/backlog/up-next/PLATFORM_echo-agent-surface-cli-and-mcp.md)) +- `M025` `up-next` `T-4-3-2`: [JS bindings for CAS store/retrieve](docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md#t-4-3-2-js-bindings-for-cas-storeretrieve) (source: [`docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md`](docs/method/backlog/up-next/PLATFORM_echo-cas-js-bindings.md)) +- `M026` `up-next`: [Echo / git-warp witnessed suffix sync](docs/method/backlog/up-next/PLATFORM_echo-git-warp-witnessed-suffix-sync.md) (source: [`docs/method/backlog/up-next/PLATFORM_echo-git-warp-witnessed-suffix-sync.md`](docs/method/backlog/up-next/PLATFORM_echo-git-warp-witnessed-suffix-sync.md)) +- `M027` `up-next`: [Split echo-session-proto into retained bridge contracts vs legacy transport residue](docs/method/backlog/up-next/PLATFORM_echo-session-proto-split.md) (source: [`docs/method/backlog/up-next/PLATFORM_echo-session-proto-split.md`](docs/method/backlog/up-next/PLATFORM_echo-session-proto-split.md)) +- `M028` `up-next`: [Graft Live Frontier Structural Readings](docs/method/backlog/up-next/PLATFORM_graft-live-frontier-structural-readings.md) (source: [`docs/method/backlog/up-next/PLATFORM_graft-live-frontier-structural-readings.md`](docs/method/backlog/up-next/PLATFORM_graft-live-frontier-structural-readings.md)) +- `M029` `up-next`: [Import outcome idempotence and loop law](docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md) (source: [`docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md`](docs/method/backlog/up-next/PLATFORM_import-outcome-idempotence-and-loop-law.md)) +- `M030` `up-next`: [Import outcome retention and novelty index](docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md) (source: 
[`docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md`](docs/method/backlog/up-next/PLATFORM_import-outcome-retention-novelty-index.md)) +- `M031` `up-next`: [Inverse operation Intent path](docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md) (source: [`docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md`](docs/method/backlog/up-next/PLATFORM_inverse-operation-intent-path.md)) +- `M032` `up-next`: [jedit Text Contract Hosting MVP](docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md) (source: [`docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md`](docs/method/backlog/up-next/PLATFORM_jedit-text-contract-mvp.md)) +- `M033` `up-next`: [Triage METHOD drift against ~/git/method](docs/method/backlog/up-next/PLATFORM_method-sync-and-doctor-triage.md) (source: [`docs/method/backlog/up-next/PLATFORM_method-sync-and-doctor-triage.md`](docs/method/backlog/up-next/PLATFORM_method-sync-and-doctor-triage.md)) +- `M034` `up-next`: [Strand and support Intent paths](docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md) (source: [`docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md`](docs/method/backlog/up-next/PLATFORM_strand-and-support-intent-paths.md)) +- `M035` `up-next`: [Narrow ttd-browser into an Echo browser host bridge](docs/method/backlog/up-next/PLATFORM_ttd-browser-host-bridge.md) (source: [`docs/method/backlog/up-next/PLATFORM_ttd-browser-host-bridge.md`](docs/method/backlog/up-next/PLATFORM_ttd-browser-host-bridge.md)) +- `M036` `up-next` `T-4-1-1`: [Wire Engine lifecycle behind wasm-bindgen exports](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md#t-4-1-1-wire-engine-lifecycle-behind-wasm-bindgen-exports) (source: [`docs/method/backlog/up-next/PLATFORM_wasm-runtime.md`](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md)) +- `M037` `up-next` `T-4-1-2`: [Snapshot and ViewOp drain 
exports](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md#t-4-1-2-snapshot-and-viewop-drain-exports) (source: [`docs/method/backlog/up-next/PLATFORM_wasm-runtime.md`](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md)) +- `M038` `up-next` `T-4-1-3`: [JS/WASM memory bridge and error protocol](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md#t-4-1-3-jswasm-memory-bridge-and-error-protocol) (source: [`docs/method/backlog/up-next/PLATFORM_wasm-runtime.md`](docs/method/backlog/up-next/PLATFORM_wasm-runtime.md)) +- `M039` `up-next`: [Wesley Footprint Honesty Artifact Attestation](docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md`](docs/method/backlog/up-next/PLATFORM_wesley-footprint-honesty-artifact-attestation.md)) +- `M040` `up-next` `T-2-3-1`: [README, contributor guide, and CI hardening](docs/method/backlog/up-next/PLATFORM_wesley-go-public.md#t-2-3-1-readme-contributor-guide-and-ci-hardening) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-go-public.md`](docs/method/backlog/up-next/PLATFORM_wesley-go-public.md)) +- `M041` `up-next` `T-2-2-1`: [Backfill script generation for schema migrations](docs/method/backlog/up-next/PLATFORM_wesley-migration.md#t-2-2-1-backfill-script-generation-for-schema-migrations) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-migration.md`](docs/method/backlog/up-next/PLATFORM_wesley-migration.md)) +- `M042` `up-next` `T-2-2-2`: [Switch-over plan and contract validation](docs/method/backlog/up-next/PLATFORM_wesley-migration.md#t-2-2-2-switch-over-plan-and-contract-validation) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-migration.md`](docs/method/backlog/up-next/PLATFORM_wesley-migration.md)) +- `M043` `up-next` `T-2-1-1`: [GraphQL operation parser for QIR](docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md#t-2-1-1-graphql-operation-parser-for-qir) (source: 
[`docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md`](docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md)) +- `M044` `up-next` `T-2-1-2`: [SQL query plan generation from QIR](docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md#t-2-1-2-sql-query-plan-generation-from-qir) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md`](docs/method/backlog/up-next/PLATFORM_wesley-qir-phase-c.md)) +- `M045` `up-next` `T-4-4-1`: [TypeScript type generation from Wesley IR](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md#t-4-4-1-typescript-type-generation-from-wesley-ir) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md`](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md)) +- `M046` `up-next` `T-4-4-2`: [Zod runtime validators from Wesley IR](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md#t-4-4-2-zod-runtime-validators-from-wesley-ir) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md`](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md)) +- `M047` `up-next` `T-4-4-3`: [CBOR serialization bridge (TS types to WASM Rust)](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md#t-4-4-3-cbor-serialization-bridge-ts-types-to-wasm-rust) (source: [`docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md`](docs/method/backlog/up-next/PLATFORM_wesley-type-pipeline-browser.md)) +- `M048` `inbox` `T-10-10-1`: [Information Architecture Consolidation](docs/method/backlog/inbox/DOCS_wesley-docs.md#t-10-10-1-information-architecture-consolidation) (source: [`docs/method/backlog/inbox/DOCS_wesley-docs.md`](docs/method/backlog/inbox/DOCS_wesley-docs.md)) +- `M049` `inbox` `T-10-10-2`: [Tutorial Series + API Reference](docs/method/backlog/inbox/DOCS_wesley-docs.md#t-10-10-2-tutorial-series-api-reference) (source: 
[`docs/method/backlog/inbox/DOCS_wesley-docs.md`](docs/method/backlog/inbox/DOCS_wesley-docs.md)) +- `M050` `inbox` `T-10-6-1a`: [Rhai Sandbox Configuration (#173, part a)](docs/method/backlog/inbox/KERNEL_deterministic-rhai.md#t-10-6-1a-rhai-sandbox-configuration-173-part-a) (source: [`docs/method/backlog/inbox/KERNEL_deterministic-rhai.md`](docs/method/backlog/inbox/KERNEL_deterministic-rhai.md)) +- `M051` `inbox` `T-10-6-1b`: [ViewClaim / EffectClaim Receipts (#173, part b)](docs/method/backlog/inbox/KERNEL_deterministic-rhai.md#t-10-6-1b-viewclaim-effectclaim-receipts-173-part-b) (source: [`docs/method/backlog/inbox/KERNEL_deterministic-rhai.md`](docs/method/backlog/inbox/KERNEL_deterministic-rhai.md)) +- `M052` `inbox`: [First-class invariant documents](docs/method/backlog/inbox/KERNEL_invariants-as-docs.md) (source: [`docs/method/backlog/inbox/KERNEL_invariants-as-docs.md`](docs/method/backlog/inbox/KERNEL_invariants-as-docs.md)) +- `M053` `inbox` `T-10-2-1`: [Spec — Commit/Manifest Signing (#20)](docs/method/backlog/inbox/KERNEL_security.md#t-10-2-1-spec-commitmanifest-signing-20) (source: [`docs/method/backlog/inbox/KERNEL_security.md`](docs/method/backlog/inbox/KERNEL_security.md)) +- `M054` `inbox` `T-10-2-2`: [Spec — Security Contexts (#21)](docs/method/backlog/inbox/KERNEL_security.md#t-10-2-2-spec-security-contexts-21) (source: [`docs/method/backlog/inbox/KERNEL_security.md`](docs/method/backlog/inbox/KERNEL_security.md)) +- `M055` `inbox` `T-10-2-3`: [FFI Limits and Validation (#38)](docs/method/backlog/inbox/KERNEL_security.md#t-10-2-3-ffi-limits-and-validation-38) (source: [`docs/method/backlog/inbox/KERNEL_security.md`](docs/method/backlog/inbox/KERNEL_security.md)) +- `M056` `inbox` `T-10-2-4`: [JS-ABI Packet Checksum v2 (#195)](docs/method/backlog/inbox/KERNEL_security.md#t-10-2-4-js-abi-packet-checksum-v2-195) (source: [`docs/method/backlog/inbox/KERNEL_security.md`](docs/method/backlog/inbox/KERNEL_security.md)) +- `M057` `inbox` `T-10-2-5`: 
[Spec — Provenance Payload v1 (#202)](docs/method/backlog/inbox/KERNEL_security.md#t-10-2-5-spec-provenance-payload-v1-202) (source: [`docs/method/backlog/inbox/KERNEL_security.md`](docs/method/backlog/inbox/KERNEL_security.md)) +- `M058` `inbox`: [ABI nested evidence strictness](docs/method/backlog/inbox/PLATFORM_abi-nested-evidence-strictness.md) (source: [`docs/method/backlog/inbox/PLATFORM_abi-nested-evidence-strictness.md`](docs/method/backlog/inbox/PLATFORM_abi-nested-evidence-strictness.md)) +- `M059` `inbox` `T-10-4-1`: [Draft Hot-Reload Spec (#75)](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md#t-10-4-1-draft-hot-reload-spec-75) (source: [`docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md`](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md)) +- `M060` `inbox` `T-10-4-2`: [File Watcher / Debounce (#76)](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md#t-10-4-2-file-watcher-debounce-76) (source: [`docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md`](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md)) +- `M061` `inbox` `T-10-4-3`: [Hot-Reload Implementation (#24)](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md#t-10-4-3-hot-reload-implementation-24) (source: [`docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md`](docs/method/backlog/inbox/PLATFORM_editor-hot-reload.md)) +- `M062` `inbox`: [git-mind NEXUS](docs/method/backlog/inbox/PLATFORM_git-mind-nexus.md) (source: [`docs/method/backlog/inbox/PLATFORM_git-mind-nexus.md`](docs/method/backlog/inbox/PLATFORM_git-mind-nexus.md)) +- `M063` `inbox` `T-10-5-1`: [Importer Umbrella Audit + Close (#25)](docs/method/backlog/inbox/PLATFORM_importer.md#t-10-5-1-importer-umbrella-audit-close-25) (source: [`docs/method/backlog/inbox/PLATFORM_importer.md`](docs/method/backlog/inbox/PLATFORM_importer.md)) +- `M064` `inbox`: [Legend progress in method status](docs/method/backlog/inbox/PLATFORM_method-status-legend-progress.md) (source: 
[`docs/method/backlog/inbox/PLATFORM_method-status-legend-progress.md`](docs/method/backlog/inbox/PLATFORM_method-status-legend-progress.md)) +- `M065` `inbox`: [Reconcile Relocated Wesley Echo Schemas](docs/method/backlog/inbox/PLATFORM_reconcile-relocated-wesley-echo-schemas.md) (source: [`docs/method/backlog/inbox/PLATFORM_reconcile-relocated-wesley-echo-schemas.md`](docs/method/backlog/inbox/PLATFORM_reconcile-relocated-wesley-echo-schemas.md)) +- `M066` `inbox` `T-10-3-1`: [Key Management Doc (#35)](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md#t-10-3-1-key-management-doc-35) (source: [`docs/method/backlog/inbox/PLATFORM_signing-pipeline.md`](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md)) +- `M067` `inbox` `T-10-3-2`: [CI — Sign Release Artifacts (Dry Run) (#33)](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md#t-10-3-2-ci-sign-release-artifacts-dry-run-33) (source: [`docs/method/backlog/inbox/PLATFORM_signing-pipeline.md`](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md)) +- `M068` `inbox` `T-10-3-3`: [CLI Verify Path (#34)](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md#t-10-3-3-cli-verify-path-34) (source: [`docs/method/backlog/inbox/PLATFORM_signing-pipeline.md`](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md)) +- `M069` `inbox` `T-10-3-4`: [CI — Verify Signatures (#36)](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md#t-10-3-4-ci-verify-signatures-36) (source: [`docs/method/backlog/inbox/PLATFORM_signing-pipeline.md`](docs/method/backlog/inbox/PLATFORM_signing-pipeline.md)) +- `M070` `inbox` `T-10-8-1`: [Docs / Logging Improvements (#79)](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-1-docs-logging-improvements-79) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M071` `inbox` `T-10-8-2`: [Naming Consistency Audit (#207)](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-2-naming-consistency-audit-207) (source: 
[`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M072` `inbox` `T-10-8-3`: [Reliving Debugger UX Design (#239)](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-3-reliving-debugger-ux-design-239) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M073` `inbox` `T-10-8-4`: [Local Rustdoc Warning Gate](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-4-local-rustdoc-warning-gate) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M074` `inbox` `T-10-8-5`: [Deterministic Test Engine Helper](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-5-deterministic-test-engine-helper) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M075` `inbox` `T-10-8-6`: [Current-Head PR Review / Merge Summary Tool](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-6-current-head-pr-review-merge-summary-tool) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M076` `inbox` `T-10-8-7`: [CI Trigger Rationalization](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-7-ci-trigger-rationalization) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M077` `inbox` `T-10-8-8`: [Background Cargo Lock Isolation](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-8-background-cargo-lock-isolation) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M078` `inbox` `T-10-8-9`: [Small-Commit Pre-Commit Latency Reduction](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-9-small-commit-pre-commit-latency-reduction) (source: 
[`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M079` `inbox` `T-10-8-10`: [Feature-Gate Contract Verification](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-10-feature-gate-contract-verification) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M080` `inbox` `T-10-8-11`: [PR Review Thread Reply / Resolution Helper](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-11-pr-review-thread-reply-resolution-helper) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M081` `inbox` `T-10-8-12`: [Shell Script Style / Format Lane](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-12-shell-script-style-format-lane) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M082` `inbox` `T-10-8-13`: [Review-Fix Fast Path for Staged Verification](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-13-review-fix-fast-path-for-staged-verification) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M083` `inbox` `T-10-8-14`: [Pre-PR Preflight Gate](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-14-pre-pr-preflight-gate) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M084` `inbox` `T-10-8-15`: [Self-Review Command](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-15-self-review-command) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M085` `inbox` `T-10-8-16`: [Pre-PR Checklist and Boundary-Change Policy](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-16-pre-pr-checklist-and-boundary-change-policy) (source: 
[`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M086` `inbox` `T-10-8-17`: [Docs Validation Beyond Markdown](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-17-docs-validation-beyond-markdown) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M087` `inbox` `T-10-8-18`: [Implementation-Backed Docs Claims Policy](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-18-implementation-backed-docs-claims-policy) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M088` `inbox` `T-10-8-19`: [Remove Committed Generated DAG Artifacts](docs/method/backlog/inbox/PLATFORM_tooling-misc.md#t-10-8-19-remove-committed-generated-dag-artifacts) (source: [`docs/method/backlog/inbox/PLATFORM_tooling-misc.md`](docs/method/backlog/inbox/PLATFORM_tooling-misc.md)) +- `M089` `inbox` `T-10-9-1`: [Fuzzing the Port](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md#t-10-9-1-fuzzing-the-port) (source: [`docs/method/backlog/inbox/PLATFORM_ttd-hardening.md`](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md)) +- `M090` `inbox` `T-10-9-2`: [SIMD Canonicalization](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md#t-10-9-2-simd-canonicalization) (source: [`docs/method/backlog/inbox/PLATFORM_ttd-hardening.md`](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md)) +- `M091` `inbox` `T-10-9-3`: [Causal Visualizer](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md#t-10-9-3-causal-visualizer) (source: [`docs/method/backlog/inbox/PLATFORM_ttd-hardening.md`](docs/method/backlog/inbox/PLATFORM_ttd-hardening.md)) +- `M092` `inbox` `T-10-7-1`: [Hashable View Artifacts (#174)](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md#t-10-7-1-hashable-view-artifacts-174) (source: 
[`docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md`](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md)) +- `M093` `inbox` `T-10-7-2`: [Schema Hash Chain Pinning (#193)](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md#t-10-7-2-schema-hash-chain-pinning-193) (source: [`docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md`](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md)) +- `M094` `inbox` `T-10-7-3`: [SchemaDelta Vocabulary (#194)](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md#t-10-7-3-schemadelta-vocabulary-194) (source: [`docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md`](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md)) +- `M095` `inbox` `T-10-7-4`: [Provenance as Query Semantics (#198)](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md#t-10-7-4-provenance-as-query-semantics-198) (source: [`docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md`](docs/method/backlog/inbox/PLATFORM_wesley-boundary-grammar.md)) +- `M096` `inbox` `T-10-9-1`: [Shadow REALM Investigation](docs/method/backlog/inbox/PLATFORM_wesley-future.md#t-10-9-1-shadow-realm-investigation) (source: [`docs/method/backlog/inbox/PLATFORM_wesley-future.md`](docs/method/backlog/inbox/PLATFORM_wesley-future.md)) +- `M097` `inbox` `T-10-9-2`: [Multi-Language Generator Survey](docs/method/backlog/inbox/PLATFORM_wesley-future.md#t-10-9-2-multi-language-generator-survey) (source: [`docs/method/backlog/inbox/PLATFORM_wesley-future.md`](docs/method/backlog/inbox/PLATFORM_wesley-future.md)) +- `M098` `cool-ideas`: [Enforce Echo design vocabulary](docs/method/backlog/cool-ideas/DOCS_glossary-enforcement.md) (source: [`docs/method/backlog/cool-ideas/DOCS_glossary-enforcement.md`](docs/method/backlog/cool-ideas/DOCS_glossary-enforcement.md)) +- `M099` `cool-ideas`: [Course Material](docs/method/backlog/cool-ideas/DOCS_splash-guy-course-material.md) (source: 
[`docs/method/backlog/cool-ideas/DOCS_splash-guy-course-material.md`](docs/method/backlog/cool-ideas/DOCS_splash-guy-course-material.md)) +- `M100` `cool-ideas`: [Course Material](docs/method/backlog/cool-ideas/DOCS_tumble-tower-course-material.md) (source: [`docs/method/backlog/cool-ideas/DOCS_tumble-tower-course-material.md`](docs/method/backlog/cool-ideas/DOCS_tumble-tower-course-material.md)) +- `M101` `cool-ideas`: [Expose parallel execution counterfactuals](docs/method/backlog/cool-ideas/KERNEL_parallel-execution-counterfactuals.md) (source: [`docs/method/backlog/cool-ideas/KERNEL_parallel-execution-counterfactuals.md`](docs/method/backlog/cool-ideas/KERNEL_parallel-execution-counterfactuals.md)) +- `M102` `cool-ideas` `T-7-4-1`: [Implement rulial diff / worldline compare MVP (#172)](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md#t-7-4-1-implement-rulial-diff-worldline-compare-mvp-172) (source: [`docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md`](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md)) +- `M103` `cool-ideas` `T-7-4-2`: [Implement Wesley worldline diff — compare query outputs/proofs across ticks (#199)](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md#t-7-4-2-implement-wesley-worldline-diff-compare-query-outputsproofs-across-ticks-199) (source: [`docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md`](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md)) +- `M104` `cool-ideas` `T-7-4-3`: [Implement provenance heatmap — blast radius / cohesion over time (#204)](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md#t-7-4-3-implement-provenance-heatmap-blast-radius-cohesion-over-time-204) (source: [`docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md`](docs/method/backlog/cool-ideas/KERNEL_rulial-diff.md)) +- `M105` `cool-ideas`: [Controlled Desync](docs/method/backlog/cool-ideas/KERNEL_splash-guy-controlled-desync.md) (source: 
[`docs/method/backlog/cool-ideas/KERNEL_splash-guy-controlled-desync.md`](docs/method/backlog/cool-ideas/KERNEL_splash-guy-controlled-desync.md)) +- `M106` `cool-ideas`: [Lockstep Protocol](docs/method/backlog/cool-ideas/KERNEL_splash-guy-lockstep-protocol.md) (source: [`docs/method/backlog/cool-ideas/KERNEL_splash-guy-lockstep-protocol.md`](docs/method/backlog/cool-ideas/KERNEL_splash-guy-lockstep-protocol.md)) +- `M107` `cool-ideas`: [Rules & State Model](docs/method/backlog/cool-ideas/KERNEL_splash-guy-rules-and-state.md) (source: [`docs/method/backlog/cool-ideas/KERNEL_splash-guy-rules-and-state.md`](docs/method/backlog/cool-ideas/KERNEL_splash-guy-rules-and-state.md)) +- `M108` `cool-ideas` `T-7-3-1`: [Implement time travel core — pause/rewind/buffer/catch-up (#171)](docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md#t-7-3-1-implement-time-travel-core-pauserewindbuffercatch-up-171) (source: [`docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md`](docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md)) +- `M109` `cool-ideas` `T-7-3-2`: [Implement Reliving debugger MVP — scrub timeline + causal slice + fork branch (#205)](docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md#t-7-3-2-implement-reliving-debugger-mvp-scrub-timeline-causal-slice-fork-branch-205) (source: [`docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md`](docs/method/backlog/cool-ideas/KERNEL_time-travel-mvp.md)) +- `M110` `cool-ideas`: [Desync Breakers](docs/method/backlog/cool-ideas/KERNEL_tumble-tower-desync-breakers.md) (source: [`docs/method/backlog/cool-ideas/KERNEL_tumble-tower-desync-breakers.md`](docs/method/backlog/cool-ideas/KERNEL_tumble-tower-desync-breakers.md)) +- `M111` `cool-ideas`: [Lockstep Harness](docs/method/backlog/cool-ideas/KERNEL_tumble-tower-lockstep-harness.md) (source: [`docs/method/backlog/cool-ideas/KERNEL_tumble-tower-lockstep-harness.md`](docs/method/backlog/cool-ideas/KERNEL_tumble-tower-lockstep-harness.md)) +- `M112` `cool-ideas` 
`T-9-2-1`: [Implement replay-from-checkpoint convergence tests](docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md#t-9-2-1-implement-replay-from-checkpoint-convergence-tests) (source: [`docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md`](docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md)) +- `M113` `cool-ideas` `T-9-2-2`: [Implement replay-from-patches convergence property tests](docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md#t-9-2-2-implement-replay-from-patches-convergence-property-tests) (source: [`docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md`](docs/method/backlog/cool-ideas/KERNEL_worldline-convergence.md)) +- `M114` `cool-ideas`: [Stage 0: AABB](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-0-aabb.md) (source: [`docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-0-aabb.md`](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-0-aabb.md)) +- `M115` `cool-ideas`: [Stage 1: Rotation](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-1-rotation.md) (source: [`docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-1-rotation.md`](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-1-rotation.md)) +- `M116` `cool-ideas`: [Stage 2: Friction](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-2-friction.md) (source: [`docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-2-friction.md`](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-2-friction.md)) +- `M117` `cool-ideas`: [Stage 3: Sleeping](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-3-sleeping.md) (source: [`docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-3-sleeping.md`](docs/method/backlog/cool-ideas/MATH_tumble-tower-stage-3-sleeping.md)) +- `M118` `cool-ideas`: [Continuum Contract Artifact Interchange](docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md) (source: 
[`docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md`](docs/method/backlog/cool-ideas/PLATFORM_continuum-contract-artifact-interchange.md)) +- `M119` `cool-ideas`: [Cross-repo METHOD dashboard](docs/method/backlog/cool-ideas/PLATFORM_cross-repo-method-dashboard.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_cross-repo-method-dashboard.md`](docs/method/backlog/cool-ideas/PLATFORM_cross-repo-method-dashboard.md)) +- `M120` `cool-ideas` `T-5-4-1`: [Arc<[u8]> to bytes::Bytes migration](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md#t-5-4-1-arcu8-to-bytesbytes-migration) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md)) +- `M121` `cool-ideas` `T-5-4-2`: [AsyncBlobStore trait](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md#t-5-4-2-asyncblobstore-trait) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md)) +- `M122` `cool-ideas` `T-5-4-3`: [Enumeration and metadata API](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md#t-5-4-3-enumeration-and-metadata-api) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-api-evolution.md)) +- `M123` `cool-ideas` `T-5-1-1`: [File-per-blob DiskTier implementation](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md#t-5-1-1-file-per-blob-disktier-implementation) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md)) +- `M124` `cool-ideas` `T-5-1-2`: [Tiered promotion/demotion (Memory <-> Disk)](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md#t-5-1-2-tiered-promotiondemotion-memory-disk) (source: 
[`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-disk-tier.md)) +- `M125` `cool-ideas` `T-5-2-1`: [Mark-sweep reachability analysis](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md#t-5-2-1-mark-sweep-reachability-analysis) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md)) +- `M126` `cool-ideas` `T-5-2-2`: [Eviction policy and background sweep task](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md#t-5-2-2-eviction-policy-and-background-sweep-task) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-gc-sweep-eviction.md)) +- `M127` `cool-ideas` `T-5-3-1`: [Message type definitions and binary encoding](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md#t-5-3-1-message-type-definitions-and-binary-encoding) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md)) +- `M128` `cool-ideas` `T-5-3-2`: [Request/response protocol and backpressure](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md#t-5-3-2-requestresponse-protocol-and-backpressure) (source: [`docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md`](docs/method/backlog/cool-ideas/PLATFORM_deep-storage-wire-protocol.md)) +- `M129` `cool-ideas`: [Extract method crate to its own repo](docs/method/backlog/cool-ideas/PLATFORM_method-crate-extract.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_method-crate-extract.md`](docs/method/backlog/cool-ideas/PLATFORM_method-crate-extract.md)) +- `M130` `cool-ideas`: [Method drift check as pre-push hook](docs/method/backlog/cool-ideas/PLATFORM_method-drift-as-pre-push-hook.md) (source: 
[`docs/method/backlog/cool-ideas/PLATFORM_method-drift-as-pre-push-hook.md`](docs/method/backlog/cool-ideas/PLATFORM_method-drift-as-pre-push-hook.md)) +- `M131` `cool-ideas`: [Proof-Carrying Apertures](docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md`](docs/method/backlog/cool-ideas/PLATFORM_proof-carrying-apertures.md)) +- `M132` `cool-ideas`: [Reading envelope inspector](docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md`](docs/method/backlog/cool-ideas/PLATFORM_reading-envelope-inspector.md)) +- `M133` `cool-ideas`: [Visualization](docs/method/backlog/cool-ideas/PLATFORM_splash-guy-visualization.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_splash-guy-visualization.md`](docs/method/backlog/cool-ideas/PLATFORM_splash-guy-visualization.md)) +- `M134` `cool-ideas`: [Visualization](docs/method/backlog/cool-ideas/PLATFORM_tumble-tower-visualization.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_tumble-tower-visualization.md`](docs/method/backlog/cool-ideas/PLATFORM_tumble-tower-visualization.md)) +- `M135` `cool-ideas`: [WARPDrive POSIX Materialization Optic](docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md) (source: [`docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md`](docs/method/backlog/cool-ideas/PLATFORM_warpdrive-posix-optic.md)) +- `M136` `bad-code`: [RED/GREEN can't be separate commits](docs/method/backlog/bad-code/red-green-lint-friction.md) (source: [`docs/method/backlog/bad-code/red-green-lint-friction.md`](docs/method/backlog/bad-code/red-green-lint-friction.md)) +- `M137` `bad-code`: [xtask main.rs is a god file](docs/method/backlog/bad-code/xtask-god-file.md) (source: [`docs/method/backlog/bad-code/xtask-god-file.md`](docs/method/backlog/bad-code/xtask-god-file.md)) + +## Matrix + +```csv 
+task,M001,M002,M003,M004,M005,M006,M007,M008,M009,M010,M011,M012,M013,M014,M015,M016,M017,M018,M019,M020,M021,M022,M023,M024,M025,M026,M027,M028,M029,M030,M031,M032,M033,M034,M035,M036,M037,M038,M039,M040,M041,M042,M043,M044,M045,M046,M047,M048,M049,M050,M051,M052,M053,M054,M055,M056,M057,M058,M059,M060,M061,M062,M063,M064,M065,M066,M067,M068,M069,M070,M071,M072,M073,M074,M075,M076,M077,M078,M079,M080,M081,M082,M083,M084,M085,M086,M087,M088,M089,M090,M091,M092,M093,M094,M095,M096,M097,M098,M099,M100,M101,M102,M103,M104,M105,M106,M107,M108,M109,M110,M111,M112,M113,M114,M115,M116,M117,M118,M119,M120,M121,M122,M123,M124,M125,M126,M127,M128,M129,M130,M131,M132,M133,M134,M135,M136,M137 +M001,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M002,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M003,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M004,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M005,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M006,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M007,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M008,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M009,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M010,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M011,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M012,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M013,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M014,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M015,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M016,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M017,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M018,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M019,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M020,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M021,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M022,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M023,,,,,,,,,,,,depends 
on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M024,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M025,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M026,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M027,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M028,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M029,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M030,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M031,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M032,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M033,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M034,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M035,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M036,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M037,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M038,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M039,,,,,,,,,,,,depends on,,,,,depends on,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M040,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M041,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M042,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M043,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M044,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M045,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M046,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M047,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M048,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M049,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M050,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M051,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M052,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M053,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M054,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M055,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M056,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M057,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M058,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M059,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M060,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M061,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M062,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M063,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M064,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M065,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M066,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M067,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M068,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M069,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M070,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M071,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M072,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M073,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M074,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M075,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M076,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M077,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M078,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M079,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M080,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M081,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M082,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M083,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M084,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M085,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M086,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M087,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M088,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M089,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M090,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M091,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M092,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M093,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M094,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M095,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M096,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M097,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M098,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M099,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M100,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
+M101,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M102,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M103,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M104,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M105,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M106,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M107,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M108,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M109,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M110,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M111,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M112,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M113,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,, 
+M114,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M115,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M116,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M117,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M118,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M119,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M120,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M121,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,depends on,,,,,,,,,,,,, +M122,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,,,, +M123,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M124,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,, +M125,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,,,,,, +M126,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,depends on,,,,,,,,,,,, 
+M127,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M128,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,,,,,,,,,, +M129,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M130,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M131,,,,,,,,,,,,depends on,,,,,,,,,,,depends on,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,depends on,, +M132,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M133,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M134,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M135,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M136,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +M137,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, +``` + +## External Or Unresolved Dependency References + +These references were found in dependency-shaped fields but do not resolve to +a task row in `docs/method/backlog/**`. 
+ +- `M010` Depends on: `../../../design/0011-optic-observer-runtime-doctrine/design.md` +- `M010` Depends on: `../../../design/continuum-runtime-and-cas-readings.md` +- `M013` Depends on: `../../../design/0010-live-basis-settlement-plan/design.md` +- `M016` Depends on: `../../../design/0022-continuum-transport-identity/design.md` +- `M023` Depends on: `../../../architecture/wsc-verkle-ipa-retained-readings.md` +- `M023` Depends on: `../../../design/0020-echo-cas-browser/echo-cas-browser.md` +- `M023` Depends on: `../../../design/continuum-runtime-and-cas-readings.md` +- `M025` Depends on: `../../../design/0020-echo-cas-browser/echo-cas-browser.md` +- `M030` Depends on: `../asap/PLATFORM_import-transport-intent-admission-path.md` +- `M032` Depends on: `../../../architecture/wsc-verkle-ipa-retained-readings.md` +- `M131` Depends on: `../../../architecture/wsc-verkle-ipa-retained-readings.md` +- `M135` Depends on: `../../../architecture/continuum-transport.md` +- `M135` Depends on: `../../../architecture/there-is-no-graph.md` +- `M135` Depends on: `../../../design/0018-echo-optics-api-design/design.md` diff --git a/docs/public/assets/collision/animate.js b/docs/public/assets/collision/animate.js deleted file mode 100644 index 3303aff6..00000000 --- a/docs/public/assets/collision/animate.js +++ /dev/null @@ -1,156 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS -(function(){ - const els = Array.from(document.querySelectorAll('.fade-seed')); - if (!('IntersectionObserver' in window)) { - els.forEach(el => el.classList.add('fade-in')); - return; - } - const io = new IntersectionObserver((entries) => { - entries.forEach(entry => { - if (entry.isIntersecting) { - entry.target.classList.add('fade-in'); - io.unobserve(entry.target); - } - }); - }, { rootMargin: '0px 0px -10% 0px', threshold: 0.1 }); - els.forEach(el => io.observe(el)); -})(); - -// Carousel pager for each rule's step-grid -(function(){ - function buildPager(rule) { - const 
grid = rule.querySelector('.step-grid'); - if (!grid) return; - const slides = Array.from(grid.querySelectorAll('figure')); - if (slides.length < 2) return; - - // Build overlay captions with step counts from figcaptions - slides.forEach((fig, i) => { - const cap = fig.querySelector('figcaption'); - const text = cap ? cap.textContent.trim() : ''; - const ov = document.createElement('div'); - ov.className = 'overlay'; - ov.innerHTML = `Step ${i + 1} of ${slides.length}
${text}
`; - fig.appendChild(ov); - fig.classList.add('has-overlay'); - }); - - const nav = document.createElement('div'); - nav.className = 'pager'; - const prev = document.createElement('button'); - prev.className = 'btn'; prev.textContent = '◀ Prev'; - const next = document.createElement('button'); - next.className = 'btn'; next.textContent = 'Next ▶'; - const toggle = document.createElement('button'); - toggle.className = 'btn'; toggle.textContent = 'Show all'; - const world = document.createElement('button'); - world.className = 'btn'; world.textContent = 'World view: On'; - nav.append(prev, next, toggle, world); - grid.after(nav); - - let mode = 'all'; // default to all frames visible - let idx = 0; - - // Helpers to find neighbor rule blocks - const prevRule = (el) => { let r = el.previousElementSibling; while (r && !r.classList.contains('rule')) r = r.previousElementSibling; return r; }; - const nextRule = (el) => { let r = el.nextElementSibling; while (r && !r.classList.contains('rule')) r = r.nextElementSibling; return r; }; - - function render() { - if (mode === 'all') { - slides.forEach(el => el.classList.remove('hidden')); - toggle.textContent = 'Carousel mode'; - } else { - slides.forEach((el, i) => { - el.classList.toggle('hidden', i !== idx); - // ensure visible slide is faded in - if (!el.classList.contains('fade-in')) el.classList.add('fade-in'); - }); - toggle.textContent = 'Show all'; - } - // Enable/disable edges. If at first slide and no previous rule, disable Prev. - // If at last slide and no next rule, disable Next. - if (mode === 'all') { - // Keep navigation enabled in 'all' mode so users/tests can enter carousel via Prev/Next. 
- prev.disabled = false; next.disabled = false; - } else { - const atFirst = idx === 0; - const atLast = idx === slides.length - 1; - prev.disabled = atFirst && !prevRule(rule); - next.disabled = atLast && !nextRule(rule); - } - } - - prev.addEventListener('click', () => { - // If in all mode, enter carousel at first slide - if (mode === 'all') { mode = 'one'; idx = 0; render(); return; } - if (idx > 0) { idx -= 1; render(); return; } - // At first slide: navigate to previous rule, show its first slide - const pr = prevRule(rule); - if (pr && pr._pager) { - pr._pager.setIndex(0); - pr.scrollIntoView({ behavior: 'smooth', block: 'start' }); - } - render(); - }); - next.addEventListener('click', () => { - if (mode === 'all') { mode = 'one'; idx = 0; render(); return; } - if (idx < slides.length - 1) { idx += 1; render(); return; } - // At last slide: navigate to next rule, show its first slide - const nr = nextRule(rule); - if (nr && nr._pager) { - nr._pager.setIndex(0); - nr.scrollIntoView({ behavior: 'smooth', block: 'start' }); - } - render(); - }); - toggle.addEventListener('click', () => { - mode = (mode === 'all') ? 
'one' : 'all'; - render(); - }); - - // Picture-in-picture container with tabs (World / Graph) - slides.forEach((fig) => { - const srcWorld = fig.getAttribute('data-pip'); - const srcGraph = fig.getAttribute('data-graph'); - if (!srcWorld && !srcGraph) return; - const wrap = document.createElement('div'); - wrap.className = 'pip'; - const tabs = document.createElement('div'); - tabs.className = 'pip-tabs'; - const tabWorld = document.createElement('div'); tabWorld.className = 'tab active'; tabWorld.textContent = 'World'; - const tabGraph = document.createElement('div'); tabGraph.className = 'tab'; tabGraph.textContent = 'Graph'; - tabs.append(tabWorld, tabGraph); - const imgWorld = document.createElement('img'); imgWorld.alt = 'World view'; if (srcWorld) imgWorld.src = srcWorld; else imgWorld.style.display='none'; - const imgGraph = document.createElement('img'); imgGraph.alt = 'Graph view'; if (srcGraph) imgGraph.src = srcGraph; else imgGraph.style.display='none'; imgGraph.classList.add('hidden'); - wrap.append(tabs, imgWorld, imgGraph); - fig.appendChild(wrap); - function show(which){ - if (which==='world') { tabWorld.classList.add('active'); tabGraph.classList.remove('active'); imgWorld.classList.remove('hidden'); imgGraph.classList.add('hidden'); } - else { tabGraph.classList.add('active'); tabWorld.classList.remove('active'); imgGraph.classList.remove('hidden'); imgWorld.classList.add('hidden'); } - } - tabWorld.addEventListener('click', ()=>show('world')); - tabGraph.addEventListener('click', ()=>show('graph')); - }); - - let worldOn = true; - world.addEventListener('click', () => { - worldOn = !worldOn; - world.textContent = worldOn ? 
'World view: On' : 'World view: Off'; - slides.forEach(fig => { - const pip = fig.querySelector('.pip'); - if (pip) pip.classList.toggle('hidden', !worldOn); - }); - }); - - // Expose simple API for cross-rule navigation - rule._pager = { - setIndex: (i) => { mode = 'one'; idx = Math.max(0, Math.min(slides.length - 1, i)); render(); }, - setMode: (m) => { mode = m; render(); }, - }; - - render(); - } - - document.querySelectorAll('.rule').forEach(buildPager); -})(); diff --git a/docs/public/assets/collision/broad_phase_pairing.mmd b/docs/public/assets/collision/broad_phase_pairing.mmd deleted file mode 100644 index 8a1b9cb3..00000000 --- a/docs/public/assets/collision/broad_phase_pairing.mmd +++ /dev/null @@ -1,17 +0,0 @@ -flowchart LR - subgraph LHS - PA((TemporalProxy(a,n))):::KClass - PB((TemporalProxy(b,n))):::KClass - PA -- overlaps --> PB - end - - subgraph RHS - PA2((TemporalProxy(a,n))):::KClass - PB2((TemporalProxy(b,n))):::KClass - PP[[PotentialPair(a,b,n)]]:::Add - PA2 -- pair_of --> PP - PB2 -- pair_of --> PP - end - -classDef KClass stroke:#ffd166,stroke-width:2; -classDef Add stroke:#00c853,stroke-width:2,fill:#0d2a1a,color:#dfe7ff; diff --git a/docs/public/assets/collision/build_temporal_proxy.mmd b/docs/public/assets/collision/build_temporal_proxy.mmd deleted file mode 100644 index 719a4f3f..00000000 --- a/docs/public/assets/collision/build_temporal_proxy.mmd +++ /dev/null @@ -1,19 +0,0 @@ -flowchart LR - subgraph LHS - E[Collider(e)] --- T[Transform(e,n)] - V[Velocity(e)] - K[Tick(n)] - T -- at --> K - E -- produced_in --> K - end - - subgraph RHS - E2[Collider(e)]:::KClass --- T2[Transform(e,n)]:::KClass - K2[Tick(n)]:::KClass - P((TemporalProxy(e,n))):::Add - E2 -- has_proxy --> P - P -- produced_in --> K2 - end - -classDef KClass stroke:#ffd166,stroke-width:2; -classDef Add stroke:#00c853,stroke-width:2,fill:#0d2a1a,color:#dfe7ff; diff --git a/docs/public/assets/collision/contact_events.mmd b/docs/public/assets/collision/contact_events.mmd 
deleted file mode 100644 index 8221e17c..00000000 --- a/docs/public/assets/collision/contact_events.mmd +++ /dev/null @@ -1,12 +0,0 @@ -flowchart LR - subgraph LHS - C1((Contact(pair,n-1))):::KClass - C2((Contact(pair,n))):::KClass - end - subgraph RHS - E((ContactEvent(kind,pair,n))):::Add - E -- event_of --> C2 - end - -classDef KClass stroke:#ffd166,stroke-width:2; -classDef Add stroke:#00c853,stroke-width:2,fill:#0d2a1a,color:#dfe7ff; diff --git a/docs/public/assets/collision/diagrams.css b/docs/public/assets/collision/diagrams.css deleted file mode 100644 index 8fda5d72..00000000 --- a/docs/public/assets/collision/diagrams.css +++ /dev/null @@ -1,64 +0,0 @@ -/* Echo collision/CCD DPO diagrams — base styles and optional animations */ -:root { - --bg: #0b1020; - --panel: #121a33; - --text: #dfe7ff; - --muted: #9fb0d6; - --edge: #6aa0ff; - --k: #ffd166; - --added: #00c853; - --removed: #ff5252; - --scope: #ffa600; -} - -svg { font-family: ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, 'Helvetica Neue', Arial, 'Noto Sans', 'Liberation Sans', sans-serif; } -.panel { fill: var(--panel); rx: 8px; } -.title { fill: var(--text); font-weight: 600; font-size: 16px; } -.caption { fill: var(--muted); font-size: 12px; } -.node rect { fill: #182347; stroke: #3356a6; stroke-width: 1.2; rx: 6px; } -.node.interfaceK rect { fill: #2b2746; stroke: var(--k); stroke-width: 2; } -.node.added rect { fill: #0d2a1a; stroke: var(--added); stroke-width: 2; } -.node.removed rect { fill: #2a0d0d; stroke: var(--removed); stroke-width: 2; } -.node text { fill: var(--text); font-size: 12px; } -.edge line, .edge path { stroke: var(--edge); stroke-width: 1.6; fill: none; } -.edge.added line, .edge.added path { stroke: var(--added); stroke-width: 2; } -.edge.removed line, .edge.removed path { stroke: var(--removed); stroke-width: 2; } -.edge text { fill: var(--muted); font-size: 11px; } -.scope rect { fill: none; stroke: var(--scope); stroke-width: 2; stroke-dasharray: 6 4; rx: 8px; 
} -.label { fill: var(--text); font-size: 13px; } - -/* Optional animation hooks */ -.pulse-add { animation: pulseAdd 1.6s ease-in-out infinite; } -.pulse-remove { animation: pulseRemove 1.6s ease-in-out infinite; } -@keyframes pulseAdd { 0%{opacity:0.5} 50%{opacity:1} 100%{opacity:0.5} } -@keyframes pulseRemove { 0%{opacity:1} 50%{opacity:0.6} 100%{opacity:1} } - -/* Fade-in on scroll (tour page) */ -.fade-seed { opacity: 0; transform: translateY(12px); transition: opacity 400ms ease, transform 400ms ease; } -.fade-in { opacity: 1; transform: translateY(0); } - -/* Carousel pager */ -.pager { display: flex; gap: 8px; margin-top: 8px; } -.btn { background: #1b2446; color: var(--text); border: 1px solid #3356a6; border-radius: 6px; padding: 6px 10px; cursor: pointer; font-size: 12px; } -.btn:hover { background: #223060; } -.btn:disabled { opacity: 0.5; cursor: default; } -.hidden { display: none; } - -/* Overlay caption inside figures */ -.has-overlay figcaption { display: none; } -.overlay { position: absolute; left: 12px; right: 12px; bottom: 12px; background: rgba(18,26,51,0.85); border: 1px solid #3356a6; border-radius: 6px; padding: 8px 10px; backdrop-filter: blur(2px); } -.overlay strong { color: var(--text); font-size: 12px; letter-spacing: 0.2px; } -.overlay .ov-t { color: var(--muted); font-size: 12px; margin-top: 2px; } -.has-overlay { padding-bottom: 68px; } -.slide-explain { margin-top: 10px; font-size: 13px; color: var(--text); } -.slide-explain p { margin: 0 0 6px; } -.slide-explain ul { margin: 6px 0 0 18px; } -.slide-explain li { margin: 3px 0; color: var(--muted); } - -/* Picture-in-picture world view */ -.pip { position: absolute; right: 12px; top: 12px; width: 32%; max-width: 240px; border: 1px solid #3356a6; background: #0b1020; border-radius: 6px; box-shadow: 0 2px 8px rgba(0,0,0,0.35); overflow: hidden; } -.pip img { width: 100%; display: block; } -.pip img.hidden { display: none; } -.pip-tabs { display: flex; gap: 0; border-bottom: 1px solid 
#2a3c72; } -.pip-tabs .tab { flex: 1; text-align: center; font-size: 11px; color: var(--muted); padding: 6px 4px; cursor: pointer; background: #121a33; } -.pip-tabs .tab.active { color: var(--text); background: #1b2446; } diff --git a/docs/public/assets/collision/dpo_broad_phase_pairing.svg b/docs/public/assets/collision/dpo_broad_phase_pairing.svg deleted file mode 100644 index e1525e19..00000000 --- a/docs/public/assets/collision/dpo_broad_phase_pairing.svg +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - DPO: BroadPhasePairing (update) - LHS: overlapping TemporalProxy(a,n), TemporalProxy(b,n); RHS: add PotentialPair(a,b,n); K = both proxies - - - LHS - - RHS - - - TemporalProxy(a,n) - TemporalProxy(b,n) - - overlap (fat AABBs) - - - TemporalProxy(a,n) - TemporalProxy(b,n) - PotentialPair(a,b,n) - - - - - - diff --git a/docs/public/assets/collision/dpo_broad_phase_pairing_step1.svg b/docs/public/assets/collision/dpo_broad_phase_pairing_step1.svg deleted file mode 100644 index 8a55a290..00000000 --- a/docs/public/assets/collision/dpo_broad_phase_pairing_step1.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - BroadPhasePairing — Step 1: LHS - TemporalProxy(a,n) - TemporalProxy(b,n) - fat AABBs overlap - diff --git a/docs/public/assets/collision/dpo_broad_phase_pairing_step2.svg b/docs/public/assets/collision/dpo_broad_phase_pairing_step2.svg deleted file mode 100644 index 759cfa34..00000000 --- a/docs/public/assets/collision/dpo_broad_phase_pairing_step2.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - BroadPhasePairing — Step 2: Interface K - TemporalProxy(a,n) - TemporalProxy(b,n) - - diff --git a/docs/public/assets/collision/dpo_broad_phase_pairing_step3.svg b/docs/public/assets/collision/dpo_broad_phase_pairing_step3.svg deleted file mode 100644 index 14ed7556..00000000 --- a/docs/public/assets/collision/dpo_broad_phase_pairing_step3.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - BroadPhasePairing — Step 3: RHS - TemporalProxy(a,n) - TemporalProxy(b,n) - 
PotentialPair(a,b,n) - diff --git a/docs/public/assets/collision/dpo_build_temporal_proxy.svg b/docs/public/assets/collision/dpo_build_temporal_proxy.svg deleted file mode 100644 index 4f35c6e3..00000000 --- a/docs/public/assets/collision/dpo_build_temporal_proxy.svg +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - - DPO: BuildTemporalProxy (pre_update) - LHS: Collider+Transform(+Velocity) at Tick n; RHS: add TemporalProxy(e,n) with fat AABB; K: Collider, Transform, Tick - - - - LHS - - RHS - - - Collider(e) - Transform(e,n) - Velocity(e) - Tick(n) - - - - - produced_in - - - - at - - - - Collider(e) - Transform(e,n) - Tick(n) - - - TemporalProxy(e,n) - - - has_proxy - - - - produced_in - - - - - - diff --git a/docs/public/assets/collision/dpo_build_temporal_proxy_step1.svg b/docs/public/assets/collision/dpo_build_temporal_proxy_step1.svg deleted file mode 100644 index d0e9642c..00000000 --- a/docs/public/assets/collision/dpo_build_temporal_proxy_step1.svg +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - BuildTemporalProxy — Step 1: LHS - Collider(e) - Transform(e,n) - Velocity(e) - Tick(n) - diff --git a/docs/public/assets/collision/dpo_build_temporal_proxy_step2.svg b/docs/public/assets/collision/dpo_build_temporal_proxy_step2.svg deleted file mode 100644 index 0add5c31..00000000 --- a/docs/public/assets/collision/dpo_build_temporal_proxy_step2.svg +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - BuildTemporalProxy — Step 2: Interface K - Collider(e) - Transform(e,n) - Tick(n) - - diff --git a/docs/public/assets/collision/dpo_build_temporal_proxy_step3.svg b/docs/public/assets/collision/dpo_build_temporal_proxy_step3.svg deleted file mode 100644 index a667846a..00000000 --- a/docs/public/assets/collision/dpo_build_temporal_proxy_step3.svg +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - BuildTemporalProxy — Step 3: RHS - Collider(e) - Transform(e,n) - Tick(n) - TemporalProxy(e,n) - diff --git a/docs/public/assets/collision/dpo_contact_events.svg 
b/docs/public/assets/collision/dpo_contact_events.svg deleted file mode 100644 index ed888ecb..00000000 --- a/docs/public/assets/collision/dpo_contact_events.svg +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - DPO: ContactEvents (post_update) - LHS: Contact(pair,n-1) and Contact(pair,n) (or absence); RHS: Begin/Persist/End event - - - LHS - - RHS - - Contact(pair,n-1) - Contact(pair,n) - - ContactEvent(kind, pair, n) - - - - - - diff --git a/docs/public/assets/collision/dpo_contact_events_step1.svg b/docs/public/assets/collision/dpo_contact_events_step1.svg deleted file mode 100644 index fb3e039f..00000000 --- a/docs/public/assets/collision/dpo_contact_events_step1.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - ContactEvents — Step 1: LHS - Contact(pair,n-1) - Contact(pair,n) - diff --git a/docs/public/assets/collision/dpo_contact_events_step2.svg b/docs/public/assets/collision/dpo_contact_events_step2.svg deleted file mode 100644 index 05703400..00000000 --- a/docs/public/assets/collision/dpo_contact_events_step2.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - ContactEvents — Step 2: Interface K - Contact(pair,n-1) - Contact(pair,n) - - diff --git a/docs/public/assets/collision/dpo_contact_events_step3.svg b/docs/public/assets/collision/dpo_contact_events_step3.svg deleted file mode 100644 index 7e079498..00000000 --- a/docs/public/assets/collision/dpo_contact_events_step3.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - ContactEvents — Step 3: RHS - Contact(pair,n-1) - Contact(pair,n) - ContactEvent(kind,pair,n) - diff --git a/docs/public/assets/collision/dpo_gc_ephemeral.svg b/docs/public/assets/collision/dpo_gc_ephemeral.svg deleted file mode 100644 index a38a91bc..00000000 --- a/docs/public/assets/collision/dpo_gc_ephemeral.svg +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - DPO: GC Ephemeral (timeline_flush) - LHS: Temporal artifacts older than retention and unreferenced; RHS: delete deterministically - - - LHS - - RHS - - TemporalProxy(e,n-2) - PotentialPair(a,b,n-2) - 
Toi(pair,n-2) - - (deleted ephemeral nodes) - - - - diff --git a/docs/public/assets/collision/dpo_gc_ephemeral_step1.svg b/docs/public/assets/collision/dpo_gc_ephemeral_step1.svg deleted file mode 100644 index d8af8e0c..00000000 --- a/docs/public/assets/collision/dpo_gc_ephemeral_step1.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - GC Ephemeral — Step 1: LHS - TemporalProxy(e,n-2) - PotentialPair(a,b,n-2) - Toi(pair,n-2) - diff --git a/docs/public/assets/collision/dpo_gc_ephemeral_step2.svg b/docs/public/assets/collision/dpo_gc_ephemeral_step2.svg deleted file mode 100644 index 8844e59f..00000000 --- a/docs/public/assets/collision/dpo_gc_ephemeral_step2.svg +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - GC Ephemeral — Step 2: Selection - TemporalProxy(e,n-2) - PotentialPair(a,b,n-2) - Toi(pair,n-2) - - diff --git a/docs/public/assets/collision/dpo_gc_ephemeral_step3.svg b/docs/public/assets/collision/dpo_gc_ephemeral_step3.svg deleted file mode 100644 index f0651f70..00000000 --- a/docs/public/assets/collision/dpo_gc_ephemeral_step3.svg +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - GC Ephemeral — Step 3: RHS - (deleted ephemeral nodes) - diff --git a/docs/public/assets/collision/dpo_narrow_phase_ccd.svg b/docs/public/assets/collision/dpo_narrow_phase_ccd.svg deleted file mode 100644 index c8c77b8a..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_ccd.svg +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - DPO: NarrowPhaseCCD (update) - LHS: PotentialPair(a,b,n), CCD policy triggers; RHS: Toi(pair,n) + Contact(pair,n) at s∈[0,1] - - - LHS - - RHS - - PotentialPair(a,b,n) - ccd: policy(a,b) == true - - PotentialPair(a,b,n) - Toi(pair,n) - Contact(pair,n) { Manifold } - toi_s - - - - - diff --git a/docs/public/assets/collision/dpo_narrow_phase_ccd_step1.svg b/docs/public/assets/collision/dpo_narrow_phase_ccd_step1.svg deleted file mode 100644 index b9d20369..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_ccd_step1.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - 
- - NarrowPhaseCCD — Step 1: LHS - PotentialPair(a,b,n) - policy(a,b) == true (CCD) - diff --git a/docs/public/assets/collision/dpo_narrow_phase_ccd_step2.svg b/docs/public/assets/collision/dpo_narrow_phase_ccd_step2.svg deleted file mode 100644 index a7ab1df2..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_ccd_step2.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - NarrowPhaseCCD — Step 2: Interface K - PotentialPair(a,b,n) - - diff --git a/docs/public/assets/collision/dpo_narrow_phase_ccd_step3.svg b/docs/public/assets/collision/dpo_narrow_phase_ccd_step3.svg deleted file mode 100644 index 1613f487..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_ccd_step3.svg +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - NarrowPhaseCCD — Step 3: RHS - PotentialPair(a,b,n) - Toi(pair,n) - Contact(pair,n) { Manifold } - diff --git a/docs/public/assets/collision/dpo_narrow_phase_discrete.svg b/docs/public/assets/collision/dpo_narrow_phase_discrete.svg deleted file mode 100644 index d3e70b0a..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_discrete.svg +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - DPO: NarrowPhaseDiscrete (update) - LHS: PotentialPair(a,b,n), discrete overlap at end poses; RHS: add Contact(pair,n) with Manifold - - - LHS - - RHS - - PotentialPair(a,b,n) - discrete: overlap == true @ end pose - - PotentialPair(a,b,n) - Contact(pair,n) { Manifold } - - - - diff --git a/docs/public/assets/collision/dpo_narrow_phase_discrete_step1.svg b/docs/public/assets/collision/dpo_narrow_phase_discrete_step1.svg deleted file mode 100644 index 6d401272..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_discrete_step1.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - NarrowPhaseDiscrete — Step 1: LHS - PotentialPair(a,b,n) - overlap == true @ end pose - diff --git a/docs/public/assets/collision/dpo_narrow_phase_discrete_step2.svg b/docs/public/assets/collision/dpo_narrow_phase_discrete_step2.svg deleted file mode 100644 index 
39dc906c..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_discrete_step2.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - NarrowPhaseDiscrete — Step 2: Interface K - PotentialPair(a,b,n) - - diff --git a/docs/public/assets/collision/dpo_narrow_phase_discrete_step3.svg b/docs/public/assets/collision/dpo_narrow_phase_discrete_step3.svg deleted file mode 100644 index cb694af4..00000000 --- a/docs/public/assets/collision/dpo_narrow_phase_discrete_step3.svg +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - NarrowPhaseDiscrete — Step 3: RHS - PotentialPair(a,b,n) - Contact(pair,n) { Manifold } - diff --git a/docs/public/assets/collision/graph_collision_overview.mmd b/docs/public/assets/collision/graph_collision_overview.mmd deleted file mode 100644 index 731969f8..00000000 --- a/docs/public/assets/collision/graph_collision_overview.mmd +++ /dev/null @@ -1,48 +0,0 @@ -flowchart TB - Tick[(Tick n)] - subgraph Entities - EA[Entity A] - EB[Entity B] - end - TA[Transform(A,n)] - CA[Collider(A)] - TB[Transform(B,n)] - CB[Collider(B)] - TPA[[TemporalProxy(A,n)]] - TPB[[TemporalProxy(B,n)]] - PP{{PotentialPair(A,B,n)}} - C((Contact(pair,n))) - TOI((Toi(pair,n))) - E((ContactEvent(kind,n))) - - EA --> TA - EA --> CA - EB --> TB - EB --> CB - - TA -- produced_in --> Tick - TB -- produced_in --> Tick - - TA -. has_proxy .-> TPA - TB -. has_proxy .-> TPB - TPA -- pair_of --> PP - TPB -- pair_of --> PP - PP -- contact_of --> C - PP -. 
toi_of .-> TOI - C -- event_of --> E - - TPA -- produced_in --> Tick - TPB -- produced_in --> Tick - PP -- produced_in --> Tick - C -- produced_in --> Tick - TOI -- produced_in --> Tick - E -- produced_in --> Tick - -classDef tick fill:#1a233f,stroke:#3e5fb5,color:#dfe7ff; -classDef proxy stroke:#ffd166,color:#dfe7ff; -classDef pair stroke:#00c853,color:#dfe7ff; -classDef event fill:#241a4b,stroke:#a78bfa,color:#dfe7ff; -class Tick tick; -class TPA,TPB proxy; -class PP pair; -class E event; diff --git a/docs/public/assets/collision/graph_collision_overview.svg b/docs/public/assets/collision/graph_collision_overview.svg deleted file mode 100644 index efc12d26..00000000 --- a/docs/public/assets/collision/graph_collision_overview.svg +++ /dev/null @@ -1,131 +0,0 @@ - - - - - - - - - Collision Subgraph — Typed Nodes and Edges (Tick n) - Everything is a graph: entities/components, time‑aware proxies, pairs, contacts, TOI, and events are nodes linked by typed edges. - - - - - Tick(n) - - - - - - Entity A - - - - Entity B - - - - - - Transform(A,n) - - - - Collider(A) - - - - - - Transform(B,n) - - - - Collider(B) - - - - - - TemporalProxy(A,n) - - - - TemporalProxy(B,n) - - - - - - PotentialPair(A,B,n) - - - - - - Contact(pair,n) - - - - Toi(pair,n) - - - - - - ContactEvent(kind,n) - - - - has_component - - - - - - produced_in - - - - has_proxy - - - - has_proxy - - - - pair_of - - - - contact_of - toi_of - - - event_of - - - - - - - - - - diff --git a/docs/public/assets/collision/legend.svg b/docs/public/assets/collision/legend.svg deleted file mode 100644 index 4779ddb0..00000000 --- a/docs/public/assets/collision/legend.svg +++ /dev/null @@ -1,48 +0,0 @@ - - - - - - DPO Diagram Legend - - Node - Regular node - - - Interface K - Preserved between LHS and RHS - - - Added - Created on RHS - - - Removed - Deleted on RHS - - - - edge - added edge - removed edge - - - - Scope boundary - diff --git a/docs/public/assets/collision/narrow_phase_ccd.mmd 
b/docs/public/assets/collision/narrow_phase_ccd.mmd deleted file mode 100644 index eaadda91..00000000 --- a/docs/public/assets/collision/narrow_phase_ccd.mmd +++ /dev/null @@ -1,15 +0,0 @@ -flowchart LR - subgraph LHS - PP[[PotentialPair(a,b,n)]]:::KClass - PP -- policy(ccd) --> CCD[/run CCD/] - end - subgraph RHS - PP2[[PotentialPair(a,b,n)]]:::KClass - TOI((Toi(pair,n))):::Add - C((Contact(pair,n))):::Add - TOI -- toi_s --> C - C -- contact_of --> PP2 - end - -classDef KClass stroke:#ffd166,stroke-width:2; -classDef Add stroke:#00c853,stroke-width:2,fill:#0d2a1a,color:#dfe7ff; diff --git a/docs/public/assets/collision/narrow_phase_discrete.mmd b/docs/public/assets/collision/narrow_phase_discrete.mmd deleted file mode 100644 index 7e441d50..00000000 --- a/docs/public/assets/collision/narrow_phase_discrete.mmd +++ /dev/null @@ -1,13 +0,0 @@ -flowchart LR - subgraph LHS - PP[[PotentialPair(a,b,n)]]:::KClass - PP -- overlap_at_end --> YES{overlap?} - end - subgraph RHS - PP2[[PotentialPair(a,b,n)]]:::KClass - C((Contact(pair,n))):::Add - C -- contact_of --> PP2 - end - -classDef KClass stroke:#ffd166,stroke-width:2; -classDef Add stroke:#00c853,stroke-width:2,fill:#0d2a1a,color:#dfe7ff; diff --git a/docs/public/assets/collision/pip/build_proxy_step1.svg b/docs/public/assets/collision/pip/build_proxy_step1.svg deleted file mode 100644 index 5bc3682a..00000000 --- a/docs/public/assets/collision/pip/build_proxy_step1.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - Entity e — start pose and velocity - - diff --git a/docs/public/assets/collision/pip/build_proxy_step2.svg b/docs/public/assets/collision/pip/build_proxy_step2.svg deleted file mode 100644 index c1a855cc..00000000 --- a/docs/public/assets/collision/pip/build_proxy_step2.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - Interface K — e@Tick n preserved - diff --git a/docs/public/assets/collision/pip/build_proxy_step3.svg b/docs/public/assets/collision/pip/build_proxy_step3.svg deleted file mode 100644 index 
47853a2b..00000000 --- a/docs/public/assets/collision/pip/build_proxy_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - Fat AABB over motion [start,end] - - diff --git a/docs/public/assets/collision/pip/ccd_step1.svg b/docs/public/assets/collision/pip/ccd_step1.svg deleted file mode 100644 index 7fbfb08e..00000000 --- a/docs/public/assets/collision/pip/ccd_step1.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - Fast mover towards obstacle (CCD) - - diff --git a/docs/public/assets/collision/pip/ccd_step2.svg b/docs/public/assets/collision/pip/ccd_step2.svg deleted file mode 100644 index 9c114a4c..00000000 --- a/docs/public/assets/collision/pip/ccd_step2.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - K: pair preserved for CCD - diff --git a/docs/public/assets/collision/pip/ccd_step3.svg b/docs/public/assets/collision/pip/ccd_step3.svg deleted file mode 100644 index 572572cf..00000000 --- a/docs/public/assets/collision/pip/ccd_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - Impact at s ≈ TOI; contact normal - diff --git a/docs/public/assets/collision/pip/discrete_step1.svg b/docs/public/assets/collision/pip/discrete_step1.svg deleted file mode 100644 index 4f248427..00000000 --- a/docs/public/assets/collision/pip/discrete_step1.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - End pose overlap (discrete) - diff --git a/docs/public/assets/collision/pip/discrete_step2.svg b/docs/public/assets/collision/pip/discrete_step2.svg deleted file mode 100644 index f2846ec7..00000000 --- a/docs/public/assets/collision/pip/discrete_step2.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - K: pair preserved - diff --git a/docs/public/assets/collision/pip/discrete_step3.svg b/docs/public/assets/collision/pip/discrete_step3.svg deleted file mode 100644 index 6bb7da0c..00000000 --- a/docs/public/assets/collision/pip/discrete_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - Contact manifold (canonical order) - diff --git 
a/docs/public/assets/collision/pip/events_step1.svg b/docs/public/assets/collision/pip/events_step1.svg deleted file mode 100644 index e2e9ff1f..00000000 --- a/docs/public/assets/collision/pip/events_step1.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - Contacts at n and n−1 (compare) - diff --git a/docs/public/assets/collision/pip/events_step2.svg b/docs/public/assets/collision/pip/events_step2.svg deleted file mode 100644 index c3b14f78..00000000 --- a/docs/public/assets/collision/pip/events_step2.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - K: states preserved for diff - diff --git a/docs/public/assets/collision/pip/events_step3.svg b/docs/public/assets/collision/pip/events_step3.svg deleted file mode 100644 index ae0de6c5..00000000 --- a/docs/public/assets/collision/pip/events_step3.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - Begin - Event node emitted (Begin/Persist/End) - diff --git a/docs/public/assets/collision/pip/gc_step1.svg b/docs/public/assets/collision/pip/gc_step1.svg deleted file mode 100644 index 0709dff2..00000000 --- a/docs/public/assets/collision/pip/gc_step1.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - Ephemeral artifacts present - diff --git a/docs/public/assets/collision/pip/gc_step2.svg b/docs/public/assets/collision/pip/gc_step2.svg deleted file mode 100644 index f4864573..00000000 --- a/docs/public/assets/collision/pip/gc_step2.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - GC selection window - diff --git a/docs/public/assets/collision/pip/gc_step3.svg b/docs/public/assets/collision/pip/gc_step3.svg deleted file mode 100644 index 641fdf26..00000000 --- a/docs/public/assets/collision/pip/gc_step3.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - Ephemeral cleared (deterministic) - diff --git a/docs/public/assets/collision/pip/graph_build_proxy_step1.svg b/docs/public/assets/collision/pip/graph_build_proxy_step1.svg deleted file mode 100644 index cada2fb1..00000000 --- 
a/docs/public/assets/collision/pip/graph_build_proxy_step1.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - Transform(A,n) - - Collider(A) - - Tick(n) - - - produced_in - diff --git a/docs/public/assets/collision/pip/graph_build_proxy_step2.svg b/docs/public/assets/collision/pip/graph_build_proxy_step2.svg deleted file mode 100644 index c82b011f..00000000 --- a/docs/public/assets/collision/pip/graph_build_proxy_step2.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - Transform(A,n) - - Collider(A) - - Tick(n) - - diff --git a/docs/public/assets/collision/pip/graph_build_proxy_step3.svg b/docs/public/assets/collision/pip/graph_build_proxy_step3.svg deleted file mode 100644 index 4f55ee58..00000000 --- a/docs/public/assets/collision/pip/graph_build_proxy_step3.svg +++ /dev/null @@ -1,14 +0,0 @@ - - - - - Transform(A,n) - - Collider(A) - - Tick(n) - - TemporalProxy(A,n) - - has_proxy - diff --git a/docs/public/assets/collision/pip/graph_ccd_step1.svg b/docs/public/assets/collision/pip/graph_ccd_step1.svg deleted file mode 100644 index 44622f7e..00000000 --- a/docs/public/assets/collision/pip/graph_ccd_step1.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - PotentialPair(A,B,n) - Policy: CCD - diff --git a/docs/public/assets/collision/pip/graph_ccd_step2.svg b/docs/public/assets/collision/pip/graph_ccd_step2.svg deleted file mode 100644 index 8f9237b0..00000000 --- a/docs/public/assets/collision/pip/graph_ccd_step2.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - PotentialPair(A,B,n) - Run CA / sweep - diff --git a/docs/public/assets/collision/pip/graph_ccd_step3.svg b/docs/public/assets/collision/pip/graph_ccd_step3.svg deleted file mode 100644 index 2d729cdd..00000000 --- a/docs/public/assets/collision/pip/graph_ccd_step3.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - PotentialPair(A,B,n) - - Contact(pair,n) - - Toi(pair,n) - - - diff --git a/docs/public/assets/collision/pip/graph_discrete_step1.svg b/docs/public/assets/collision/pip/graph_discrete_step1.svg deleted file mode 100644 index 
ef8dec34..00000000 --- a/docs/public/assets/collision/pip/graph_discrete_step1.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - PotentialPair(A,B,n) - LHS: overlap at end pose - diff --git a/docs/public/assets/collision/pip/graph_discrete_step2.svg b/docs/public/assets/collision/pip/graph_discrete_step2.svg deleted file mode 100644 index 07802466..00000000 --- a/docs/public/assets/collision/pip/graph_discrete_step2.svg +++ /dev/null @@ -1,7 +0,0 @@ - - - - - PotentialPair(A,B,n) - K: pair preserved - diff --git a/docs/public/assets/collision/pip/graph_discrete_step3.svg b/docs/public/assets/collision/pip/graph_discrete_step3.svg deleted file mode 100644 index e6ff976d..00000000 --- a/docs/public/assets/collision/pip/graph_discrete_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - PotentialPair(A,B,n) - - Contact(pair,n) - - contact_of - diff --git a/docs/public/assets/collision/pip/graph_events_step1.svg b/docs/public/assets/collision/pip/graph_events_step1.svg deleted file mode 100644 index 2c43658f..00000000 --- a/docs/public/assets/collision/pip/graph_events_step1.svg +++ /dev/null @@ -1,8 +0,0 @@ - - - - - Contact(pair,n-1) - - Contact(pair,n) - diff --git a/docs/public/assets/collision/pip/graph_events_step2.svg b/docs/public/assets/collision/pip/graph_events_step2.svg deleted file mode 100644 index e2b13f47..00000000 --- a/docs/public/assets/collision/pip/graph_events_step2.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - Contact(pair,n-1) - - Contact(pair,n) - K: compare states - diff --git a/docs/public/assets/collision/pip/graph_events_step3.svg b/docs/public/assets/collision/pip/graph_events_step3.svg deleted file mode 100644 index e7348665..00000000 --- a/docs/public/assets/collision/pip/graph_events_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Contact(pair,n) - - ContactEvent(kind,n) - - event_of - diff --git a/docs/public/assets/collision/pip/graph_gc_step1.svg b/docs/public/assets/collision/pip/graph_gc_step1.svg deleted file mode 100644 index 
032f3fde..00000000 --- a/docs/public/assets/collision/pip/graph_gc_step1.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - TemporalProxy(*,n) - - PotentialPair(*,n) - - Toi(*,n) - Ephemeral artifacts - diff --git a/docs/public/assets/collision/pip/graph_gc_step2.svg b/docs/public/assets/collision/pip/graph_gc_step2.svg deleted file mode 100644 index 43500529..00000000 --- a/docs/public/assets/collision/pip/graph_gc_step2.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - TemporalProxy(*,n) - - PotentialPair(*,n) - - Toi(*,n) - Selecting for deletion - diff --git a/docs/public/assets/collision/pip/graph_gc_step3.svg b/docs/public/assets/collision/pip/graph_gc_step3.svg deleted file mode 100644 index aaeab7e5..00000000 --- a/docs/public/assets/collision/pip/graph_gc_step3.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - Ephemeral cleared - diff --git a/docs/public/assets/collision/pip/graph_pairing_step1.svg b/docs/public/assets/collision/pip/graph_pairing_step1.svg deleted file mode 100644 index ddce482a..00000000 --- a/docs/public/assets/collision/pip/graph_pairing_step1.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - TemporalProxy(A,n) - - TemporalProxy(B,n) - LHS: overlapping proxies - diff --git a/docs/public/assets/collision/pip/graph_pairing_step2.svg b/docs/public/assets/collision/pip/graph_pairing_step2.svg deleted file mode 100644 index a0a33552..00000000 --- a/docs/public/assets/collision/pip/graph_pairing_step2.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - TemporalProxy(A,n) - - TemporalProxy(B,n) - K: proxies preserved - diff --git a/docs/public/assets/collision/pip/graph_pairing_step3.svg b/docs/public/assets/collision/pip/graph_pairing_step3.svg deleted file mode 100644 index c8567ae7..00000000 --- a/docs/public/assets/collision/pip/graph_pairing_step3.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - TemporalProxy(A,n) - - TemporalProxy(B,n) - - PotentialPair(A,B,n) - - - pair_of - diff --git a/docs/public/assets/collision/pip/pairing_step1.svg 
b/docs/public/assets/collision/pip/pairing_step1.svg deleted file mode 100644 index 7df40c48..00000000 --- a/docs/public/assets/collision/pip/pairing_step1.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - Overlapping fat AABBs → candidate - diff --git a/docs/public/assets/collision/pip/pairing_step2.svg b/docs/public/assets/collision/pip/pairing_step2.svg deleted file mode 100644 index f8f19568..00000000 --- a/docs/public/assets/collision/pip/pairing_step2.svg +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - K: proxies preserved (ordering stable) - diff --git a/docs/public/assets/collision/pip/pairing_step3.svg b/docs/public/assets/collision/pip/pairing_step3.svg deleted file mode 100644 index 9ed71d3a..00000000 --- a/docs/public/assets/collision/pip/pairing_step3.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - PotentialPair added (canonical id) - diff --git a/docs/public/assets/collision/scheduler_phase_mapping.svg b/docs/public/assets/collision/scheduler_phase_mapping.svg deleted file mode 100644 index 3e7b4204..00000000 --- a/docs/public/assets/collision/scheduler_phase_mapping.svg +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - Scheduler Phase Mapping (Collision/CCD) - pre_update → update → post_update → timeline_flush - - - - pre_update - BuildTemporalProxy - - - - - update - BroadPhasePairing - NarrowPhaseDiscrete - NarrowPhaseCCD - - - - - post_update - ContactEvents - - - - - timeline_flush - GC Ephemeral - - diff --git a/docs/public/collision-dpo-tour.html b/docs/public/collision-dpo-tour.html deleted file mode 100644 index d6411883..00000000 --- a/docs/public/collision-dpo-tour.html +++ /dev/null @@ -1,739 +0,0 @@ - - - - - - - - Echo Collision DPO Tour - - - -

Collision / CCD — DPO Rule Tour

-

- Each rule shown as LHS → Interface K → RHS. See the legend for - visual semantics. -

-

- Legend -

- -
-

Graph Anatomy (Everything Is a Graph)

-
-
- Collision Subgraph Overview -
- Overview — typed nodes and edges for one colliding pair - at tick n. -
-
- Node/Edge Graph -
- Entities, components, temporal proxies, potential - pair, contact, TOI, event — all first‑class nodes - linked by typed edges (has_component, has_proxy, - pair_of, contact_of, event_of, produced_in). -
-
-
-

- This is the literal graph Echo maintains. Derived - artifacts (proxies, pairs, contacts, events) are not - hidden engine buffers — they are nodes that tools - can query, branch, replay, and merge - deterministically. The same initial facts and - policies yield the same subgraph and the same - snapshot hash on every peer. -

-
-
-
-
- -

- How Things Move (time-aware proxies) -

-
-

BuildTemporalProxy (pre_update)

-
-
- BuildTemporalProxy Step 1 -
- Step 1 — LHS: Collider + Transform (+ Velocity) at Tick - n -
-
-

- We gather the collider’s - Transform (and optional - Velocity) at tick n. In - Echo this input is explicit graph state, not a - transient engine struct. Chronos gives us a fixed - dt, so the motion window for the - upcoming tick is well-defined and reproducible. -

-
    -
  • - Different: many engines pull - from mutable component state ad‑hoc; here we - read typed nodes bound to a specific tick. -
  • -
  • - Determinism: same - dt and same components ⇒ same - inputs on every peer/branch. -
  • -
-
-
-
- BuildTemporalProxy Step 2 -
Step 2 — Interface K
-
-

- The DPO Interface K shows what is - preserved between LHS and RHS: collider + transform - + tick. This is how we say “the world keeps these - facts while we add the proxy.” -

-
    -
  • - Different: Echo makes the - preserved context explicit; typical engines - merge implicit state in-place. -
  • -
-
-
-
- BuildTemporalProxy Step 3 -
- Step 3 — RHS: TemporalProxy(e,n) added -
-
-

- We add a TemporalProxy with a fat - AABB that encloses motion over [start,end]. Padding - is derived from velocity and quantized policy, so - two peers derive the same box. The proxy links back - to the entity and Tick n. -

-
    -
  • - Different: broad‑phase caches - become first‑class graph nodes with stable IDs. -
  • -
  • - Determinism: quantized padding - + stable insertion order ⇒ identical proxy sets. -
  • -
-
-
-
-
-

- This rule deterministically derives a - TemporalProxy for each collider at tick - n. The proxy’s “fat AABB” encloses the body over - the whole tick window [start,end], so fast movers can’t - tunnel between broad‑phase sweeps. The proxy is a typed node - in the graph (not an opaque engine cache) and is linked back - to the entity and the producing tick. -

-
    -
  • - Different from typical engines: - broad‑phase buffers are usually internal and - mutation‑ordered; in Echo they are explicit graph nodes, - created by a rewrite with a stable scope and ID. -
  • -
  • - Determinism: proxy size and padding are - computed from quantized policy values; insert order is - sorted by ID, so peers/branches build identical proxy - sets. -
  • -
-
-
- -

- How Collision Works (broad → narrow → events) -

-
-

BroadPhasePairing (update)

-
-
- BroadPhasePairing Step 1 -
Step 1 — LHS: overlapping proxies
-
-

- We test fat AABB overlap on proxies - built for the full tick window. Overlap means the - pair is a candidate for narrow phase. -

-
    -
  • - Different: the candidate - condition is a graph fact, not an opaque - boolean. -
  • -
  • - Determinism: identical proxies - ⇒ identical overlap set. -
  • -
-
-
-
- BroadPhasePairing Step 2 -
Step 2 — K: proxies preserved
-
-

- The proxies themselves are preserved (K). This makes - the rule commute with other rules that may also read - them this tick. -

-
-
-
- BroadPhasePairing Step 3 -
Step 3 — RHS: PotentialPair added
-
-

- We mint a PotentialPair with - canonical PairId = - H(min(A,B)||max(A,B)||branch) and back‑refs to - proxies. -

-
    -
  • - Different: pair lists are - reproducible data, not engine iteration order. -
  • -
  • - Determinism: output list is - sorted strictly; peers/branches match. -
  • -
-
-
-
-
-

- The broad phase converts overlapping proxies into - PotentialPair nodes. Each pair gets a - canonical PairId = H(min(A,B) || max(A,B) || - branch) and edges back to the proxies. The emitted list is - sorted deterministically, which makes network replication - and timeline diffs trivial. -

-
    -
  • - Different: most engines keep an - internal unsorted array of candidate pairs; Echo - materializes pairs as graph facts, with stable IDs and - ordering. -
  • -
  • - Determinism: ties in AABB endpoints - break on IDs; output is strictly sorted, so two peers - converge on identical pair order. -
  • -
-
-
- -
-

NarrowPhaseDiscrete (update)

-
-
- NarrowPhaseDiscrete Step 1 -
- Step 1 — LHS: discrete overlap @ end pose -
-
-

- For low‑speed pairs, we evaluate shapes at the end - pose of tick n. If they overlap, we proceed - to build a manifold. -

-
    -
  • - Different: thresholding policy - is data; no hidden time‑step heuristics. -
  • -
-
-
-
- NarrowPhaseDiscrete Step 2 -
Step 2 — K: pair preserved
-
-

- We keep the PotentialPair (K). The - narrow phase acts as a pure derivation from - pair+poses. -

-
-
-
- NarrowPhaseDiscrete Step 3 -
- Step 3 — RHS: Contact with Manifold added -
-
-

- We create a Contact with a reduced - Manifold (2–4 points). Points are - canonicalized by feature IDs to ensure reproducible - ordering. -

-
    -
  • - Different: engine doesn’t call - your code mid‑narrow; it records facts you can - read consistently. -
  • -
  • - Determinism: centralized - tolerances + ordering. -
  • -
-
-
-
-
-

- For low‑speed pairs, we evaluate shapes at end‑of‑tick poses - and, if intersecting, create a Contact with - a deterministically ordered Manifold (2–4 - clipped points). The contact attaches to the pair and to the - producing tick. Manifold point ordering and feature IDs are - canonicalized to remove platform drift. -

-
    -
  • - Different: instead of imperative - callbacks that mutate scripts, Echo records contacts as - first‑class nodes; scripts read them after rules run. -
  • -
  • - Determinism: manifold reduction, - feature selection, and floating‑point tolerances are - centralized and quantized. -
  • -
-
-
- -
-

NarrowPhaseCCD (update)

-
-
- NarrowPhaseCCD Step 1 -
Step 1 — LHS: CCD policy triggers
-
-

- Policy flags fast motion/small features (or - material‑required CCD). We will compute a - Toi in [0,1] before creating a - contact. -

-
-
-
- NarrowPhaseCCD Step 2 -
Step 2 — K: pair preserved
-
-

- We keep the pair (K) and run - conservative advancement or a swept - primitive test to find the impact time. -

-
-
-
- NarrowPhaseCCD Step 3 -
Step 3 — RHS: Toi + Contact added
-
-

- We emit a Toi node with quantized - s and a Contact at the - impact pose. Quantization and iteration caps are - recorded to make this stable. -

-
-
-
-
-

- When a policy indicates high motion or small features, we - run CCD: conservative advancement for - general convex shapes or closed‑form sweeps for - spheres/capsules. We emit a Toi node with - quantized s ∈ [0,1] and a - Contact at the impact pose. Because - s is quantized and the rule scopes are stable, - peers compute the same TOI and contact set. -

-
    -
  • - Different: CCD outputs are persisted as - graph data (Toi + Contact), not transient solver state; - branches and replays see identical values. -
  • -
  • - Determinism: iteration caps, policy - thresholds, and s quantization are recorded; - identical inputs yield identical s. -
  • -
-
-
- -
-

ContactEvents (post_update)

-
-
- ContactEvents Step 1 -
- Step 1 — LHS: contact states n-1 vs n -
-
-

- We stage previous and current - Contact facts for the pair to - compute Begin/Persist/End. -

-
-
-
- ContactEvents Step 2 -
Step 2 — K: contacts preserved
-
-

- K keeps both contact nodes in scope - so event construction is a pure comparison, not - in‑place mutation. -

-
-
-
- ContactEvents Step 3 -
Step 3 — RHS: ContactEvent added
-
-

- We create a - ContactEvent (Begin/Persist/End) - sorted by (toi_s, ContactId). - Events are nodes that tools and scripts can consume - deterministically. -

-
-
-
-
-

- We diff previous vs current Contact nodes - and create a - ContactEvent (Begin/Persist/End) ordered by - (toi_s, ContactId). Events are regular - nodes and flow through the Temporal Bridge to tools, replay, - or networking. -

-
    -
  • - Different: engines typically invoke - user callbacks in engine order; Echo records events as - data first, then tooling/scripts consume them - deterministically. -
  • -
  • - Determinism: strict sort order; event - payloads are value objects that hash the same on every - peer. -
  • -
-
-
- -

- How We Keep It Clean (deterministic GC) -

-
-

GC Ephemeral (timeline_flush)

-
-
- GC Ephemeral Step 1 -
Step 1 — LHS: ephemeral nodes
-
-

- Before flush, the frame has proxies, pairs, TOIs and - optional per‑tick contacts. They’re marked - ephemeral. -

-
-
-
- GC Ephemeral Step 2 -
Step 2 — Selection
-
-

- We deterministically select unreferenced, older - artifacts for deletion. The retention policy is - configured and recorded. -

-
-
-
- GC Ephemeral Step 3 -
Step 3 — RHS: nodes deleted
-
-

- We remove the selected nodes in a stable ID order. - Snapshots after flush are identical across - peers/branches. -

-
-
-
-
-

- Broad‑phase proxies, potential pairs, transient TOIs and, - optionally, per‑tick contacts are - ephemeral. At timeline_flush we - delete them in a deterministic order. We keep only the - high‑value artifacts (Aion‑tagged events, metrics) for tools - and audits. -

-
    -
  • - Different: many engines leak implicit - caches across frames; Echo models and cleans them - explicitly as graph data. -
  • -
  • - Determinism: GC order is sorted by ID; - post‑flush snapshots are identical across branches and - peers. -
  • -
-
-
- - - - diff --git a/docs/refs.bib b/docs/refs.bib deleted file mode 100644 index 4855dd7b..00000000 --- a/docs/refs.bib +++ /dev/null @@ -1,196 +0,0 @@ -@incollection{EL97, - author = {Ehrig, Hartmut and L{\"o}we, Michael}, - title = {Parallel and distributed graph transformation}, - booktitle = {Handbook of Graph Grammars and Computing by Graph Transformation}, - editor = {Rozenberg, Grzegorz}, - publisher = {World Scientific}, - volume = {2}, - pages = {433--456}, - year = {1997}, - doi = {10.1142/9789812384720_0011} -} - -@article{LS08, - author = {Lack, Stephen and Soboci{\'n}ski, Pawe{\l}}, - title = {Adhesive categories}, - journal = {Foundations of Computational Mathematics}, - volume = {8}, - number = {2}, - pages = {191--210}, - year = {2008}, - doi = {10.1007/s10208-007-9019-6} -} - -@article{Mil02, - author = {Milner, Robin}, - title = {Bigraphs and mobile processes}, - journal = {Theoretical Computer Science}, - volume = {274}, - number = {1--2}, - pages = {1--51}, - year = {2002}, - doi = {10.1016/S0304-3975(00)00312-2} -} - -@book{EEPT06, - author = {Ehrig, Hartmut and Ehrig, Karsten and Prange, Ulrike and Taentzer, Gabriele}, - title = {Fundamentals of Algebraic Graph Transformation}, - publisher = {Springer}, - series = {Monographs in Theoretical Computer Science}, - year = {2006}, - doi = {10.1007/3-540-31188-2} -} - -@book{MacLane98, - author = {Mac Lane, Saunders}, - title = {Categories for the Working Mathematician}, - edition = {2}, - publisher = {Springer}, - series = {Graduate Texts in Mathematics}, - volume = {5}, - year = {1998}, - doi = {10.1007/978-1-4757-4721-8} -} - -@book{AdamekRosicky94, - author = {Ad{\'a}mek, Ji{\v{r}}{\'i} and Rosick{\'y}, Ji{\v{r}}{\'i}}, - title = {Locally Presentable and Accessible Categories}, - publisher = {Cambridge University Press}, - year = {1994}, - series = {London Mathematical Society Lecture Note Series}, - volume = {189}, - doi = {10.1017/CBO9780511600579} -} - -@article{Harel87, - author = {Harel, 
David}, - title = {Statecharts: A Visual Formalism for Complex Systems}, - journal = {Science of Computer Programming}, - volume = {8}, - number = {3}, - pages = {231--274}, - year = {1987}, - doi = {10.1016/0167-6423(87)90035-9} -} - -@book{Jensen97, - author = {Jensen, Kurt}, - title = {Coloured Petri Nets: Basic Concepts, Analysis Methods and Practical Use}, - publisher = {Springer}, - year = {1997}, - series = {EATCS Monographs on Theoretical Computer Science}, - volume = {1}, - doi = {10.1007/978-3-642-60794-3} -} - -% ------------------------------------------------------------ -% Additional references for Paper II (added Dec 2025) -% ------------------------------------------------------------ - -@misc{Ross2025warpI, - author = {Ross, James}, - title = {{WARP Graphs: A Worldline Algebra for Recursive Provenance}}, - howpublished = {AI$\Omega$N Foundations Series --- Paper I}, - month = {December}, - year = {2025}, - note = {Version cited: December 2025 PDF.} -} - -@book{Jacobs99, - author = {Jacobs, Bart}, - title = {Categorical Logic and Type Theory}, - publisher = {Elsevier Science}, - year = {1999}, - series = {Studies in Logic and the Foundations of Mathematics}, - volume = {141}, - isbn = {9780444501707} -} - -@article{Benabou85, - author = {B{\'e}nabou, Jean}, - title = {Fibered categories and the foundations of naive category theory}, - journal = {The Journal of Symbolic Logic}, - volume = {50}, - number = {1}, - pages = {10--37}, - year = {1985}, - doi = {10.2307/2273784} -} - -@article{Street80, - author = {Street, Ross}, - title = {Fibrations in bicategories}, - journal = {Cahiers de Topologie et G{\'e}om{\'e}trie Diff{\'e}rentielle Cat{\'e}goriques}, - volume = {21}, - number = {2}, - pages = {111--160}, - year = {1980} -} - -@incollection{Mazurkiewicz87, - author = {Mazurkiewicz, Antoni}, - title = {Trace theory}, - booktitle = {Petri Nets: Applications and Relationships to Other Models of Concurrency}, - series = {Lecture Notes in Computer 
Science}, - volume = {255}, - editor = {Brauer, Wilfried and Reisig, Wolfgang and Rozenberg, Grzegorz}, - publisher = {Springer}, - pages = {278--324}, - year = {1987}, - doi = {10.1007/3-540-17906-2_30} -} - -@incollection{Winskel87, - author = {Winskel, Glynn}, - title = {Event structures}, - booktitle = {Petri Nets: Applications and Relationships to Other Models of Concurrency}, - series = {Lecture Notes in Computer Science}, - volume = {255}, - editor = {Brauer, Wilfried and Reisig, Wolfgang and Rozenberg, Grzegorz}, - publisher = {Springer}, - pages = {325--392}, - year = {1987}, - doi = {10.1007/3-540-17906-2_31} -} - -@article{Newman42, - author = {Newman, M. H. A.}, - title = {On theories with a combinatorial definition of ``equivalence''}, - journal = {Annals of Mathematics}, - series = {2}, - volume = {43}, - number = {2}, - pages = {223--243}, - year = {1942}, - doi = {10.2307/1968867} -} - -@article{vanOostrom94, - author = {van Oostrom, Vincent}, - title = {Confluence by decreasing diagrams}, - journal = {Theoretical Computer Science}, - volume = {126}, - number = {2}, - pages = {259--280}, - year = {1994}, - doi = {10.1016/0304-3975(92)00023-K} -} - -@article{GambinoKock13, - author = {Gambino, Nicola and Kock, Joachim}, - title = {Polynomial functors and polynomial monads}, - journal = {Mathematical Proceedings of the Cambridge Philosophical Society}, - volume = {154}, - number = {1}, - pages = {153--192}, - year = {2013}, - doi = {10.1017/S0305004112000394} -} - -@book{GrayReuter92, - author = {Gray, Jim and Reuter, Andreas}, - title = {Transaction Processing: Concepts and Techniques}, - publisher = {Morgan Kaufmann}, - year = {1992}, - isbn = {9781558601901} -} diff --git a/docs/spec/SPEC-0004-worldlines-playback-truthbus.md b/docs/spec/SPEC-0004-worldlines-playback-truthbus.md index 719a4269..87d97c89 100644 --- a/docs/spec/SPEC-0004-worldlines-playback-truthbus.md +++ b/docs/spec/SPEC-0004-worldlines-playback-truthbus.md @@ -13,6 +13,7 @@ Depends 
on: - [Merkle Commit](merkle-commit.md) - [Provenance Payload](SPEC-0005-provenance-payload.md) - [WASM ABI Contract](SPEC-0009-wasm-abi.md) +- [FIXED-TIMESTEP](../invariants/FIXED-TIMESTEP.md) ## Why this packet exists @@ -40,11 +41,21 @@ The worldline is not itself the observer. It is the carrier that makes replay, a A playback cursor materializes a worldline at a coordinate without mutating the writer head unless it is explicitly acting as the writer. Seeking replays recorded patches and verifies expected hashes. It does not re-run rules. +Playback coordinates follow the [FIXED-TIMESTEP](../invariants/FIXED-TIMESTEP.md) +invariant: ticks are HistoryTime coordinates, and HostTime cannot affect replay +or coordinate identity except through an admitted canonical decision record. +Timer starts, fires, expiries, and cancellations follow the same law: an Intent +is only a proposal, and only an admitted tick plus receipt becomes replayable +timer history. + ## Decision 3: Observation is the public read contract Public reads are expressed through observation artifacts: coordinate resolution, reading-envelope metadata, declared frame, declared projection, artifact hash, and payload. Observation is a reading emitted from an observer basis, not raw access to the causal carrier. -The reading envelope is part of the contract, not decoration: it carries the observer plan, native basis, witness refs, parent/basis posture, budget posture, rights posture, and residual posture that bound the emitted reading. +The reading envelope is part of the contract, not decoration: it carries the +observer plan, optional hosted observer instance, native basis, witness refs, +parent/basis posture, budget posture, rights posture, and residual posture that +bound the emitted reading. 
## Decision 4: Session output is replace-only diff --git a/docs/spec/SPEC-0009-wasm-abi.md b/docs/spec/SPEC-0009-wasm-abi.md index b9bfa9a7..cc9d34bb 100644 --- a/docs/spec/SPEC-0009-wasm-abi.md +++ b/docs/spec/SPEC-0009-wasm-abi.md @@ -7,7 +7,7 @@ _Define the current deterministic browser boundary for intent ingress, scheduler Legend: PLATFORM -Current ABI version: 8 +Current ABI version: 9 Depends on: @@ -19,7 +19,10 @@ Depends on: The WASM boundary is where browser and host code meet the Echo runtime. It must be small, deterministic, and explicit about what kind of operation is crossing: intent admission, scheduler inspection, or observation. -ABI version 8 keeps the current export shape and carries richer reading-envelope metadata for observation artifacts. +ABI version 9 keeps the current export shape and makes observation requests +name their observer plan, optional hosted observer instance, read budget, and +rights posture explicitly. Observation artifacts continue to carry +reading-envelope metadata for emitted readings. ## Human users / jobs / hills @@ -47,7 +50,14 @@ Removed exports stay removed: `step`, `snapshot_at`, `render_snapshot`, `execute `observe(request)` returns an observation artifact with resolved coordinate, reading envelope, declared frame, declared projection, artifact hash, and payload. -The reading envelope names the observer plan, native observer basis, witness refs, parent/basis posture, budget posture, rights posture, and residual posture. Built-in observations currently emit `complete` residual posture for clean derived readings. The ABI also names `residual`, `plurality_preserved`, and `obstructed` so external consumers can recognize bounded non-clean readings without treating the payload as a generic state read. +The observation request names the observer plan, optional hosted observer +instance, read budget, and rights posture. 
The reading envelope names the +observer plan, hosted observer instance when present, native observer basis, +witness refs, parent/basis posture, budget posture, rights posture, and +residual posture. Built-in observations currently emit `complete` residual +posture for clean derived readings. The ABI also names `residual`, +`plurality_preserved`, and `obstructed` so external consumers can recognize +bounded non-clean readings without treating the payload as a generic state read. ## Decision 4: The ABI uses logical clocks only diff --git a/e2e/collision-dpo-tour.spec.ts b/e2e/collision-dpo-tour.spec.ts deleted file mode 100644 index c6cfcca3..00000000 --- a/e2e/collision-dpo-tour.spec.ts +++ /dev/null @@ -1,54 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import { test, expect } from '@playwright/test' -import { resolve } from 'node:path' -import { pathToFileURL } from 'node:url' - -function fileUrl(rel: string) { - return pathToFileURL(resolve(rel)).href -} - -test.describe('Collision DPO Tour (static HTML)', () => { - test('loads and renders', async ({ page }) => { - await page.goto(fileUrl('docs/public/collision-dpo-tour.html')) - await expect(page.locator('h1')).toHaveText(/Collision/i) - // Animate script attaches pagers; ensure at least one exists - await expect(page.locator('.pager').first()).toBeVisible() - }) - - test('tabs toggle World/Graph views', async ({ page }) => { - await page.goto(fileUrl('docs/public/collision-dpo-tour.html')) - // Find a figure with pip tabs - const tabs = page.locator('.pip-tabs').first() - await expect(tabs).toBeVisible() - const graphTab = tabs.locator('.tab', { hasText: 'Graph' }) - const worldTab = tabs.locator('.tab', { hasText: 'World' }) - await graphTab.click() - // Graph image should be visible, world hidden within the same figure - const fig = tabs.locator('..') // pip - const pip = fig - await expect(pip.locator('img[alt="Graph view"]')).toBeVisible() - await 
expect(pip.locator('img[alt="World view"]')).toBeHidden() - await worldTab.click() - await expect(pip.locator('img[alt="World view"]')).toBeVisible() - }) - - test('prev/next navigation toggles carousel mode', async ({ page }) => { - await page.goto(fileUrl('docs/public/collision-dpo-tour.html')) - const firstRule = page.locator('.rule').filter({ has: page.locator('.pager') }).first() - await expect(firstRule).toBeVisible() - const nextBtn = firstRule.locator('.pager .btn', { hasText: 'Next' }).first() - await expect(nextBtn).toBeVisible() - // Initially all slides are visible - const figs = firstRule.locator('.step-grid figure') - const total = await figs.count() - expect(total).toBeGreaterThan(1) - // Click next -> enter carousel mode (only one visible) - await nextBtn.click() - // Wait a tick for layout updates - await page.waitForTimeout(50) - const hiddenCount = await firstRule.locator('.step-grid figure.hidden').count() - expect(hiddenCount).toBeGreaterThan(0) - }) -}) diff --git a/package.json b/package.json index 6ba048d3..45c07ac3 100644 --- a/package.json +++ b/package.json @@ -4,16 +4,14 @@ "type": "module", "packageManager": "pnpm@10.23.0", "engines": { - "node": ">=18 <25" + "node": ">=18 <26" }, "scripts": { "docs:dev": "vitepress dev docs", "docs:build": "vitepress build docs", - "test:e2e": "playwright test", "schema:runtime:check": "node scripts/validate-runtime-schema-fragments.mjs" }, "devDependencies": { - "@playwright/test": "^1.48.0", "asciichart": "^1.5.25", "graphql": "16.11.0", "markdownlint-cli2": "0.22.0", diff --git a/packages/ttd-protocol-ts/README.md b/packages/ttd-protocol-ts/README.md new file mode 100644 index 00000000..1593f74b --- /dev/null +++ b/packages/ttd-protocol-ts/README.md @@ -0,0 +1,27 @@ + + + +# @echo/ttd-protocol-ts + +Generated TypeScript consumer types for the host-neutral TTD protocol. + +Echo is not the source of truth for this protocol. 
The canonical authored +schema lives with `warp-ttd` at: + +```text +warp-ttd/schemas/warp-ttd-protocol.graphql +``` + +This package is a checked-in downstream consumer artifact produced through the +Wesley TTD generator path. Do not edit `index.ts`, `types.ts`, `registry.ts`, or +`zod.ts` by hand. + +Local provenance check: + +```sh +cargo xtask wesley sync +``` + +That check verifies that the generated Rust and TypeScript consumers agree on +the canonical schema hash and that Echo remains a protocol consumer rather than +a backup protocol owner. diff --git a/packages/wesley-generator-vue/package.json b/packages/wesley-generator-vue/package.json deleted file mode 100644 index 2824849c..00000000 --- a/packages/wesley-generator-vue/package.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "@wesley/generator-vue", - "version": "0.1.0", - "description": "Generate Vue client artifacts (ops.ts, schemas.ts, client.ts, useEcho.ts) from Echo Ops IR", - "type": "module", - "main": "src/index.mjs", - "license": "Apache-2.0" -} diff --git a/packages/wesley-generator-vue/src/index.mjs b/packages/wesley-generator-vue/src/index.mjs deleted file mode 100644 index abe69334..00000000 --- a/packages/wesley-generator-vue/src/index.mjs +++ /dev/null @@ -1,326 +0,0 @@ -import path from "node:path"; - -/** - * Generate Vue artifacts (ops.ts, schemas.ts, client.ts, useEcho.ts) from Echo Ops IR. - * @param {object} ir - Echo Ops IR JSON (contains ops[], types[], schema_sha256, codec_id). - * @param {object} options - { outDir?: string } - * @returns {{ files: { path: string, content: string }[] }} - */ -export async function generateVue(ir, options = {}) { - if (!ir || !Array.isArray(ir.ops)) { - throw new Error("@wesley/generator-vue requires Echo Ops IR with `ops[]`"); - } - const outDir = options.outDir ?? 
"src/wesley/generated"; - const files = []; - files.push({ path: path.join(outDir, "ops.ts"), content: emitOps(ir) }); - files.push({ path: path.join(outDir, "schemas.ts"), content: emitSchemas(ir) }); - files.push({ path: path.join(outDir, "client.ts"), content: emitClient(ir) }); - files.push({ path: path.join(outDir, "useEcho.ts"), content: emitUseEcho() }); - return { files }; -} - -// ----------------------- emitOps ----------------------- - -function emitOps(ir) { - const schemaSha = ir.schema_sha256 ?? "unknown"; - const codecId = ir.codec_id ?? "unknown"; - const regVer = ir.registry_version ?? 0; - const ops = [...ir.ops].sort((a, b) => { - const ak = String(a.kind).toUpperCase(); - const bk = String(b.kind).toUpperCase(); - if (ak !== bk) return ak.localeCompare(bk); - const an = String(a.name); - const bn = String(b.name); - if (an !== bn) return an.localeCompare(bn); - return (a.op_id ?? 0) - (b.op_id ?? 0); - }); - - const lines = []; - lines.push("// AUTO-GENERATED. DO NOT EDIT."); - lines.push(`// schema_sha256: ${schemaSha}`); - lines.push(`// codec_id: ${codecId}`); - lines.push(`// registry_version: ${regVer}`); - lines.push(""); - lines.push(`export const SCHEMA_SHA256 = ${JSON.stringify(schemaSha)};`); - lines.push(`export const CODEC_ID = ${JSON.stringify(codecId)};`); - lines.push(`export const REGISTRY_VERSION = ${Number(regVer)};`); - lines.push(""); - - for (const op of ops) { - const kind = String(op.kind).toUpperCase(); - const constName = - kind === "QUERY" - ? 
`QUERY_${toScreamingSnake(op.name)}_ID` - : `MUT_${toScreamingSnake(op.name)}_ID`; - lines.push(`export const ${constName} = ${op.op_id} as const;`); - } - - lines.push(""); - lines.push(`export type OpKind = "QUERY" | "MUTATION";`); - lines.push(`export type OpDef = { kind: OpKind; name: string; opId: number };`); - lines.push(""); - lines.push("export const OPS: readonly OpDef[] = ["); - for (const op of ops) { - const kind = String(op.kind).toUpperCase(); - lines.push( - ` { kind: ${JSON.stringify(kind)}, name: ${JSON.stringify(op.name)}, opId: ${op.op_id} },` - ); - } - lines.push("] as const;"); - lines.push(""); - lines.push("export function findOpId(kind: OpKind, name: string): number | undefined {"); - lines.push(" const hit = OPS.find((o) => o.kind === kind && o.name === name);"); - lines.push(" return hit?.opId;"); - lines.push("}"); - lines.push(""); - return lines.join("\n"); -} - -// ----------------------- emitSchemas ----------------------- - -function emitSchemas(ir) { - const schemaSha = ir.schema_sha256 ?? "unknown"; - const codecId = ir.codec_id ?? "unknown"; - const regVer = ir.registry_version ?? 0; - const types = ir.types ?? []; - const ops = ir.ops ?? []; - - const lines = []; - lines.push("// AUTO-GENERATED. DO NOT EDIT."); - lines.push(`// schema_sha256: ${schemaSha}`); - lines.push(`// codec_id: ${codecId}`); - lines.push(`// registry_version: ${regVer}`); - lines.push(""); - lines.push('import { z } from "zod";'); - lines.push(""); - - const typeMap = new Map(types.map((t) => [t.name, t])); - - // ENUMs - for (const t of types) { - if (t.kind !== "ENUM") continue; - const values = (t.values ?? []).map((v) => JSON.stringify(v)).join(", "); - lines.push(`export const ${schemaName(t.name)} = z.enum([${values}]);`); - } - if (types.some((t) => t.kind === "ENUM")) lines.push(""); - - // OBJECTs - for (const t of types) { - if (t.kind !== "OBJECT") continue; - const fields = (t.fields ?? 
[]).map((f) => { - const schemaExpr = wrapField(f, typeMap); - return ` ${JSON.stringify(f.name)}: ${schemaExpr},`; - }); - lines.push( - `export const ${schemaName(t.name)} = z.object({\n${fields.join("\n")}\n}).strict();` - ); - } - if (types.some((t) => t.kind === "OBJECT")) lines.push(""); - - // Op var/result schemas - lines.push("// Operation variable/result schemas"); - for (const op of ops) { - const varsSchemaName = `${pascal(op.name)}VarsSchema`; - const resultSchemaName = `${pascal(op.name)}ResultSchema`; - const args = op.args ?? []; - const argLines = args.map((a) => { - const schemaExpr = wrapArg(a, typeMap); - return ` ${JSON.stringify(a.name)}: ${schemaExpr},`; - }); - lines.push(`export const ${varsSchemaName} = z.object({\n${argLines.join("\n")}\n}).strict();`); - if (op.result_type) { - lines.push(`export const ${resultSchemaName} = ${refType(op.result_type, typeMap)};`); - } else { - lines.push(`export const ${resultSchemaName} = z.undefined();`); - } - lines.push(""); - } - - return lines.join("\n"); -} - -function refType(name, typeMap) { - if (isScalar(name)) return scalarSchema(name); - if (!typeMap.has(name)) throw new Error(`Unknown type: ${name}`); - return schemaName(name); -} -function wrapField(f, typeMap) { - return wrapType(f.type, !!f.list, !!f.required, typeMap); -} -function wrapArg(a, typeMap) { - return wrapType(a.type, !!a.list, !!a.required, typeMap); -} -function wrapType(typeName, list, required, typeMap) { - let expr = refType(typeName, typeMap); - if (list) expr = `z.array(${expr})`; - if (!required) expr = `${expr}.optional()`; - return expr; -} -function schemaName(name) { - return `${name}Schema`; -} -function isScalar(t) { - return t === "String" || t === "Boolean" || t === "Int" || t === "Float" || t === "ID"; -} -function scalarSchema(t) { - switch (t) { - case "String": - case "ID": - return "z.string()"; - case "Boolean": - return "z.boolean()"; - case "Int": - return "z.number().int()"; - case "Float": - return 
"z.number()"; - default: - throw new Error(`Unknown scalar: ${t}`); - } -} - -// ----------------------- emitClient ----------------------- - -function emitClient(ir) { - const schemaSha = ir.schema_sha256 ?? "unknown"; - const codecId = ir.codec_id ?? "unknown"; - const ops = ir.ops ?? []; - const queries = ops.filter((o) => String(o.kind).toUpperCase() === "QUERY"); - const muts = ops.filter((o) => String(o.kind).toUpperCase() === "MUTATION"); - - const lines = []; - lines.push("// AUTO-GENERATED. DO NOT EDIT."); - lines.push(`// schema_sha256: ${schemaSha}`); - lines.push(`// codec_id: ${codecId}`); - lines.push(""); - lines.push('import {'); - lines.push(' CODEC_ID, SCHEMA_SHA256, REGISTRY_VERSION,'); - for (const op of ops) { - const kind = String(op.kind).toUpperCase(); - const constName = - kind === "QUERY" - ? `QUERY_${toScreamingSnake(op.name)}_ID` - : `MUT_${toScreamingSnake(op.name)}_ID`; - lines.push(` ${constName},`); - } - lines.push('} from "./ops";'); - lines.push('import {'); - for (const op of ops) { - const name = pascal(op.name); - lines.push(` ${name}VarsSchema,`); - lines.push(` ${name}ResultSchema,`); - } - lines.push('} from "./schemas";'); - lines.push(""); - lines.push("export type Bytes = Uint8Array;"); - lines.push(""); - lines.push("export interface EchoWasm {"); - lines.push(" dispatch_intent(intentBytes: Bytes): void;"); - lines.push(" step(stepBudget: number): Bytes; // StepResult"); - lines.push(" drain_view_ops(): any; // ViewOp[]"); - lines.push(" get_ledger(): any; // (Snapshot, Receipt, Patch)[]"); - lines.push(" jump_to_tick(tickIndex: number): void;"); - lines.push(" render_state(): void;"); - lines.push(" get_head(): Bytes; // HeadInfo"); - lines.push(""); - lines.push(" execute_query(queryId: number, varsBytes: Bytes): Bytes;"); - lines.push(" encode_command(opId: number, payload: unknown): Bytes;"); - lines.push(" encode_query_vars(queryId: number, vars: unknown): Bytes;"); - lines.push(""); - lines.push(" 
get_registry_info?(): Bytes;"); - lines.push("}"); - lines.push(""); - lines.push("export type RegistryInfo = { schema_sha256: string; codec_id: string; registry_version: number };"); - lines.push(""); - lines.push("export class WesleyClient {"); - lines.push(" constructor(private wasm: EchoWasm) {}"); - lines.push(""); - lines.push(" verifyRegistry(decodeRegistryInfo?: (bytes: Bytes) => RegistryInfo) {"); - lines.push(" if (!this.wasm.get_registry_info || !decodeRegistryInfo) return;"); - lines.push(" const info = decodeRegistryInfo(this.wasm.get_registry_info());"); - lines.push(" if (info.schema_sha256 !== SCHEMA_SHA256) throw new Error('Schema hash mismatch');"); - lines.push(" if (info.codec_id !== CODEC_ID) throw new Error('Codec mismatch');"); - lines.push(" if (info.registry_version !== REGISTRY_VERSION) throw new Error('Registry version mismatch');"); - lines.push(" }"); - lines.push(""); - lines.push(" getLedger() {"); - lines.push(" return this.wasm.get_ledger();"); - lines.push(" }"); - lines.push(""); - lines.push(" jumpToTick(index: number) {"); - lines.push(" this.wasm.jump_to_tick(index);"); - lines.push(" }"); - lines.push(""); - lines.push(" renderState() {"); - lines.push(" this.wasm.render_state();"); - lines.push(" }"); - lines.push(""); - for (const q of queries) { const fn = `query${pascal(q.name)}`; - const constName = `QUERY_${toScreamingSnake(q.name)}_ID`; - lines.push(` ${fn}(vars: unknown = {}) {`); - lines.push(` const parsed = ${pascal(q.name)}VarsSchema.parse(vars);`); - lines.push(` const varsBytes = this.wasm.encode_query_vars(${constName}, parsed);`); - lines.push(` const resultBytes = this.wasm.execute_query(${constName}, varsBytes);`); - lines.push(` return { bytes: resultBytes, schema: ${pascal(q.name)}ResultSchema };`); - lines.push(" }"); - lines.push(""); - } - for (const m of muts) { - const fn = `dispatch${pascal(m.name)}`; - const constName = `MUT_${toScreamingSnake(m.name)}_ID`; - const args = (m.args ?? 
[]).map((a) => safeIdent(a.name)).join(", "); - const payloadObj = - (m.args ?? []).length === 0 - ? "{}" - : `{ ${(m.args ?? []).map((a) => `${safeIdent(a.name)}: ${safeIdent(a.name)}`).join(", ")} }`; - lines.push(` ${fn}(${args}) {`); - lines.push(` const payload = ${pascal(m.name)}VarsSchema.parse(${payloadObj});`); - lines.push(` const bytes = this.wasm.encode_command(${constName}, payload);`); - lines.push(" this.wasm.dispatch_intent(bytes);"); - lines.push(" }"); - lines.push(""); - } - lines.push("}"); - lines.push(""); - return lines.join("\n"); -} - -// ----------------------- emitUseEcho ----------------------- - -function emitUseEcho() { - return `// AUTO-GENERATED scaffold. You will likely extend this.\n` + - `import { WesleyClient } from "./client";\n\n` + - `export function useEcho(wasm) {\n` + - ` const client = new WesleyClient(wasm);\n` + - ` const pump = (budget = 1000) => {\n` + - ` const res = wasm.step(budget);\n` + - ` // TODO: decode StepResult and apply ViewOps via wasm.drain_view_ops()\n` + - ` return res;\n` + - ` };\n` + - ` return { client, pump };\n` + - `}\n`; -} - -// ----------------------- helpers ----------------------- - -function pascal(name) { - return String(name) - .replace(/[^A-Za-z0-9]+/g, " ") - .trim() - .split(/\s+/) - .map((w) => w.slice(0, 1).toUpperCase() + w.slice(1)) - .join(""); -} - -function toScreamingSnake(name) { - return String(name) - .replace(/([a-z0-9])([A-Z])/g, "$1_$2") - .replace(/[^A-Za-z0-9]+/g, "_") - .replace(/^_+|_+$/g, "") - .toUpperCase(); -} - -function safeIdent(name) { - const s = String(name).replace(/[^A-Za-z0-9_]/g, "_"); - if (!/^[A-Za-z_]/.test(s)) return `_${s}`; - return s; -} diff --git a/playwright.config.ts b/playwright.config.ts deleted file mode 100644 index 8eb189bd..00000000 --- a/playwright.config.ts +++ /dev/null @@ -1,14 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import { defineConfig } from '@playwright/test' - -export default 
defineConfig({ - testDir: './e2e', - retries: 0, - use: { - headless: true, - viewport: { width: 1280, height: 800 }, - ignoreHTTPSErrors: true, - }, -}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4fc9bfdf..9d7fae79 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,9 +14,6 @@ importers: specifier: ^0.3.0 version: 0.3.0 devDependencies: - '@playwright/test': - specifier: ^1.48.0 - version: 1.57.0 asciichart: specifier: ^1.5.25 version: 1.5.25 @@ -62,8 +59,6 @@ importers: specifier: ^4.0.0 version: 4.3.6 - packages/wesley-generator-vue: {} - packages: '@algolia/abtesting@1.12.2': @@ -374,11 +369,6 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - '@playwright/test@1.57.0': - resolution: {integrity: sha512-6TyEnHgd6SArQO8UO2OMTxshln3QMWBtPGrOCgs3wVEmQmwyuNtB10IZMfmYDE0riwNR1cu4q+pPcxMVtaG3TA==} - engines: {node: '>=18'} - hasBin: true - '@rollup/rollup-android-arm-eabi@4.54.0': resolution: {integrity: sha512-OywsdRHrFvCdvsewAInDKCNyR3laPA2mc9bRYJ6LBp5IyvF3fvXbbNR0bSzHlZVFtn6E0xw2oZlyjg4rKCVcng==} cpu: [arm] @@ -1132,11 +1122,6 @@ packages: focus-trap@7.7.1: resolution: {integrity: sha512-Pkp8m55GjxBLnhBoT6OXdMvfRr4TjMAKLvFM566zlIryq5plbhaTmLAJWTGR0EkRwLjEte1lCOG9MxF1ipJrOg==} - fsevents@2.3.2: - resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - fsevents@2.3.3: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -1443,16 +1428,6 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - playwright-core@1.57.0: - resolution: {integrity: 
sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==} - engines: {node: '>=18'} - hasBin: true - - playwright@1.57.0: - resolution: {integrity: sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==} - engines: {node: '>=18'} - hasBin: true - points-on-curve@0.2.0: resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} @@ -2070,10 +2045,6 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.20.1 - '@playwright/test@1.57.0': - dependencies: - playwright: 1.57.0 - '@rollup/rollup-android-arm-eabi@4.54.0': optional: true @@ -2883,9 +2854,6 @@ snapshots: dependencies: tabbable: 6.4.0 - fsevents@2.3.2: - optional: true - fsevents@2.3.3: optional: true @@ -3330,14 +3298,6 @@ snapshots: mlly: 1.8.2 pathe: 2.0.3 - playwright-core@1.57.0: {} - - playwright@1.57.0: - dependencies: - playwright-core: 1.57.0 - optionalDependencies: - fsevents: 2.3.2 - points-on-curve@0.2.0: {} points-on-path@0.2.1: diff --git a/scripts/check-append-only.js b/scripts/check-append-only.js index 223477df..f6b5c7fe 100644 --- a/scripts/check-append-only.js +++ b/scripts/check-append-only.js @@ -3,7 +3,7 @@ import { execFileSync } from "node:child_process"; -const files = ["AGENTS.md", "docs/assets/dags/tasks-dag-source.md"]; +const files = ["AGENTS.md"]; const args = process.argv.slice(2); const baseArgIndex = args.indexOf("--base"); diff --git a/scripts/generate-dependency-dags.js b/scripts/generate-dependency-dags.js index 383cac3c..63976cda 100644 --- a/scripts/generate-dependency-dags.js +++ b/scripts/generate-dependency-dags.js @@ -103,7 +103,7 @@ function parseArgs(argv) { milestonesJson: ".cache/echo/deps/milestones-all.json", configJson: "docs/assets/dags/deps-config.json", outDir: "docs/assets/dags", - tasksDagPath: path.join("docs", "assets", "dags", "tasks-dag-source.md"), + tasksDagPath: null, snapshot: null, snapshotLabelMode: "auto", }; 
@@ -138,7 +138,7 @@ function parseArgs(argv) { " --milestones-json Read/write milestones snapshot JSON", " --config Dependency config (edges) JSON", " --out-dir Output directory for DOT/SVG", - " --tasks-dag Path to docs/assets/dags/tasks-dag-source.md (reality edges)", + " --tasks-dag Optional reality-edge source to compare against plan edges", " --snapshot Override label date in output graphs (legacy; prefer --snapshot-label)", " --snapshot-label Snapshot label: auto|none|rolling|YYYY-MM-DD", "", @@ -284,9 +284,9 @@ function emitIssueDot({ issues, issueEdges, snapshotLabel, realityEdges }) { } if (realityEdges) { - // Add nodes for reality-only edges (tasks-dag-source.md) when both endpoints - // exist and the edge is absent from configuredEdges, so red “missing from - // plan” edges can render. + // Add nodes for reality-only edges when both endpoints exist and the edge + // is absent from configuredEdges, so red "missing from plan" edges can + // render. for (const edgeKey of realityEdges) { const realityEdge = safeParseEdgeKey(edgeKey, "reality edge"); if (!realityEdge) continue; @@ -335,10 +335,13 @@ function emitIssueDot({ issues, issueEdges, snapshotLabel, realityEdges }) { snapshotLabel == null ? "Echo — Issue Dependency Sketch" : `Echo — Issue Dependency Sketch (snapshot: ${snapshotLabel})`; + const realityLegend = realityEdges + ? "\\nGreen = Confirmed in optional reality edges; Red = In optional reality edges but missing from Plan." 
+ : ""; lines.push( ` label="${escapeDotString( title, - )}\\nEdge direction: prerequisite → dependent (do tail before head)\\nEdge styles encode confidence (solid=strong, dashed=medium, dotted=weak).\\nGreen = Confirmed in tasks-dag-source.md; Red = In tasks-dag-source.md but missing from Plan.";`, + )}\\nEdge direction: prerequisite → dependent (do tail before head)\\nEdge styles encode confidence (solid=strong, dashed=medium, dotted=weak).${realityLegend}";`, ); lines.push(""); @@ -350,19 +353,21 @@ function emitIssueDot({ issues, issueEdges, snapshotLabel, realityEdges }) { lines.push(' L1 [label="strong", fillcolor="#ffffff"];'); lines.push(' L2 [label="medium", fillcolor="#ffffff"];'); lines.push(' L3 [label="weak", fillcolor="#ffffff"];'); - lines.push( - ' LG [label="confirmed (reality)", color="green", fontcolor="green"];', - ); - lines.push( - ' LR [label="missing from plan", color="red", fontcolor="red"];', - ); lines.push( ` L1 -> L2 [arrowhead=none, ${confidenceEdgeAttrs("strong")}];`, ); lines.push( ` L2 -> L3 [arrowhead=none, ${confidenceEdgeAttrs("medium")}];`, ); - lines.push(' LG -> LR [arrowhead=none, color="red", style="dashed"];'); + if (realityEdges) { + lines.push( + ' LG [label="confirmed (reality)", color="green", fontcolor="green"];', + ); + lines.push( + ' LR [label="missing from plan", color="red", fontcolor="red"];', + ); + lines.push(' LG -> LR [arrowhead=none, color="red", style="dashed"];'); + } lines.push(" }"); lines.push(""); @@ -394,6 +399,7 @@ function emitIssueDot({ issues, issueEdges, snapshotLabel, realityEdges }) { } for (const { from, to, confidence, note } of issueEdges) { + if (!byNum.has(from) || !byNum.has(to)) continue; const edgeKey = `${from}->${to}`; const inReality = realityEdges && realityEdges.has(edgeKey); if (inReality && confidence !== "strong") { @@ -419,7 +425,7 @@ function emitIssueDot({ issues, issueEdges, snapshotLabel, realityEdges }) { const { from: u, to: v } = realityEdge; if (byNum.has(u) && 
byNum.has(v)) { lines.push( - ` i${u} -> i${v} [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from tasks-dag-source.md (missing from Plan)"];`, + ` i${u} -> i${v} [color="red", penwidth=2.0, style="dashed", tooltip="Inferred from optional reality edges (missing from Plan)"];`, ); } } @@ -561,19 +567,25 @@ function main() { ); let realityEdges = null; - const tasksDagPath = path.resolve(process.cwd(), args.tasksDagPath); - if (fs.existsSync(tasksDagPath)) { - try { - const tasksDagContent = fs.readFileSync(tasksDagPath, "utf8"); - const { edges: tasksDagEdges } = parseTasksDag(tasksDagContent); - realityEdges = new Set( - tasksDagEdges.map((edge) => `${edge.from}->${edge.to}`), - ); - } catch (err) { + if (args.tasksDagPath) { + const tasksDagPath = path.resolve(process.cwd(), args.tasksDagPath); + if (!fs.existsSync(tasksDagPath)) { console.warn( - `Warning: failed to parse ${tasksDagPath} for reality edges: ${err?.message ?? err}`, + `Warning: optional reality-edge source not found: ${tasksDagPath}`, ); - realityEdges = null; + } else { + try { + const tasksDagContent = fs.readFileSync(tasksDagPath, "utf8"); + const { edges: tasksDagEdges } = parseTasksDag(tasksDagContent); + realityEdges = new Set( + tasksDagEdges.map((edge) => `${edge.from}->${edge.to}`), + ); + } catch (err) { + console.warn( + `Warning: failed to parse ${tasksDagPath} for reality edges: ${err?.message ?? 
err}`, + ); + realityEdges = null; + } } } diff --git a/scripts/generate-tasks-dag.js b/scripts/generate-tasks-dag.js deleted file mode 100644 index 6b8ac24c..00000000 --- a/scripts/generate-tasks-dag.js +++ /dev/null @@ -1,271 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS - -import fs from "node:fs"; -import path from "node:path"; -import { spawnSync } from "node:child_process"; -import { parseTasksDag } from "./parse-tasks-dag.js"; -import { escapeDotString } from "./dag-utils.js"; - -const INPUT_FILE_DISPLAY = "docs/assets/dags/tasks-dag-source.md"; -const INPUT_FILE = path.join(...INPUT_FILE_DISPLAY.split("/")); -const OUT_DIR = "docs/assets/dags"; -const DOT_FILE = path.join(OUT_DIR, "tasks-dag.dot"); -const SVG_FILE = path.join(OUT_DIR, "tasks-dag.svg"); -// Cluster heuristic: match known prefixes at the start of issue titles to group related work. -// Prefix list is configurable via docs/assets/dags/clusters-config.json (array of strings); we fall back to this default. 
-const DEFAULT_CLUSTER_PREFIXES = [ - "TT0", - "TT1", - "TT2", - "TT3", - "S1", - "M1", - "M2", - "M4", - "W1", - "Demo 2", - "Demo 3", - "Spec:", - "Draft", - "Tooling:", - "Backlog:", -]; -const CLUSTER_CONFIG_PATH = path.join( - "docs", - "assets", - "dags", - "clusters-config.json", -); - -function loadClusterPrefixes() { - try { - const raw = fs.readFileSync(CLUSTER_CONFIG_PATH, "utf8"); - const parsed = JSON.parse(raw); - if ( - Array.isArray(parsed) && - parsed.every((p) => typeof p === "string" && p.trim().length > 0) - ) { - return parsed.map((p) => p.trim()); - } - console.warn( - `clusters-config.json is invalid (expected array of non-empty strings); using defaults.`, - ); - } catch (err) { - if (err?.code !== "ENOENT") { - console.warn( - `Failed to read ${CLUSTER_CONFIG_PATH}: ${err.message}; using defaults.`, - ); - } - } - return DEFAULT_CLUSTER_PREFIXES; -} - -const CLUSTER_PREFIXES = loadClusterPrefixes(); - -function fail(message) { - throw new Error(message); -} - -function runChecked(cmd, args) { - const result = spawnSync(cmd, args, { - encoding: "utf8", - timeout: 30000, - killSignal: "SIGTERM", - }); - if (result.error && result.error.code === "ETIMEDOUT") { - fail(`Command timed out: ${cmd} ${args.join(" ")}`); - } - if ( - result.timedOut || - (result.status === null && result.signal === "SIGTERM") - ) { - fail(`Command timed out: ${cmd} ${args.join(" ")}`); - } - if (result.error) fail(`Failed to run ${cmd}: ${result.error.message}`); - if (result.status !== 0) - fail(`Command failed: ${cmd} ${args.join(" ")}\n${result.stderr}`); - return result.stdout; -} - -function wrapLabel(text, maxLineLength = 30) { - const words = String(text).split(/\s+/); - const lines = []; - let current = ""; - for (const word of words) { - if (word.length > maxLineLength) { - if (current.length) { - lines.push(current); - current = ""; - } - for (let i = 0; i < word.length; i += maxLineLength) { - lines.push(word.slice(i, i + maxLineLength)); - } - continue; - 
} - if ( - (current + (current ? " " : "") + word).length > maxLineLength && - current.length - ) { - lines.push(current); - current = word; - } else { - current = current ? `${current} ${word}` : word; - } - } - if (current.length) lines.push(current); - return lines.join("\\n"); -} - -function confidenceAttrs(confidence) { - switch (confidence) { - case "strong": - return 'color="green3", penwidth=2.5, style="solid"'; - case "medium": - return 'color="orange", penwidth=2.0, style="solid"'; - case "weak": - return 'color="red", penwidth=1.0, style="dashed"'; - default: - return 'color="gray50", penwidth=1.0, style="dotted"'; - } -} - -function hashString(str) { - let hash = 5381; - for (let i = 0; i < str.length; i += 1) { - hash = ((hash << 5) + hash) ^ str.charCodeAt(i); // djb2 xor variant - } - return Math.abs(hash); -} - -// Heuristic to guess cluster name from title -function getClusterName(title) { - for (const p of CLUSTER_PREFIXES) { - if (title.startsWith(p)) return p.replace(/:/g, ""); - } - return "Misc"; -} - -function generateDot(nodes, edges) { - // Filter out isolated nodes to reduce clutter in the visualization (only nodes with at least one edge render). 
- const connectedNodeIds = new Set(); - for (const e of edges) { - connectedNodeIds.add(e.from); - connectedNodeIds.add(e.to); - } - - // Create a filtered map of nodes - const filteredNodes = new Map(); - for (const [id, node] of nodes) { - if (connectedNodeIds.has(id)) { - filteredNodes.set(id, node); - } - } - const removed = nodes.size - filteredNodes.size; - console.log( - `Tasks DAG: ${nodes.size} nodes total; removed ${removed} isolated node(s); rendering ${filteredNodes.size}.`, - ); - - const lines = []; - lines.push("digraph tasks_dag {"); - lines.push( - ' graph [rankdir=LR, labelloc="t", fontsize=18, fontname="Helvetica", newrank=true, splines=true];', - ); - lines.push( - ' node [shape=box, style="rounded,filled", fontname="Helvetica", fontsize=10, margin="0.10,0.06"];', - ); - lines.push(' edge [fontname="Helvetica", fontsize=9, arrowsize=0.8];'); - lines.push( - ` label="Echo — Tasks DAG (from ${escapeDotString(INPUT_FILE_DISPLAY)})\\nGenerated by scripts/generate-tasks-dag.js";`, - ); - lines.push(""); - - lines.push(" subgraph cluster_legend {"); - lines.push(' label="Legend";'); - lines.push(' color="gray70";'); - lines.push(' fontcolor="gray30";'); - lines.push(' style="rounded";'); - lines.push( - ` LG [label="confirmed in ${escapeDotString(INPUT_FILE_DISPLAY)}", color="green", fontcolor="green"];`, - ); - lines.push(" }"); - lines.push(""); - - // Clusters - const clusters = new Map(); - for (const node of filteredNodes.values()) { - const cluster = getClusterName(node.title); - if (!clusters.has(cluster)) clusters.set(cluster, []); - clusters.get(cluster).push(node); - } - - for (const [name, groupNodes] of clusters) { - // Sanitize cluster name for ID - const clusterId = "cluster_" + name.replace(/[^a-zA-Z0-9]/g, "_"); - lines.push(` subgraph ${clusterId} {`); - lines.push(` label="${escapeDotString(name)}";`); - lines.push(' style="rounded"; color="gray70";'); - // Simple color cycle for clusters - const colors = [ - "#dbeafe", - "#dcfce7", 
- "#ffedd5", - "#f3f4f6", - "#fef9c3", - "#ede9fe", - "#ccfbf1", - "#fee2e2", - ]; - const color = colors[hashString(name) % colors.length]; - lines.push(` node [fillcolor="${color}"];`); - - for (const node of groupNodes) { - const label = `#${node.number}\n${node.title}`; - let safeLabel = wrapLabel(label, 30); - safeLabel = escapeDotString(safeLabel); - - lines.push( - ` i${node.number} [label="${safeLabel}", URL="${escapeDotString(node.url)}", tooltip="${escapeDotString(node.title)}"];`, - ); - } - lines.push(" }"); - } - - lines.push(""); - for (const edge of edges) { - if (filteredNodes.has(edge.from) && filteredNodes.has(edge.to)) { - const attrs = confidenceAttrs(edge.confidence); - lines.push( - ` i${edge.from} -> i${edge.to} [${attrs}, tooltip="${escapeDotString(edge.note || "")}"];`, - ); - } - } - - lines.push("}"); - return lines.join("\n"); -} - -function main() { - if (!fs.existsSync(INPUT_FILE)) fail(`Input file not found: ${INPUT_FILE}`); - - const content = fs.readFileSync(INPUT_FILE, "utf8"); - const { nodes, edges } = parseTasksDag(content); - - const dotContent = generateDot(nodes, edges); - - if (!fs.existsSync(OUT_DIR)) fs.mkdirSync(OUT_DIR, { recursive: true }); - fs.writeFileSync(DOT_FILE, dotContent); - console.log(`Wrote DOT file to ${DOT_FILE}`); - - try { - runChecked("dot", ["-Tsvg", DOT_FILE, "-o", SVG_FILE]); - console.log(`Rendered SVG to ${SVG_FILE}`); - } catch (e) { - console.warn( - "Warning: Failed to render SVG (is graphviz installed?). Only DOT file generated.", - e?.message ?? e, - ); - } -} - -main(); diff --git a/scripts/tests/fixed_timestep_invariant_test.sh b/scripts/tests/fixed_timestep_invariant_test.sh index c177929e..e27e59bd 100755 --- a/scripts/tests/fixed_timestep_invariant_test.sh +++ b/scripts/tests/fixed_timestep_invariant_test.sh @@ -80,13 +80,66 @@ assert "ruling 7: identical tick_quantum for cross-worldline" \ # --- Cross-references --- echo "" -echo "4. Cross-references" +echo "4. 
HistoryTime / HostTime classification" +assert "classifies HistoryTime" \ + grep -q "HistoryTime" "${invariant}" +assert "classifies HostTime" \ + grep -q "HostTime" "${invariant}" +assert "legacy OpEnvelope timestamp is HostTime" \ + grep -q "Legacy \`OpEnvelope.ts\`.*HostTime" "${invariant}" +assert "deadlineTick is HistoryTime" \ + grep -q "deadlineTick.*HistoryTime" "${invariant}" + +# --- Cross-references --- +echo "" +echo "5. Timer and deadline doctrine" +assert "timer doctrine names admitted timer events" \ + grep -q "admitted timer-event history" "${invariant}" +assert "timer start intent only arms when admitted" \ + grep -q "Only an admitted \`timer.start\` tick arms" "${invariant}" +assert "timer fire intent is admitted against start receipt" \ + grep -q 'Intent(timer.fire' "${invariant}" +assert "timer doctrine names typed admission outcomes" \ + grep -q "\`Admitted\`, \`Staged\`, \`Plural\`, \`Conflict\`, or \`Obstructed\`" "${invariant}" +assert "paused views do not expire from wall clock" \ + grep -q "paused observer view does not advance" "${invariant}" +assert "touch point table covers session keep-alive" \ + grep -q "Session keep-alive" "${invariant}" +assert "touch point table covers admission budgets" \ + grep -q "Admission budgets" "${invariant}" +assert "touch point table covers retry policies" \ + grep -q "Retry policies" "${invariant}" +assert "touch point table covers wormhole/checkpoint retention" \ + grep -q "Wormhole/checkpoint retention" "${invariant}" +assert "violation checklist exists" \ + grep -q "Violation checklist" "${invariant}" + +# --- Cross-references --- +echo "" +echo "6. 
Cross-references" assert "SPEC-0004 references the invariant" \ grep -qi "FIXED-TIMESTEP" "${spec004}" +assert "SPEC-0004 references timer admitted history" \ + grep -q "only an admitted tick plus receipt" "${spec004}" +assert "static nondeterminism guard is referenced" \ + grep -q "scripts/ban-nondeterminism.sh" "${invariant}" +assert "release allowlist policy is referenced" \ + grep -q "docs/determinism/RELEASE_POLICY.md" "${invariant}" + +# --- Static wall-clock guard --- +echo "" +echo "7. Static wall-clock guard" +guard="${repo_root}/scripts/ban-nondeterminism.sh" +assert "ban-nondeterminism guard exists" \ + test -x "${guard}" +assert "ban-nondeterminism bans SystemTime" \ + grep -q "SystemTime" "${guard}" +assert "ban-nondeterminism bans Instant" \ + grep -q "Instant" "${guard}" # --- Negative test: no variable-dt concepts in crates --- echo "" -echo "5. Negative test: variable-dt concepts absent from crates" +echo "8. Negative test: variable-dt concepts absent from crates" assert_not "no 'variable_dt' in crates/" \ grep -r "variable_dt" "${repo_root}/crates/" assert_not "no 'dt_stream' in crates/" \ diff --git a/specs/spec-000-rewrite/Cargo.toml b/specs/spec-000-rewrite/Cargo.toml deleted file mode 100644 index 800db770..00000000 --- a/specs/spec-000-rewrite/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# © James Ross Ω FLYING•ROBOTS -[package] -name = "spec-000-rewrite" -version = "0.1.0" -edition = "2024" -rust-version.workspace = true -description = "Living spec (Spec-000) for Echo: Leptos + Trunk WASM demo scaffold" -license.workspace = true -repository.workspace = true -readme = "README.md" -keywords = ["echo", "warp", "wasm", "leptos", "spec"] -categories = ["wasm", "web-programming", "visualization"] - -[lib] -crate-type = ["cdylib", "rlib"] - -[dependencies] -leptos = { version = "0.8.15", features = ["csr"] } -console_error_panic_hook = "0.1" -wasm-bindgen = { version = "0.2", optional = true } - -[features] 
-default = [] -wasm = ["wasm-bindgen"] - - -[lints] -workspace = true diff --git a/specs/spec-000-rewrite/README.md b/specs/spec-000-rewrite/README.md deleted file mode 100644 index d29ae9cd..00000000 --- a/specs/spec-000-rewrite/README.md +++ /dev/null @@ -1,28 +0,0 @@ - - - -# Spec-000 Rewrite (Living Spec) - -Leptos + Trunk WASM scaffold for Spec-000: “Everything Is a Rewrite.” This page will embed the actual Echo kernel in the browser to demonstrate rewrite-driven state. - -## Dev - -```sh -rustup target add wasm32-unknown-unknown -cargo install --locked trunk -make spec-000-dev # from repo root -``` - -Serves at `http://127.0.0.1:8080` with hot reload. - -## Build - -```sh -make spec-000-build # outputs dist/ -``` - -## Next steps - -- Wire kernel bindings (wasm-bindgen feature) -- Render WARP graph + rewrite log -- Add completion badge win condition diff --git a/specs/spec-000-rewrite/Trunk.toml b/specs/spec-000-rewrite/Trunk.toml deleted file mode 100644 index 40e4d4ed..00000000 --- a/specs/spec-000-rewrite/Trunk.toml +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# © James Ross Ω FLYING•ROBOTS -filehash = true - -[build] -dist = "dist" - -[cargo] -# enable wasm feature so the wasm_bindgen start is exported -features = ["wasm"] diff --git a/specs/spec-000-rewrite/index.html b/specs/spec-000-rewrite/index.html deleted file mode 100644 index e1fb70b5..00000000 --- a/specs/spec-000-rewrite/index.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - - - - Spec-000: Everything Is a Rewrite - - - - - - - - - diff --git a/specs/spec-000-rewrite/src/lib.rs b/specs/spec-000-rewrite/src/lib.rs deleted file mode 100644 index 54d9377f..00000000 --- a/specs/spec-000-rewrite/src/lib.rs +++ /dev/null @@ -1,54 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS -#![allow(unsafe_code, clippy::print_stdout, clippy::print_stderr)] -//! Spec-000 scaffold: Leptos CSR app wired for trunk/wasm32. 
- -use leptos::prelude::*; - -#[cfg(all(feature = "wasm", target_arch = "wasm32"))] -use wasm_bindgen::prelude::*; - -mod spec_content; - -/// Top-level Spec-000 Leptos component (WASM). -#[component] -pub fn App() -> impl IntoView { - let (epoch, set_epoch) = signal(0usize); - - view! { -
-

"Spec-000: Everything Is a Rewrite"

-

- "Living spec harness — the same Rust compiled to native and WASM." -

- -
-
- "Current Epoch:" - {move || epoch.get()} -
- -
- -
- "Hook this component to the real kernel bindings to drive rewrites, " - "render the WARP graph, and record completion hashes." -
- -
-

"SPEC-000: Everything Is a Rewrite"

-
{spec_content::SPEC_MD}
-
-
- } -} - -/// WASM entry point required by `trunk serve`. -#[cfg(all(feature = "wasm", target_arch = "wasm32"))] -#[wasm_bindgen(start)] -pub fn start() { - console_error_panic_hook::set_once(); - leptos::mount_to_body(|| view! { }) -} diff --git a/specs/spec-000-rewrite/src/spec_content.rs b/specs/spec-000-rewrite/src/spec_content.rs deleted file mode 100644 index e3b557e8..00000000 --- a/specs/spec-000-rewrite/src/spec_content.rs +++ /dev/null @@ -1,5 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// © James Ross Ω FLYING•ROBOTS -//! Static markdown content for Spec-000 displayed in-app. - -pub const SPEC_MD: &str = include_str!("../README.md"); diff --git a/tests/hooks/test_coderabbit_config.sh b/tests/hooks/test_coderabbit_config.sh new file mode 100755 index 00000000..10769c6f --- /dev/null +++ b/tests/hooks/test_coderabbit_config.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +# SPDX-License-Identifier: Apache-2.0 +# © James Ross Ω FLYING•ROBOTS +set -euo pipefail + +cd "$(dirname "${BASH_SOURCE[0]}")/../.." || exit 1 + +if ! rg -q -- '- "!docs/archive/\*\*"' .coderabbit.yaml; then + echo "CodeRabbit must ignore archived documentation: missing !docs/archive/** path filter" >&2 + exit 1 +fi diff --git a/tests/hooks/test_module_size.sh b/tests/hooks/test_module_size.sh new file mode 100755 index 00000000..aa5c6f08 --- /dev/null +++ b/tests/hooks/test_module_size.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +# SPDX-License-Identifier: Apache-2.0 +# © James Ross Ω FLYING•ROBOTS +set -euo pipefail + +cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
|| exit 1 + +max_lines=3000 +file="crates/warp-core/src/optic.rs" +line_count="$(wc -l <"$file" | tr -d ' ')" + +if (( line_count > max_lines )); then + echo "$file has $line_count lines; keep module roots under $max_lines lines" >&2 + exit 1 +fi diff --git a/xtask/src/main.rs b/xtask/src/main.rs index dc10e362..93bfad0d 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -61,6 +61,8 @@ enum Commands { DocsLint(DocsLintArgs), /// METHOD workspace operations (status, backlog inspection). Method(MethodArgs), + /// Wesley consumer artifact maintenance. + Wesley(WesleyArgs), /// Run a narrow local test slice with explicit Cargo target selection. TestSlice(TestSliceArgs), } @@ -96,12 +98,48 @@ struct MethodArgs { command: MethodCommand, } +#[derive(Args)] +struct WesleyArgs { + /// Wesley maintenance subcommand to execute. + #[command(subcommand)] + command: WesleyCommand, +} + +#[derive(Subcommand)] +enum WesleyCommand { + /// Verify Echo's downstream Wesley-generated protocol consumer artifacts. + Sync(WesleySyncArgs), +} + +#[derive(Args)] +struct WesleySyncArgs { + /// Output as JSON (agent surface). + #[arg(long)] + json: bool, +} + #[derive(Subcommand)] enum MethodCommand { /// Capture a backlog note in inbox/. Inbox(MethodInboxArgs), + /// Scaffold a retro and witness directory for an active cycle. + Close(MethodCloseArgs), + /// Promote a backlog item into the next numbered design cycle. + Pull(MethodPullArgs), + /// Check playback questions against committed tests. + Drift(MethodDriftArgs), /// Show backlog lanes, active cycles, and legend load. Status(MethodStatusArgs), + /// Regenerate METHOD task matrix markdown and CSV. + Matrix(MethodMatrixArgs), + /// Regenerate METHOD task DAG DOT and SVG. + Dag(MethodDagArgs), + /// Show tasks with no unresolved backlog-task blockers. + Frontier(MethodFrontierArgs), + /// Show the unweighted longest dependency chain. + CriticalPath(MethodCriticalPathArgs), + /// Verify METHOD graph artifacts are up to date. 
+ CheckDag(MethodCheckDagArgs), } #[derive(Args)] @@ -110,6 +148,27 @@ struct MethodInboxArgs { title: String, } +#[derive(Args)] +struct MethodCloseArgs { + /// Cycle number or full cycle directory name. Defaults to most recent active cycle. + cycle: Option, +} + +#[derive(Args)] +struct MethodPullArgs { + /// Backlog item path, file stem, METHOD task id, or native task id. + item: String, +} + +#[derive(Args)] +struct MethodDriftArgs { + /// Cycle number or full cycle directory name. Defaults to most recent active cycle. + cycle: Option, + /// Output as JSON (agent surface). + #[arg(long)] + json: bool, +} + #[derive(Args)] struct MethodStatusArgs { /// Output as JSON (agent surface). @@ -117,6 +176,47 @@ struct MethodStatusArgs { json: bool, } +#[derive(Args)] +struct MethodMatrixArgs { + /// Check generated matrix artifacts without writing them. + #[arg(long)] + check: bool, +} + +#[derive(Args)] +struct MethodDagArgs { + /// Check generated DAG artifacts without writing them. + #[arg(long)] + check: bool, + /// Skip rendering SVG with Graphviz; write/check DOT only. + #[arg(long)] + no_render: bool, +} + +#[derive(Args)] +struct MethodFrontierArgs { + /// Output as JSON (agent surface). + #[arg(long)] + json: bool, + /// Maximum number of tasks to print in human mode. + #[arg(long, default_value = "25")] + limit: usize, +} + +#[derive(Args)] +struct MethodCriticalPathArgs { + /// Output as JSON (agent surface). + #[arg(long)] + json: bool, +} + +#[derive(Args)] +struct MethodCheckDagArgs { + /// Skip checking rendered SVG freshness. + #[arg(long)] + no_render: bool, +} + #[derive(Args)] struct BenchArgs { /// Benchmark maintenance subcommand to execute. @@ -303,6 +403,9 @@ struct ManPagesArgs { /// Output directory for generated man pages. #[arg(long, default_value = "docs/man")] out: std::path::PathBuf, + /// Check committed man pages without writing. 
+ #[arg(long)] + check: bool, } #[derive(Args)] @@ -407,6 +510,7 @@ fn main() -> Result<()> { Commands::MarkdownFix(args) => run_markdown_fix(&args), Commands::DocsLint(args) => run_docs_lint(args), Commands::Method(args) => run_method(args), + Commands::Wesley(args) => run_wesley(args), Commands::TestSlice(args) => run_test_slice(args), } } @@ -490,17 +594,204 @@ fn display_command(command: &Command) -> String { parts.join(" ") } +#[derive(Serialize)] +struct WesleySyncReport { + ok: bool, + canonical_schema: String, + rust_schema_sha256: Option, + typescript_schema_sha256: Option, + checks: Vec, +} + +#[derive(Serialize)] +struct WesleySyncCheck { + name: String, + ok: bool, + detail: String, +} + +fn run_wesley(args: WesleyArgs) -> Result<()> { + match args.command { + WesleyCommand::Sync(sync_args) => run_wesley_sync(sync_args), + } +} + +fn run_wesley_sync(args: WesleySyncArgs) -> Result<()> { + let repo_root = find_repo_root()?; + let report = build_wesley_sync_report(&repo_root)?; + + if args.json { + println!( + "{}", + serde_json::to_string_pretty(&report) + .context("failed to serialize Wesley sync report")? 
+ ); + } else { + print_wesley_sync_report(&report); + } + + if report.ok { + Ok(()) + } else { + bail!("Wesley protocol consumer check failed") + } +} + +fn build_wesley_sync_report(repo_root: &Path) -> Result { + let rust_cargo = read_repo_file(repo_root, "crates/ttd-protocol-rs/Cargo.toml")?; + let rust_lib = read_repo_file(repo_root, "crates/ttd-protocol-rs/lib.rs")?; + let ts_package = read_repo_file(repo_root, "packages/ttd-protocol-ts/package.json")?; + let ts_index = read_repo_file(repo_root, "packages/ttd-protocol-ts/index.ts")?; + let ts_types = read_repo_file(repo_root, "packages/ttd-protocol-ts/types.ts")?; + let ts_registry = read_repo_file(repo_root, "packages/ttd-protocol-ts/registry.ts")?; + let ts_zod = read_repo_file(repo_root, "packages/ttd-protocol-ts/zod.ts")?; + let echo_ttd_cargo = read_repo_file(repo_root, "crates/echo-ttd/Cargo.toml")?; + + let rust_schema_sha256 = extract_assignment_string(&rust_lib, "SCHEMA_SHA256"); + let typescript_schema_sha256 = extract_assignment_string(&ts_registry, "SCHEMA_HASH"); + let mut checks = Vec::new(); + + push_check( + &mut checks, + "local-ttd-schema-absent", + !repo_root.join("schemas/ttd-protocol.graphql").exists(), + "Echo must not carry a backup source-of-truth TTD protocol schema", + ); + push_check( + &mut checks, + "rust-crate-canonical-owner", + rust_cargo.contains("canonical warp-ttd protocol") + && rust_cargo.contains("cargo xtask wesley sync"), + "Rust consumer crate must name canonical warp-ttd ownership and the local check command", + ); + push_check( + &mut checks, + "rust-lib-generated-marker", + rust_lib.contains("Generated code") && rust_lib.contains("SCHEMA_SHA256"), + "Rust lib.rs must remain a generated protocol artifact with schema identity", + ); + push_check( + &mut checks, + "typescript-package-canonical-owner", + ts_package.contains("canonical warp-ttd protocol") && ts_package.contains("DO NOT EDIT"), + "TypeScript package must advertise downstream generated-consumer status", + 
); + push_check( + &mut checks, + "typescript-generated-markers", + [&ts_index, &ts_types, &ts_registry, &ts_zod] + .into_iter() + .all(|content| content.contains("Auto-generated by @wesley/generator-ttd")), + "TypeScript generated files must retain generator markers", + ); + push_check( + &mut checks, + "schema-hash-match", + rust_schema_sha256.is_some() + && rust_schema_sha256 == typescript_schema_sha256 + && rust_schema_sha256.as_deref().is_some_and(is_sha256_hex), + "Rust and TypeScript generated consumers must name the same 64-hex schema hash", + ); + push_check( + &mut checks, + "echo-ttd-runtime-separate", + !echo_ttd_cargo.contains("ttd-protocol-rs"), + "Echo runtime-side compliance must not depend on host-neutral generated protocol nouns", + ); + + let ok = checks.iter().all(|check| check.ok); + Ok(WesleySyncReport { + ok, + canonical_schema: "warp-ttd/schemas/warp-ttd-protocol.graphql".to_owned(), + rust_schema_sha256, + typescript_schema_sha256, + checks, + }) +} + +fn push_check(checks: &mut Vec, name: &str, ok: bool, detail: &str) { + checks.push(WesleySyncCheck { + name: name.to_owned(), + ok, + detail: detail.to_owned(), + }); +} + +fn print_wesley_sync_report(report: &WesleySyncReport) { + println!( + "Wesley protocol consumer check: {}", + if report.ok { "ok" } else { "failed" } + ); + println!("Canonical schema: {}", report.canonical_schema); + println!( + "Rust schema SHA-256: {}", + report.rust_schema_sha256.as_deref().unwrap_or("") + ); + println!( + "TypeScript schema SHA-256: {}", + report + .typescript_schema_sha256 + .as_deref() + .unwrap_or("") + ); + for check in &report.checks { + println!( + " {} {} — {}", + if check.ok { "ok" } else { "FAIL" }, + check.name, + check.detail + ); + } +} + +fn read_repo_file(repo_root: &Path, relative: &str) -> Result { + let path = repo_root.join(relative); + std::fs::read_to_string(&path).with_context(|| format!("failed to read {relative}")) +} + +fn extract_assignment_string(contents: &str, name: &str) 
-> Option { + contents + .lines() + .find(|line| line.contains(name)) + .and_then(|line| { + let start = line.find('"').or_else(|| line.find('\''))?; + let quote = line.as_bytes()[start]; + let rest = &line[start + 1..]; + let end = rest + .as_bytes() + .iter() + .position(|candidate| *candidate == quote)?; + Some(rest[..end].to_owned()) + }) +} + +fn is_sha256_hex(candidate: &str) -> bool { + candidate.len() == 64 && candidate.bytes().all(|byte| byte.is_ascii_hexdigit()) +} + fn run_method(args: MethodArgs) -> Result<()> { match args.command { MethodCommand::Inbox(inbox_args) => run_method_inbox(inbox_args), + MethodCommand::Close(close_args) => run_method_close(close_args), + MethodCommand::Pull(pull_args) => run_method_pull(pull_args), + MethodCommand::Drift(drift_args) => run_method_drift(drift_args), MethodCommand::Status(status_args) => run_method_status(status_args), + MethodCommand::Matrix(matrix_args) => run_method_matrix(matrix_args), + MethodCommand::Dag(dag_args) => run_method_dag(dag_args), + MethodCommand::Frontier(frontier_args) => run_method_frontier(frontier_args), + MethodCommand::CriticalPath(path_args) => run_method_critical_path(path_args), + MethodCommand::CheckDag(check_args) => run_method_check_dag(check_args), } } +fn method_workspace() -> Result { + let root = std::env::current_dir().context("failed to get current dir")?; + method::workspace::MethodWorkspace::discover(&root).map_err(|e| anyhow::anyhow!(e)) +} + fn run_method_inbox(args: MethodInboxArgs) -> Result<()> { let root = std::env::current_dir().context("failed to get current dir")?; - let workspace = - method::workspace::MethodWorkspace::discover(&root).map_err(|e| anyhow::anyhow!(e))?; + let workspace = method_workspace()?; let path = method::inbox::create_inbox_item(&workspace, &args.title) .map_err(|e| anyhow::anyhow!(e))?; let display_path = path.strip_prefix(&root).unwrap_or(&path); @@ -508,10 +799,66 @@ fn run_method_inbox(args: MethodInboxArgs) -> Result<()> { Ok(()) } -fn 
run_method_status(args: MethodStatusArgs) -> Result<()> { +fn run_method_close(args: MethodCloseArgs) -> Result<()> { + let root = std::env::current_dir().context("failed to get current dir")?; + let workspace = method_workspace()?; + let result = method::close::close_cycle(&workspace, args.cycle.as_deref()) + .map_err(|e| anyhow::anyhow!(e))?; + let retro_path = result + .retro_path + .strip_prefix(&root) + .unwrap_or(&result.retro_path); + let witness_dir = result + .witness_dir + .strip_prefix(&root) + .unwrap_or(&result.witness_dir); + + println!("closed {}", result.cycle); + println!("retro {}", retro_path.display()); + println!("witness {}", witness_dir.display()); + Ok(()) +} + +fn run_method_pull(args: MethodPullArgs) -> Result<()> { let root = std::env::current_dir().context("failed to get current dir")?; - let workspace = - method::workspace::MethodWorkspace::discover(&root).map_err(|e| anyhow::anyhow!(e))?; + let workspace = method_workspace()?; + let result = + method::pull::pull_backlog_item(&workspace, &args.item).map_err(|e| anyhow::anyhow!(e))?; + let design_path = result + .design_path + .strip_prefix(&root) + .unwrap_or(&result.design_path); + + println!("pulled {}", result.cycle_number); + println!("cycle {}", result.cycle); + println!("design {}", design_path.display()); + Ok(()) +} + +fn run_method_drift(args: MethodDriftArgs) -> Result<()> { + let workspace = method_workspace()?; + let report = method::drift::drift_report(&workspace, args.cycle.as_deref()) + .map_err(|e| anyhow::anyhow!(e))?; + + if args.json { + let json = + serde_json::to_string_pretty(&report).context("failed to serialize drift report")?; + println!("{json}"); + } else { + print_drift_human(&report); + } + + if !report.covered() { + bail!( + "METHOD drift check failed: {} playback question(s) lack matching tests", + report.missing_count() + ); + } + Ok(()) +} + +fn run_method_status(args: MethodStatusArgs) -> Result<()> { + let workspace = method_workspace()?; let report = 
method::status::StatusReport::build(&workspace).map_err(|e| anyhow::anyhow!(e))?; if args.json { @@ -524,6 +871,217 @@ fn run_method_status(args: MethodStatusArgs) -> Result<()> { Ok(()) } +fn print_drift_human(report: &method::drift::DriftReport) { + println!("Drift check: {}", report.cycle); + println!(" design files: {}", report.design_paths.len()); + println!(" playback questions: {}", report.questions.len()); + println!(" missing coverage: {}", report.missing_count()); + for question in &report.questions { + let status = if question.matches.is_empty() { + "MISS" + } else { + "ok" + }; + println!(" {status} {}", question.question); + for path in &question.matches { + println!(" {}", path.display()); + } + } +} + +fn run_method_matrix(args: MethodMatrixArgs) -> Result<()> { + let workspace = method_workspace()?; + let graph = method::graph::TaskGraph::build(&workspace).map_err(|e| anyhow::anyhow!(e))?; + let artifacts = method::graph::GraphArtifacts::render(&graph); + let paths = method::graph::GraphArtifactPaths::defaults(&workspace); + + let checks = [ + ( + "matrix markdown", + &paths.matrix_md, + artifacts.matrix_md.as_bytes(), + ), + ( + "matrix csv", + &paths.matrix_csv, + artifacts.matrix_csv.as_bytes(), + ), + ]; + if args.check { + check_artifacts_current(&checks)?; + println!("METHOD matrix artifacts are current"); + } else { + write_artifact(&paths.matrix_md, artifacts.matrix_md.as_bytes())?; + write_artifact(&paths.matrix_csv, artifacts.matrix_csv.as_bytes())?; + println!("wrote {}", paths.matrix_md.display()); + println!("wrote {}", paths.matrix_csv.display()); + } + Ok(()) +} + +fn run_method_dag(args: MethodDagArgs) -> Result<()> { + let workspace = method_workspace()?; + let graph = method::graph::TaskGraph::build(&workspace).map_err(|e| anyhow::anyhow!(e))?; + let artifacts = method::graph::GraphArtifacts::render(&graph); + let paths = method::graph::GraphArtifactPaths::defaults(&workspace); + + if args.check { + let rendered_svg = if 
args.no_render { + None + } else { + Some(render_dot_to_svg(&artifacts.dot)?) + }; + let mut checks = vec![("task dag dot", &paths.dot, artifacts.dot.as_bytes())]; + if let Some(svg) = rendered_svg.as_ref() { + checks.push(("task dag svg", &paths.svg, svg.as_slice())); + } + check_artifacts_current(&checks)?; + println!("METHOD DAG artifacts are current"); + } else { + write_artifact(&paths.dot, artifacts.dot.as_bytes())?; + println!("wrote {}", paths.dot.display()); + if !args.no_render { + let svg = render_dot_to_svg(&artifacts.dot)?; + write_artifact(&paths.svg, &svg)?; + println!("wrote {}", paths.svg.display()); + } + } + Ok(()) +} + +fn run_method_frontier(args: MethodFrontierArgs) -> Result<()> { + let workspace = method_workspace()?; + let graph = method::graph::TaskGraph::build(&workspace).map_err(|e| anyhow::anyhow!(e))?; + let frontier = graph.frontier(); + + if args.json { + let json = + serde_json::to_string_pretty(&frontier).context("failed to serialize frontier")?; + println!("{json}"); + return Ok(()); + } + + println!("Open frontier: {} task(s)", frontier.len()); + for task in frontier.into_iter().take(args.limit) { + let native = task + .task + .native_id + .as_ref() + .map(|id| format!(" {id}")) + .unwrap_or_default(); + println!( + " {} [{}]{} {}", + task.task.id, task.task.lane, native, task.task.title + ); + println!( + " unlocks: {}, downstream depth: {}, source: {}", + task.downstream_count, task.downstream_depth, task.task.source_path + ); + } + Ok(()) +} + +fn run_method_critical_path(args: MethodCriticalPathArgs) -> Result<()> { + let workspace = method_workspace()?; + let graph = method::graph::TaskGraph::build(&workspace).map_err(|e| anyhow::anyhow!(e))?; + let path = graph.critical_path(); + + if args.json { + let json = + serde_json::to_string_pretty(&path).context("failed to serialize critical path")?; + println!("{json}"); + return Ok(()); + } + + println!("Critical path: {} task(s)", path.len()); + for (idx, task) in 
path.iter().enumerate() { + let native = task + .native_id + .as_ref() + .map(|id| format!(" {id}")) + .unwrap_or_default(); + println!( + " {}. {} [{}]{} {}", + idx + 1, + task.id, + task.lane, + native, + task.title + ); + } + Ok(()) +} + +fn run_method_check_dag(args: MethodCheckDagArgs) -> Result<()> { + run_method_matrix(MethodMatrixArgs { check: true })?; + run_method_dag(MethodDagArgs { + check: true, + no_render: args.no_render, + }) +} + +fn write_artifact(path: &Path, bytes: &[u8]) -> Result<()> { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .with_context(|| format!("failed to create {}", parent.display()))?; + } + std::fs::write(path, bytes).with_context(|| format!("failed to write {}", path.display())) +} + +fn check_artifacts_current(checks: &[(&str, &PathBuf, &[u8])]) -> Result<()> { + let mut stale = Vec::new(); + for (label, path, expected) in checks { + match std::fs::read(path) { + Ok(actual) if actual == *expected => {} + Ok(_) => stale.push(format!("{label}: {} is stale", path.display())), + Err(err) => stale.push(format!("{label}: {} missing ({err})", path.display())), + } + } + if stale.is_empty() { + Ok(()) + } else { + bail!( + "METHOD graph artifacts are not current:\n{}", + stale + .into_iter() + .map(|line| format!(" - {line}")) + .collect::>() + .join("\n") + ) + } +} + +fn render_dot_to_svg(dot: &str) -> Result> { + use std::io::Write; + + let mut child = Command::new("dot") + .arg("-Tsvg") + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .spawn() + .context("failed to spawn `dot` (is Graphviz installed?)")?; + + { + let stdin = child.stdin.as_mut().context("failed to open dot stdin")?; + stdin + .write_all(dot.as_bytes()) + .context("failed to write DOT to Graphviz")?; + } + + let output = child + .wait_with_output() + .context("failed to wait for Graphviz")?; + if !output.status.success() { + bail!( + "Graphviz failed (exit status: 
{}):\n{}", + output.status, + String::from_utf8_lossy(&output.stderr) + ); + } + Ok(output.stdout) +} + fn print_status_human(report: &method::status::StatusReport) { println!("Backlog"); for (lane, count) in &report.lanes { @@ -5536,9 +6094,24 @@ fn run_docs_lint(args: DocsLintArgs) -> Result<()> { } fn run_man_pages(args: ManPagesArgs) -> Result<()> { - use clap::CommandFactory; - let out_dir = &args.out; + let pages = render_man_pages()?; + + if args.check { + let checks = pages + .iter() + .map(|(filename, bytes)| (filename.as_str(), out_dir.join(filename), bytes.as_slice())) + .collect::>(); + let checks = checks + .iter() + .map(|(label, path, bytes)| (*label, path, *bytes)) + .collect::>(); + check_artifacts_current(&checks)?; + check_no_stale_man_pages(out_dir, &pages)?; + println!("Man pages are current in {}", out_dir.display()); + return Ok(()); + } + std::fs::create_dir_all(out_dir) .with_context(|| format!("failed to create output directory: {}", out_dir.display()))?; @@ -5558,14 +6131,29 @@ fn run_man_pages(args: ManPagesArgs) -> Result<()> { } } + for (filename, bytes) in pages { + let path = out_dir.join(&filename); + std::fs::write(&path, &bytes) + .with_context(|| format!("failed to write {}", path.display()))?; + println!(" wrote {}", path.display()); + } + + println!("Man pages generated in {}", out_dir.display()); + Ok(()) +} + +fn render_man_pages() -> Result)>> { + use clap::CommandFactory; + let cmd = warp_cli::cli::Cli::command(); + let mut pages = Vec::new(); + let man = clap_mangen::Man::new(cmd.clone()); let mut buf: Vec = Vec::new(); man.render(&mut buf) .context("failed to render echo-cli.1")?; - let path = out_dir.join("echo-cli.1"); - std::fs::write(&path, &buf).with_context(|| format!("failed to write {}", path.display()))?; - println!(" wrote {}", path.display()); + trim_trailing_ascii_whitespace(&mut buf); + pages.push(("echo-cli.1".to_string(), buf)); for sub in cmd.get_subcommands() { let sub_name = sub.get_name().to_string(); 
@@ -5577,15 +6165,64 @@ fn run_man_pages(args: ManPagesArgs) -> Result<()> { let mut buf: Vec = Vec::new(); man.render(&mut buf) .with_context(|| format!("failed to render echo-cli-{sub_name}.1"))?; - let filename = format!("echo-cli-{sub_name}.1"); - let path = out_dir.join(&filename); - std::fs::write(&path, &buf) - .with_context(|| format!("failed to write {}", path.display()))?; - println!(" wrote {}", path.display()); + trim_trailing_ascii_whitespace(&mut buf); + pages.push((format!("echo-cli-{sub_name}.1"), buf)); } - println!("Man pages generated in {}", out_dir.display()); - Ok(()) + Ok(pages) +} + +fn trim_trailing_ascii_whitespace(bytes: &mut Vec) { + let mut out = Vec::with_capacity(bytes.len()); + for line in bytes.split_inclusive(|byte| *byte == b'\n') { + let has_newline = line.last() == Some(&b'\n'); + let body = if has_newline { + &line[..line.len() - 1] + } else { + line + }; + let trimmed_len = body + .iter() + .rposition(|byte| !byte.is_ascii_whitespace()) + .map_or(0, |idx| idx + 1); + out.extend_from_slice(&body[..trimmed_len]); + if has_newline { + out.push(b'\n'); + } + } + *bytes = out; +} + +fn check_no_stale_man_pages(out_dir: &Path, pages: &[(String, Vec)]) -> Result<()> { + let expected = pages + .iter() + .map(|(filename, _)| filename.as_str()) + .collect::>(); + let mut stale = Vec::new(); + if let Ok(entries) = std::fs::read_dir(out_dir) { + for entry in entries.flatten() { + let name = entry.file_name(); + let name = name.to_string_lossy(); + if name.starts_with("echo-cli") + && name.ends_with(".1") + && !expected.contains(name.as_ref()) + { + stale.push(entry.path()); + } + } + } + if stale.is_empty() { + Ok(()) + } else { + bail!( + "stale man page(s):\n{}", + stale + .into_iter() + .map(|path| format!(" - {}", path.display())) + .collect::>() + .join("\n") + ) + } } #[cfg(test)] @@ -5613,6 +6250,20 @@ mod tests { (program, args) } + #[test] + fn man_pages_render_top_level_and_subcommands() { + let pages = 
assert_ok(render_man_pages(), "man pages should render"); + let filenames = pages + .iter() + .map(|(filename, _)| filename.as_str()) + .collect::>(); + + assert!(filenames.contains("echo-cli.1")); + assert!(filenames.contains("echo-cli-verify.1")); + assert!(filenames.contains("echo-cli-bench.1")); + assert!(filenames.contains("echo-cli-inspect.1")); + } + #[test] fn test_slice_settlement_uses_lib_filter_not_integration_scan() { let commands = build_test_slice_commands(TestSlice::Settlement); @@ -5642,6 +6293,36 @@ mod tests { ); } + #[test] + fn wesley_sync_extracts_rust_and_typescript_schema_hashes() { + let hash = "d55d6000b43562e7be04702cdd4335452d1eb6df1f0fbea924e4c6434fff2871"; + assert_eq!( + extract_assignment_string( + &format!("pub const SCHEMA_SHA256: &str = \"{hash}\";"), + "SCHEMA_SHA256" + ), + Some(hash.to_owned()) + ); + assert_eq!( + extract_assignment_string( + &format!("export const SCHEMA_HASH = '{hash}';"), + "SCHEMA_HASH" + ), + Some(hash.to_owned()) + ); + } + + #[test] + fn wesley_sync_accepts_only_sha256_hex_schema_hashes() { + assert!(is_sha256_hex( + "d55d6000b43562e7be04702cdd4335452d1eb6df1f0fbea924e4c6434fff2871" + )); + assert!(!is_sha256_hex("d55d6000")); + assert!(!is_sha256_hex( + "z55d6000b43562e7be04702cdd4335452d1eb6df1f0fbea924e4c6434fff2871" + )); + } + fn sample_pr_overview() -> PrOverview { PrOverview { owner: "flyingrobots".to_owned(), @@ -5806,10 +6487,10 @@ mod tests { fn public_asset_resolution() { let source = Path::new("docs/index.md"); let docs_root = Path::new("docs"); - let candidates = build_candidates(source, "/collision-dpo-tour.html", docs_root); + let candidates = build_candidates(source, "/example-public-asset.html", docs_root); assert!(candidates .iter() - .any(|p| p.ends_with("docs/public/collision-dpo-tour.html"))); + .any(|p| p.ends_with("docs/public/example-public-asset.html"))); } // ── pr_status helpers ────────────────────────────────────────────