diff --git a/.agents/skills/deploy-railway/SKILL.md b/.agents/skills/deploy-railway/SKILL.md
new file mode 100644
index 0000000..588c828
--- /dev/null
+++ b/.agents/skills/deploy-railway/SKILL.md
@@ -0,0 +1,143 @@
+---
+name: deploy-railway
+description: Deploy the Hyperindex frontend and backend to Railway. Use this skill when the user asks to deploy, redeploy, or update the production services on Railway.
+---
+
+# Deploy Hyperindex to Railway
+
+## Project Layout
+
+Hyperindex is a monorepo with two Railway services:
+
+| Service | Source | Dockerfile | Railway Name |
+|---------|--------|------------|-------------|
+| **Backend** (Go) | repo root `/` | `Dockerfile` | `backend` |
+| **Frontend** (Next.js) | `client/` | `client/Dockerfile` | `frontend` |
+
+## Custom Domains
+
+| Service | Domain |
+|---------|--------|
+| Backend | `https://api.hi.gainforest.app` |
+| Frontend | `https://hi.gainforest.app` |
+
+Legacy domains (still active): `backend-production-95a22.up.railway.app`, `frontend-production-dcce.up.railway.app`
+
+## Prerequisites
+
+- Railway CLI v4+ installed and logged in (`railway whoami`)
+- Linked to project: `railway status` should show project `hyperindex`
+- On the correct git branch (typically `tap-feature`)
+
+## Deploy Backend
+
+The backend deploys from the repo root using the root `Dockerfile`:
+
+```bash
+railway up -s backend -d
+```
+
+This uploads the entire repo, builds the Go binary in Docker, and deploys it. Takes ~3-5 minutes.
+
+### Verify backend:
+```bash
+curl -s https://api.hi.gainforest.app/
+# Should return: {"name":"Hyperindex","version":"0.1.0-dev",...}
+```
+
+## Deploy Frontend
+
+**CRITICAL:** The frontend MUST use `--path-as-root` to avoid Railway picking up the root Go Dockerfile:
+
+```bash
+railway up --path-as-root client/ -s frontend -d
+```
+
+This makes `client/` the archive root so Railway only sees `client/Dockerfile` (the Next.js build). Takes ~3-5 minutes.
+
+### Why `--path-as-root`?
+
+Without it, `railway up` uploads the entire monorepo and Railway finds the root `Dockerfile` (Go backend) instead of `client/Dockerfile` (Next.js frontend). This causes the frontend service to run the Go binary instead of the Next.js app.
+
+### Verify frontend:
+```bash
+curl -s -o /dev/null -w "%{http_code}" https://hi.gainforest.app/
+# Should return: 200
+
+# Verify it's actually Next.js (not the Go server):
+curl -s https://hi.gainforest.app/ | grep -o '<title>[^<]*'
+# Should return: <title>Hyperindex
+```
+
+## Deploy Both Services
+
+```bash
+# Backend (from repo root)
+railway up -s backend -d
+
+# Frontend (with path-as-root)
+railway up --path-as-root client/ -s frontend -d
+```
+
+## Environment Variables
+
+### Backend (`backend` service)
+| Variable | Value |
+|----------|-------|
+| `HOST` | `0.0.0.0` |
+| `PORT` | `8080` |
+| `DATABASE_URL` | `sqlite:/app/data/hypergoat.db` |
+| `EXTERNAL_BASE_URL` | `https://api.hi.gainforest.app` |
+| `TRUST_PROXY_HEADERS` | `true` |
+| `ADMIN_DIDS` | `did:plc:qc42fmqqlsmdq7jiypiiigww` (daviddao.org) |
+| `OAUTH_LOOPBACK_MODE` | `true` |
+| `SECRET_KEY_BASE` | *(set on Railway, do not change)* |
+
+### Frontend (`frontend` service)
+| Variable | Value |
+|----------|-------|
+| `PORT` | `3000` |
+| `PUBLIC_URL` | `https://hi.gainforest.app` |
+| `NEXT_PUBLIC_API_URL` | `https://api.hi.gainforest.app` |
+| `HYPERINDEX_URL` | `https://api.hi.gainforest.app` |
+| `COOKIE_SECRET` | *(set on Railway, do not change)* |
+| `ATPROTO_JWK_PRIVATE` | *(ES256 JWK, set on Railway, do not change)* |
+
+**Note:** `NEXT_PUBLIC_API_URL` is a build-time variable (inlined by Next.js during `npm run build`). The `client/Dockerfile` declares `ARG NEXT_PUBLIC_API_URL` so Railway passes it during Docker build.
+
+## Troubleshooting
+
+### Frontend shows Go JSON response instead of HTML
+You forgot `--path-as-root client/`. Redeploy with:
+```bash
+railway up --path-as-root client/ -s frontend -d
+```
+
+### "Application not found" on custom domain
+SSL certificate is still provisioning. Wait 5-15 minutes after adding DNS records.
+
+### GraphiQL returns 500 through frontend
+GraphiQL is served directly by the backend. The frontend has a `/graphiql` server-side redirect route that redirects to `https://api.hi.gainforest.app/graphiql`.
+
+### OAuth login fails
+Check that `ATPROTO_JWK_PRIVATE` and `PUBLIC_URL` are set on the frontend service. Generate a new JWK with:
+```bash
+node scripts/generate-jwk.js # (in hyperscan repo, or client/scripts/ if copied)
+```
+
+### "admin privileges required" after login
+Ensure `TRUST_PROXY_HEADERS=true` is set on the backend. Without it, the backend ignores the `X-User-DID` header from the Next.js proxy.
+
+## Setting Environment Variables
+
+```bash
+# Set a variable on a service
+railway variables set 'KEY=value' -s backend
+railway variables set 'KEY=value' -s frontend
+
+# View all variables for a service
+railway variables -s backend
+railway variables -s frontend
+```
+
+After changing env vars, redeploy the affected service.
diff --git a/.beads/.gitignore b/.beads/.gitignore
new file mode 100644
index 0000000..0acd8c6
--- /dev/null
+++ b/.beads/.gitignore
@@ -0,0 +1,46 @@
+# SQLite databases
+*.db
+*.db?*
+*.db-journal
+*.db-wal
+*.db-shm
+
+# Daemon runtime files
+daemon.lock
+daemon.log
+daemon.pid
+bd.sock
+sync-state.json
+last-touched
+
+# Local version tracking (prevents upgrade notification spam after git ops)
+.local_version
+
+# Legacy database files
+db.sqlite
+bd.db
+
+# Worktree redirect file (contains relative path to main repo's .beads/)
+# Must not be committed as paths would be wrong in other clones
+redirect
+
+# Merge artifacts (temporary files from 3-way merge)
+beads.base.jsonl
+beads.base.meta.json
+beads.left.jsonl
+beads.left.meta.json
+beads.right.jsonl
+beads.right.meta.json
+
+# Sync state (local-only, per-machine)
+# These files are machine-specific and should not be shared across clones
+.sync.lock
+.jsonl.lock
+sync_base.jsonl
+export-state/
+
+# NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here.
+# They would override fork protection in .git/info/exclude, allowing
+# contributors to accidentally commit upstream issue databases.
+# The JSONL files (issues.jsonl, interactions.jsonl) and config files
+# are tracked by git by default since no pattern above ignores them.
diff --git a/.beads/.jsonl.lock b/.beads/.jsonl.lock
new file mode 100644
index 0000000..e69de29
diff --git a/.beads/.local_version b/.beads/.local_version
new file mode 100644
index 0000000..76d0ef9
--- /dev/null
+++ b/.beads/.local_version
@@ -0,0 +1 @@
+0.49.6
diff --git a/.beads/README.md b/.beads/README.md
new file mode 100644
index 0000000..50f281f
--- /dev/null
+++ b/.beads/README.md
@@ -0,0 +1,81 @@
+# Beads - AI-Native Issue Tracking
+
+Welcome to Beads! This repository uses **Beads** for issue tracking - a modern, AI-native tool designed to live directly in your codebase alongside your code.
+
+## What is Beads?
+
+Beads is issue tracking that lives in your repo, making it perfect for AI coding agents and developers who want their issues close to their code. No web UI required - everything works through the CLI and integrates seamlessly with git.
+
+**Learn more:** [github.com/steveyegge/beads](https://github.com/steveyegge/beads)
+
+## Quick Start
+
+### Essential Commands
+
+```bash
+# Create new issues
+bd create "Add user authentication"
+
+# View all issues
+bd list
+
+# View issue details
+bd show <issue-id>
+
+# Update issue status
+bd update <issue-id> --status in_progress
+bd update <issue-id> --status done
+
+# Sync with git remote
+bd sync
+```
+
+### Working with Issues
+
+Issues in Beads are:
+- **Git-native**: Stored in `.beads/issues.jsonl` and synced like code
+- **AI-friendly**: CLI-first design works perfectly with AI coding agents
+- **Branch-aware**: Issues can follow your branch workflow
+- **Always in sync**: Auto-syncs with your commits
+
+## Why Beads?
+
+✨ **AI-Native Design**
+- Built specifically for AI-assisted development workflows
+- CLI-first interface works seamlessly with AI coding agents
+- No context switching to web UIs
+
+🚀 **Developer Focused**
+- Issues live in your repo, right next to your code
+- Works offline, syncs when you push
+- Fast, lightweight, and stays out of your way
+
+🔧 **Git Integration**
+- Automatic sync with git commits
+- Branch-aware issue tracking
+- Intelligent JSONL merge resolution
+
+## Get Started with Beads
+
+Try Beads in your own projects:
+
+```bash
+# Install Beads
+curl -sSL https://raw.githubusercontent.com/steveyegge/beads/main/scripts/install.sh | bash
+
+# Initialize in your repo
+bd init
+
+# Create your first issue
+bd create "Try out Beads"
+```
+
+## Learn More
+
+- **Documentation**: [github.com/steveyegge/beads/docs](https://github.com/steveyegge/beads/tree/main/docs)
+- **Quick Start Guide**: Run `bd quickstart`
+- **Examples**: [github.com/steveyegge/beads/examples](https://github.com/steveyegge/beads/tree/main/examples)
+
+---
+
+*Beads: Issue tracking that moves at the speed of thought* ⚡
diff --git a/.beads/beads.db b/.beads/beads.db
new file mode 100644
index 0000000..48bf4d3
Binary files /dev/null and b/.beads/beads.db differ
diff --git a/.beads/beads.db-shm b/.beads/beads.db-shm
new file mode 100644
index 0000000..c6bf417
Binary files /dev/null and b/.beads/beads.db-shm differ
diff --git a/.beads/beads.db-wal b/.beads/beads.db-wal
new file mode 100644
index 0000000..1c3f32f
Binary files /dev/null and b/.beads/beads.db-wal differ
diff --git a/.beads/config.yaml b/.beads/config.yaml
new file mode 100644
index 0000000..ff8bc92
--- /dev/null
+++ b/.beads/config.yaml
@@ -0,0 +1,67 @@
+# Beads Configuration File
+# This file configures default behavior for all bd commands in this repository
+# All settings can also be set via environment variables (BD_* prefix)
+# or overridden with command-line flags
+
+# Issue prefix for this repository (used by bd init)
+# If not set, bd init will auto-detect from directory name
+# Example: issue-prefix: "myproject" creates issues like "myproject-1", "myproject-2", etc.
+# issue-prefix: ""
+
+# Use no-db mode: load from JSONL, no SQLite, write back after each command
+# When true, bd will use .beads/issues.jsonl as the source of truth
+# instead of SQLite database
+# no-db: false
+
+# Disable daemon for RPC communication (forces direct database access)
+# no-daemon: false
+
+# Disable auto-flush of database to JSONL after mutations
+# no-auto-flush: false
+
+# Disable auto-import from JSONL when it's newer than database
+# no-auto-import: false
+
+# Enable JSON output by default
+# json: false
+
+# Default actor for audit trails (overridden by BD_ACTOR or --actor)
+# actor: ""
+
+# Path to database (overridden by BEADS_DB or --db)
+# db: ""
+
+# Auto-start daemon if not running (can also use BEADS_AUTO_START_DAEMON)
+# auto-start-daemon: true
+
+# Debounce interval for auto-flush (can also use BEADS_FLUSH_DEBOUNCE)
+# flush-debounce: "5s"
+
+# Export events (audit trail) to .beads/events.jsonl on each flush/sync
+# When enabled, new events are appended incrementally using a high-water mark.
+# Use 'bd export --events' to trigger manually regardless of this setting.
+# events-export: false
+
+# Git branch for beads commits (bd sync will commit to this branch)
+# IMPORTANT: Set this for team projects so all clones use the same sync branch.
+# This setting persists across clones (unlike database config which is gitignored).
+# Can also use BEADS_SYNC_BRANCH env var for local override.
+# If not set, bd sync will require you to run 'bd config set sync.branch <branch>'.
+# sync-branch: "beads-sync"
+
+# Multi-repo configuration (experimental - bd-307)
+# Allows hydrating from multiple repositories and routing writes to the correct JSONL
+# repos:
+# primary: "." # Primary repo (where this database lives)
+# additional: # Additional repos to hydrate from (read-only)
+# - ~/beads-planning # Personal planning repo
+# - ~/work-planning # Work planning repo
+
+# Integration settings (access with 'bd config get/set')
+# These are stored in the database, not in this file:
+# - jira.url
+# - jira.project
+# - linear.url
+# - linear.api-key
+# - github.org
+# - github.repo
diff --git a/.beads/daemon.lock b/.beads/daemon.lock
new file mode 100644
index 0000000..321c431
--- /dev/null
+++ b/.beads/daemon.lock
@@ -0,0 +1,7 @@
+{
+ "pid": 46504,
+ "parent_pid": 46496,
+ "database": "/Users/david/Projects/gainforest/hyperindex/.beads/beads.db",
+ "version": "0.49.6",
+ "started_at": "2026-02-18T08:44:00.42754Z"
+}
diff --git a/.beads/daemon.log b/.beads/daemon.log
new file mode 100644
index 0000000..41e5eea
--- /dev/null
+++ b/.beads/daemon.log
@@ -0,0 +1,777 @@
+time=2026-02-18T16:44:00.440+08:00 level=INFO msg="Daemon started" interval=5s auto_commit=false auto_push=false
+time=2026-02-18T16:44:00.440+08:00 level=INFO msg="using database" path=/Users/david/Projects/gainforest/hyperindex/.beads/beads.db
+time=2026-02-18T16:44:00.467+08:00 level=INFO msg="database opened" path=/Users/david/Projects/gainforest/hyperindex/.beads/beads.db backend=sqlite freshness_checking=true
+time=2026-02-18T16:44:00.467+08:00 level=INFO msg="upgrading .beads/.gitignore"
+time=2026-02-18T16:44:00.467+08:00 level=WARN msg="failed to upgrade .gitignore" error="open .beads/.gitignore: no such file or directory"
+time=2026-02-18T16:44:00.473+08:00 level=INFO msg="Repository fingerprint validated" repo_id=f8ba04d3
+time=2026-02-18T16:44:00.473+08:00 level=INFO msg="starting RPC server" socket=/Users/david/Projects/gainforest/hyperindex/.beads/bd.sock
+time=2026-02-18T16:44:00.473+08:00 level=INFO msg="RPC server ready (socket listening)"
+time=2026-02-18T16:44:00.479+08:00 level=INFO msg="registered in global registry"
+time=2026-02-18T16:44:00.480+08:00 level=WARN msg="initial import failed (continuing anyway)" error="failed to open JSONL: open /Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl: no such file or directory"
+time=2026-02-18T16:44:00.480+08:00 level=INFO msg="Starting sync cycle" mode="sync cycle"
+time=2026-02-18T16:44:00.486+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:44:01.276+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:01.304+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:44:01.324+08:00 level=INFO msg="Sync cycle complete"
+time=2026-02-18T16:44:01.324+08:00 level=INFO msg="monitoring parent process" pid=0
+time=2026-02-18T16:44:01.324+08:00 level=INFO msg="using event-driven mode"
+time=2026-02-18T16:44:01.325+08:00 level=INFO msg="Auto-pull disabled: use 'git pull' manually to sync remote changes"
+time=2026-02-18T16:44:02.333+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:03.335+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:03.337+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:04.111+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:04.111+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:04.112+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:44:05.637+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-md3.10
+time=2026-02-18T16:44:06.138+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:44:06.138+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:44:06.150+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:44:06.152+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:44:06.152+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:44:06.201+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:44:06.201+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:07.203+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:07.204+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:07.215+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:07.215+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:44:07.215+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:44:07.990+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:07.990+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:07.990+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:44:07.998+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:44:07.999+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:44:09.223+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:44:09.274+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:44:09.274+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:10.275+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:10.276+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:10.287+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:10.299+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:10.299+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:44:10.299+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:44:11.050+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:11.050+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:11.051+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:44:11.289+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:11.289+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:11.297+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:11.297+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:44:11.297+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:44:12.015+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:12.015+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:12.015+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:44:54.719+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:44:54.770+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:44:54.770+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:55.772+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:55.774+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:55.786+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:44:56.526+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:56.526+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:56.526+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:44:56.788+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:44:56.789+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:44:57.616+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:44:57.616+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:44:57.616+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:45:00.597+08:00 level=INFO msg="Mutation detected" type=status issue_id=hyperindex-md3.10
+time=2026-02-18T16:45:01.098+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:45:01.100+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:45:01.117+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:01.119+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:45:01.120+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:45:01.168+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:01.168+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:02.170+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:02.171+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:02.186+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:02.186+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:02.186+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:02.918+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:02.918+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:02.918+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:02.926+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:02.927+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:02.960+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:03.011+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:03.011+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:04.012+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:04.013+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:04.026+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:04.026+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:04.026+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:04.768+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:04.769+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:04.769+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:45:46.287+08:00 level=INFO msg="Mutation detected" type=status issue_id=hyperindex-md3.10
+time=2026-02-18T16:45:46.788+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:45:46.788+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:45:46.796+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:46.797+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:45:46.797+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:45:46.847+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:46.847+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:47.849+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:47.850+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:47.861+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:47.863+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:47.863+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:47.863+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:48.691+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:48.691+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:48.692+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:48.699+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:48.699+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:48.862+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:48.863+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:48.872+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:48.874+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:48.874+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:48.874+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:49.684+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:49.684+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:49.684+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:49.692+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:49.693+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:49.875+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:49.875+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:49.884+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:49.887+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:49.887+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:49.887+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:50.135+08:00 level=INFO msg="Mutation detected" type=status issue_id=hyperindex-md3
+time=2026-02-18T16:45:50.615+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:50.615+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:50.616+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:50.622+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:50.622+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:50.635+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:45:50.635+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:45:50.635+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:45:50.642+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:45:50.647+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:50.649+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:45:50.649+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:45:50.699+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:50.699+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:51.134+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:51.135+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:51.143+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:51.143+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:51.143+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:51.700+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:51.701+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:51.710+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:51.710+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:45:51.710+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:45:51.871+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:51.871+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:51.872+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:51.879+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:51.880+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:52.407+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:52.407+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:52.407+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:45:52.415+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:45:52.416+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:45:53.534+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:53.585+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:53.585+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:53.603+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:53.721+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:45:53.772+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:45:53.772+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:54.774+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:54.774+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:54.782+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:45:55.539+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:55.540+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:55.540+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:45:55.784+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:45:55.784+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:45:56.515+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:45:56.515+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:45:56.515+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:48:34.554+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-6bl
+time=2026-02-18T16:48:35.056+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:48:35.056+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:48:35.069+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:48:35.071+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:48:35.071+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:48:35.120+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:48:35.121+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:36.123+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:36.124+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:36.142+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:36.142+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:36.142+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:36.904+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:36.904+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:36.904+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:48:36.912+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:48:36.914+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:48:40.475+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-73q
+time=2026-02-18T16:48:40.976+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:48:40.977+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:48:40.978+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:48:40.991+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:48:41.002+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:48:41.005+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:48:41.005+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:48:41.053+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:48:41.053+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:42.055+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:42.056+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:42.070+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:42.070+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:42.070+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:42.825+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:42.825+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:42.827+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:48:42.835+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:48:42.836+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:48:45.610+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-m1t
+time=2026-02-18T16:48:46.111+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:48:46.112+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:48:46.113+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:48:46.127+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:48:46.138+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:48:46.141+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:48:46.141+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:48:46.190+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:48:46.190+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:47.192+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:47.193+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:47.208+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:47.208+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:47.208+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:48.004+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:48.004+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:48.004+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:48:48.012+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:48:48.013+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:48:53.283+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-6bl
+time=2026-02-18T16:48:53.381+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-73q
+time=2026-02-18T16:48:53.471+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-m1t
+time=2026-02-18T16:48:53.972+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:48:53.973+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:48:53.973+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:48:53.988+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:48:54.000+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:48:54.003+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:48:54.003+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:48:54.051+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:48:54.051+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:55.054+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:55.055+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:55.068+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:55.068+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:55.068+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:55.812+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:55.812+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:55.813+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:48:55.821+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:48:55.823+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:48:56.263+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:48:56.314+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:48:56.314+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:57.316+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:57.317+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:57.329+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:57.348+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:57.348+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:57.348+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:58.063+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:58.063+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:58.064+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:48:58.331+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:48:58.332+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:48:58.345+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:48:58.345+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:48:58.345+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:48:59.078+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:48:59.078+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:48:59.079+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:49:26.577+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:26.628+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:26.628+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:27.630+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:27.631+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:27.647+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:27.651+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:27.651+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:27.651+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:28.425+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:28.425+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:28.427+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:49:28.649+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:28.650+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:28.664+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:28.664+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:28.664+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:29.393+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:29.393+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:29.393+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:49:37.492+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-uau
+time=2026-02-18T16:49:37.993+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:49:37.994+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:49:38.008+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:38.011+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:49:38.011+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:49:38.059+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:38.059+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:39.061+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:39.062+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:39.075+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:39.075+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:39.075+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:39.797+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:39.797+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:39.798+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:49:39.807+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:49:39.809+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:49:41.018+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-uau
+time=2026-02-18T16:49:41.520+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:49:41.520+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:49:41.521+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:49:41.533+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:49:41.554+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:41.573+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:49:41.573+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:49:41.605+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:41.605+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:42.607+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:42.617+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:42.633+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:42.633+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:42.633+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:43.404+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:43.404+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:43.405+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:49:43.414+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:49:43.415+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:49:47.688+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-3jy
+time=2026-02-18T16:49:48.189+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:49:48.190+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:49:48.192+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:49:48.208+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:49:48.217+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:48.220+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:49:48.220+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:49:48.268+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:48.268+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:49.271+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:49.272+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:49.284+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:49.284+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:49.284+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:49.970+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-3jy
+time=2026-02-18T16:49:49.997+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:49.997+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:49.998+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:49:50.004+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:49:50.005+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:49:50.471+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T16:49:50.471+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T16:49:50.472+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T16:49:50.486+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T16:49:50.497+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:50.500+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T16:49:50.500+08:00 level=INFO msg="Export complete"
+time=2026-02-18T16:49:50.549+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:50.549+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:51.551+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:51.552+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:51.562+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:51.562+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:51.562+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:52.285+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:52.285+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:52.286+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T16:49:52.294+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T16:49:52.296+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T16:49:53.259+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T16:49:53.310+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T16:49:53.310+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:54.312+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:54.314+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:54.329+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:54.349+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:54.349+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:54.349+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:55.085+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:55.085+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:55.086+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T16:49:55.330+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T16:49:55.331+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T16:49:55.344+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T16:49:55.344+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T16:49:55.344+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T16:49:56.078+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T16:49:56.078+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T16:49:56.079+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:10:15.982+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq
+time=2026-02-18T17:10:16.484+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:10:16.484+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:10:16.493+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:10:16.495+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:10:16.495+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:10:16.544+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:10:16.544+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:17.546+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:10:17.547+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:10:17.563+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:17.563+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:10:17.563+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:10:18.320+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:10:18.320+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:10:18.321+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:10:18.328+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:10:18.328+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:10:35.044+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.1
+time=2026-02-18T17:10:35.545+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:10:35.546+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:10:35.546+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:10:35.553+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:10:35.560+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:10:35.562+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:10:35.562+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:10:35.611+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:10:35.611+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:36.613+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:10:36.614+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:10:36.625+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:36.625+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:10:36.625+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:10:37.335+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:10:37.335+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:10:37.335+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:10:37.341+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:10:37.342+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:10:46.160+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.2
+time=2026-02-18T17:10:46.661+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:10:46.661+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:10:46.662+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:10:46.676+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:10:46.686+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:10:46.688+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:10:46.688+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:10:46.737+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:10:46.737+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:47.740+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:10:47.740+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:10:47.754+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:10:47.754+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:10:47.754+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:10:48.539+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:10:48.539+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:10:48.540+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:10:48.548+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:10:48.549+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:04.452+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.3
+time=2026-02-18T17:11:04.953+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:11:04.953+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:11:04.954+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:11:04.964+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:11:04.973+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:04.977+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:11:04.977+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:11:05.024+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:05.024+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:06.026+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:06.030+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:06.039+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:06.039+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:11:06.039+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:11:06.787+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:06.788+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:06.788+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:11:06.797+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:11:06.798+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:20.029+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.4
+time=2026-02-18T17:11:20.530+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:11:20.530+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:11:20.530+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:11:20.541+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:11:20.551+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:20.554+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:11:20.554+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:11:20.603+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:20.603+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:21.605+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:21.605+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:21.619+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:21.619+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:11:21.619+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:11:22.323+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:22.324+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:22.324+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:11:22.332+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:11:22.333+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:29.668+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.5
+time=2026-02-18T17:11:30.169+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:11:30.170+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:11:30.170+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:11:30.181+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:11:30.190+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:30.192+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:11:30.192+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:11:30.241+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:30.241+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:31.243+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:31.244+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:31.257+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:31.257+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:11:31.257+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:11:32.027+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:32.027+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:32.027+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:11:32.037+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:11:32.037+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:43.522+08:00 level=INFO msg="Mutation detected" type=create issue_id=hyperindex-xuq.6
+time=2026-02-18T17:11:44.024+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:11:44.024+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:11:44.024+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:11:44.032+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:11:44.039+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:44.041+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:11:44.041+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:11:44.090+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:44.090+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:45.092+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:45.093+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:45.110+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:45.110+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:11:45.110+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:11:45.873+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:45.873+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:45.874+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:11:45.880+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:11:45.881+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:48.534+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.4
+time=2026-02-18T17:11:49.035+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:11:49.036+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:11:49.036+08:00 level=INFO msg="JSONL changed externally, importing before export"
+time=2026-02-18T17:11:49.046+08:00 level=INFO msg="Auto-import complete, proceeding with export"
+time=2026-02-18T17:11:49.055+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:49.057+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:11:49.058+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:11:49.106+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:49.106+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:50.109+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:50.109+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:50.119+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:50.119+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:11:50.119+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:11:50.836+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:50.836+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:50.837+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:11:50.845+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:11:50.846+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:11:54.000+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:54.051+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:54.051+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:54.162+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:11:54.213+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:11:54.213+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:55.216+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:55.216+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:55.221+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:11:56.097+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:56.097+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:56.097+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:11:56.224+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:11:56.224+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:11:56.941+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:11:56.941+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:11:56.942+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:11.280+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:11.332+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:11.332+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:11.716+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:11.767+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:11.767+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:12.201+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:12.252+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:12.252+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:12.468+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:12.519+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:12.519+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:12.739+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:12.790+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:12.790+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:12.869+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:13.871+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:13.872+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:13.882+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:14.616+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:14.616+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:14.616+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:14.884+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:14.885+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:15.674+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:15.674+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:15.674+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:16.347+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.3
+time=2026-02-18T17:12:16.551+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.6
+time=2026-02-18T17:12:17.010+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.2
+time=2026-02-18T17:12:17.512+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:12:17.512+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:12:17.521+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:17.523+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:12:17.523+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:12:17.572+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:17.572+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:17.908+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.1
+time=2026-02-18T17:12:18.384+08:00 level=INFO msg="Mutation detected" type=update issue_id=hyperindex-xuq.5
+time=2026-02-18T17:12:18.536+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:18.573+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:18.574+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:18.579+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:18.579+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:18.579+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:18.586+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:18.586+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:18.586+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:18.773+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:18.824+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:18.824+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:18.885+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:12:18.885+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:12:18.892+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:18.893+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:12:18.893+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:12:18.943+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:18.943+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:19.260+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:19.260+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:19.260+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:19.696+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:19.747+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:19.747+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:19.945+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:19.946+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:19.955+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:19.970+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:19.970+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:19.970+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:20.327+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:20.378+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:20.378+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:20.685+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:20.685+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:20.686+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:20.689+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:20.741+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:20.741+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:21.742+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:21.743+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:21.752+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:21.766+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:21.767+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:21.767+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:22.450+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:22.450+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:22.450+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:22.754+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:22.755+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:22.765+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:22.765+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:22.765+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:23.449+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:23.449+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:23.450+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:33.665+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:33.716+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:33.716+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:34.719+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:34.726+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:34.738+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:35.570+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:35.570+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:35.570+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:35.740+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:35.741+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:36.302+08:00 level=INFO msg="Mutation detected" type=status issue_id=hyperindex-xuq.2
+time=2026-02-18T17:12:36.474+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:36.474+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:36.475+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:36.803+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:12:36.804+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:12:36.821+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:36.825+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:12:36.825+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:12:36.873+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:36.873+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:37.875+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:37.875+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:37.886+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:37.886+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:37.886+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:38.068+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:38.120+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:38.120+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:38.710+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:38.710+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:38.711+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:39.122+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:39.123+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:39.133+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:39.133+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:39.133+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:39.831+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:39.831+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:39.831+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:43.524+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:43.575+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:43.575+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:44.577+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:44.578+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:44.585+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:45.348+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:45.349+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:45.349+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:45.587+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:45.588+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:46.302+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:46.302+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:46.302+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:46.604+08:00 level=INFO msg="Mutation detected" type=status issue_id=hyperindex-xuq.5
+time=2026-02-18T17:12:47.105+08:00 level=INFO msg="Export triggered by mutation events"
+time=2026-02-18T17:12:47.105+08:00 level=INFO msg=Starting mode=export
+time=2026-02-18T17:12:47.113+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:47.115+08:00 level=INFO msg="Exported to JSONL"
+time=2026-02-18T17:12:47.115+08:00 level=INFO msg="Export complete"
+time=2026-02-18T17:12:47.164+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:47.164+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:48.166+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:48.167+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:48.176+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:48.176+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:48.176+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:48.917+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:48.918+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:48.918+08:00 level=INFO msg="JSONL content changed, proceeding with operation..." mode=auto-import
+time=2026-02-18T17:12:48.925+08:00 level=INFO msg="Imported from JSONL"
+time=2026-02-18T17:12:48.926+08:00 level=INFO msg="Auto-import complete"
+time=2026-02-18T17:12:50.125+08:00 level=INFO msg="JSONL removed/renamed, re-establishing watch"
+time=2026-02-18T17:12:50.176+08:00 level=INFO msg="Successfully re-established JSONL watch" delay=50ms
+time=2026-02-18T17:12:50.176+08:00 level=INFO msg="JSONL file created" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:50.178+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:51.180+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:51.181+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:51.186+08:00 level=INFO msg="File change detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:51.197+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:51.197+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:51.197+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:51.931+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:51.931+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:51.931+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
+time=2026-02-18T17:12:52.187+08:00 level=INFO msg="Import triggered by file change"
+time=2026-02-18T17:12:52.188+08:00 level=INFO msg="Starting auto-import" mode=auto-import
+time=2026-02-18T17:12:52.199+08:00 level=INFO msg="WARNING: Uncommitted local changes detected" path=/Users/david/Projects/gainforest/hyperindex/.beads/issues.jsonl
+time=2026-02-18T17:12:52.199+08:00 level=INFO msg=" Pulling from remote may overwrite local unpushed changes."
+time=2026-02-18T17:12:52.199+08:00 level=INFO msg=" Consider running 'bd sync' to commit and push your changes first."
+time=2026-02-18T17:12:52.951+08:00 level=INFO msg="Pulled from remote"
+time=2026-02-18T17:12:52.951+08:00 level=INFO msg="🔍 Checking JSONL content hash AFTER pull (fix: auto-pull-not-pulling)"
+time=2026-02-18T17:12:52.953+08:00 level=INFO msg="Skipping: JSONL content unchanged" mode=auto-import
diff --git a/.beads/daemon.pid b/.beads/daemon.pid
new file mode 100644
index 0000000..68d11c4
--- /dev/null
+++ b/.beads/daemon.pid
@@ -0,0 +1 @@
+46504
diff --git a/.beads/interactions.jsonl b/.beads/interactions.jsonl
new file mode 100644
index 0000000..e69de29
diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl
new file mode 100644
index 0000000..2451d01
--- /dev/null
+++ b/.beads/issues.jsonl
@@ -0,0 +1,82 @@
+{"id":"hyperindex-0nk","title":"Epic: Frontend Design Overhaul — Adopt Hypercerts Design Language","description":"## Goals\n- Adopt the hypercerts-scaffold-atproto design language for the hyperindex Next.js client\n- Replace the goat emoji / logo.png with the hypercerts_logo.png from hyperscan\n- Add light mode / dark mode toggle (like hyperscan uses)\n- Rename all user-visible branding from 'hi' and 'Hypergoat' to 'Hyperindex'\n- Update API documentation pages to reflect new branding\n\n## Design Source\nThe design language comes from `../hypercerts-scaffold-atproto` which uses:\n- **Fonts**: Syne (display/headings) + Outfit (body text)\n- **Colors**: OKLCH color space with blue-gray hue angle 260, near-monochromatic palette\n- **Surfaces**: Glass-panel (frosted glass with backdrop-blur)\n- **Background**: noise-bg texture + gradient-mesh radial gradients\n- **Dark mode**: Full OKLCH dark theme with CSS custom properties\n- **Border radius**: 0.625rem (10px) base\n- **Animations**: fade-in-up, stagger-children, scale-in\n\n## Theme Toggle Source\nThe dark/light toggle comes from `../hyperscan` which uses:\n- `next-themes` library with `attribute='class'`, `defaultTheme='light'`, `enableSystem={false}`\n- Sun/Moon icon toggle button\n- `@custom-variant dark (\u0026:where(.dark, .dark *))` for Tailwind v4\n\n## Scope\n- All files in `client/` (Next.js frontend)\n- A few strings in `cmd/hypergoat/main.go` and `internal/server/graphiql.go` (rename branding)\n- Config files (`.env.example`, `client/src/lib/env.ts`, etc.)\n\n## Constraints\n- Do NOT change the Go module path (`github.com/GainForest/hypergoat`) — too many import references\n- Do NOT change the binary name or Dockerfile entrypoint\n- Do NOT change any Go tests except `handlers_test.go` title assertions\n- Preserve all existing functionality — this is purely visual + 
branding","status":"open","priority":1,"issue_type":"epic","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","created_at":"2026-02-18T17:32:33.630839+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:55:59.06656+08:00","labels":["needs-integration-review","scope:medium"]}
+{"id":"hyperindex-0nk.1","title":"Setup: Install next-themes, add Syne + Outfit fonts to layout","description":"## Files\n- client/package.json (modify)\n- client/src/app/layout.tsx (modify)\n\n## What to do\n\n### 1. Add next-themes dependency\nIn `client/package.json`, add to `dependencies`:\n```\n\"next-themes\": \"^0.4.6\"\n```\nThen run `npm install` in the `client/` directory.\n\n### 2. Replace fonts in layout.tsx\nReplace the current font imports:\n```tsx\n// REMOVE these:\nimport { Inter, EB_Garamond } from \"next/font/google\";\nconst inter = Inter({ variable: \"--font-inter\", subsets: [\"latin\"] });\nconst garamond = EB_Garamond({ variable: \"--font-garamond\", subsets: [\"latin\"], weight: [\"400\", \"500\", \"600\", \"700\"] });\n\n// ADD these:\nimport { Syne, Outfit } from \"next/font/google\";\n\nconst syne = Syne({\n variable: \"--font-syne\",\n subsets: [\"latin\"],\n weight: [\"400\", \"500\", \"600\", \"700\", \"800\"],\n});\n\nconst outfit = Outfit({\n variable: \"--font-outfit\",\n subsets: [\"latin\"],\n weight: [\"300\", \"400\", \"500\", \"600\", \"700\"],\n});\n```\n\n### 3. Update body className\nChange the body tag className from:\n```tsx\nclassName={`${inter.variable} ${garamond.variable} antialiased bg-white text-zinc-800`}\n```\nto:\n```tsx\nclassName={`${syne.variable} ${outfit.variable} antialiased`}\n```\n(Background/text colors will be handled by globals.css in a separate task)\n\n## Don't\n- Do NOT change any other files\n- Do NOT modify Providers, Header, or footer yet (those are separate tasks)\n- Do NOT remove the existing CSS variable references in globals.css yet","acceptance_criteria":"1. `npm ls next-themes` in client/ shows next-themes installed\n2. layout.tsx imports Syne and Outfit from next/font/google (not Inter or EB_Garamond)\n3. layout.tsx defines CSS variables --font-syne and --font-outfit\n4. body className includes both font variables and antialiased\n5. 
`npm run build` in client/ succeeds without errors","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T17:32:48.187066+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:47:52.643169+08:00","closed_at":"2026-02-18T17:47:52.643169+08:00","close_reason":"d39a509 Setup: Install next-themes, add Syne + Outfit fonts to layout","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-0nk.1","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:32:48.187828+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.10","title":"Pages: Update Lexicons + Backfill pages for dark mode","description":"## Files\n- client/src/app/lexicons/page.tsx (modify)\n- client/src/app/backfill/page.tsx (modify)\n\n## What to do\nUpdate both pages to use CSS custom properties instead of hardcoded color classes. Apply the same consistent pattern used in Dashboard and Docs pages.\n\n### Common pattern for both pages:\n\n**Text replacements (use style prop):**\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-800` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-700` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-600` → `style={{ color: \"var(--secondary-foreground)\" }}`\n- `text-zinc-500` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-300` → `style={{ color: \"var(--border)\" }}`\n- `text-emerald-*` → `style={{ color: \"var(--primary)\" }}` (or keep semantic OKLCH for status indicators)\n- `text-red-500`, `text-red-*` → keep as is (error state, works in both themes)\n\n**Background replacements:**\n- `bg-white` → `style={{ backgroundColor: \"var(--card)\" }}`\n- `bg-zinc-50` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-zinc-100` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-emerald-50` → `style={{ backgroundColor: \"var(--accent)\" }}`\n- `hover:bg-zinc-50` → `hover:opacity-90` or keep and override inline\n\n**Border replacements:**\n- `border-zinc-200/60` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-200` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-100` → `style={{ borderColor: \"var(--border)\" }}`\n- `divide-zinc-*` → use CSS variable for divider color\n\n**Font replacements:**\n- `font-[family-name:var(--font-garamond)]` → `font-[family-name:var(--font-syne)]`\n\n**Skeleton/loading states:**\n- `bg-zinc-100 animate-pulse` → `animate-pulse` with `style={{ backgroundColor: 
\"var(--muted)\" }}`\n\n### Lexicons-specific notes:\n- The tree view uses indentation and expand/collapse icons — keep the structural logic\n- NSID text in monospace — keep `font-mono`\n- Delete buttons / destructive actions — keep red colors\n\n### Backfill-specific notes:\n- Status indicators (running/idle) — keep green/gray semantic colors but use OKLCH:\n - Running: `style={{ color: \"oklch(0.65 0.15 155)\" }}` with a pulsing dot\n - Idle: `style={{ color: \"var(--muted-foreground)\" }}`\n- DID input field uses the Input component (already updated in task 6)\n\n## Don't\n- Do NOT change any data fetching, mutation, or state logic\n- Do NOT change component structure or imports\n- Do NOT change form validation or error handling behavior","acceptance_criteria":"1. No hardcoded zinc-*/emerald-* Tailwind color classes remain in either file (except text-red-* for errors)\n2. All font-garamond references replaced with font-syne\n3. All card-like containers use var(--card) background\n4. All borders use var(--border)\n5. All text uses appropriate CSS variables\n6. 
`npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T17:36:20.172865+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:54:00.049557+08:00","closed_at":"2026-02-18T17:54:00.049557+08:00","close_reason":"4b86216 feat: update lexicons and backfill pages for dark mode","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.10","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:36:20.173778+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.10","depends_on_id":"hyperindex-0nk.4","type":"blocks","created_at":"2026-02-18T17:36:20.17514+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.10","depends_on_id":"hyperindex-0nk.6","type":"blocks","created_at":"2026-02-18T17:36:20.176306+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.11","title":"Pages: Update Settings + Onboarding pages for dark mode","description":"## Files\n- client/src/app/settings/page.tsx (modify)\n- client/src/app/onboarding/page.tsx (modify)\n\n## What to do\nUpdate both pages to use CSS custom properties instead of hardcoded color classes. Apply the same consistent pattern.\n\n### Common pattern for both pages:\n\n**Text replacements (use style prop):**\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-800` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-700` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-600` → `style={{ color: \"var(--secondary-foreground)\" }}`\n- `text-zinc-500` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-300` → `style={{ color: \"var(--border)\" }}`\n- `text-emerald-*` → `style={{ color: \"var(--primary)\" }}`\n\n**Background replacements:**\n- `bg-white` → `style={{ backgroundColor: \"var(--card)\" }}`\n- `bg-zinc-50` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-zinc-100` → `style={{ backgroundColor: \"var(--muted)\" }}`\n\n**Border replacements:**\n- `border-zinc-200/60` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-200` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-100` → `style={{ borderColor: \"var(--border)\" }}`\n\n**Font replacements:**\n- `font-[family-name:var(--font-garamond)]` → `font-[family-name:var(--font-syne)]`\n\n### Settings-specific notes:\n- The settings page uses Card components (already updated in task 6)\n- Keep the danger zone section with red styling (text-red-*, border-red-*) — these semantic colors work in both themes\n- OAuth client cards — update background and border colors\n- Admin DID list — update colors\n\n### Onboarding-specific notes:\n- The step indicator/stepper — update active/inactive colors:\n - Active step: `style={{ backgroundColor: 
\"var(--primary)\", color: \"var(--primary-foreground)\" }}`\n - Completed step: same as active\n - Upcoming step: `style={{ backgroundColor: \"var(--muted)\", color: \"var(--muted-foreground)\" }}`\n- Welcome screen text and decorative elements\n- Success/complete screen at the end\n\n## Don't\n- Do NOT change form logic, state management, or mutations\n- Do NOT change the step flow in onboarding\n- Do NOT change card layout or section structure\n- Do NOT change danger zone logic (reset all, etc.)","acceptance_criteria":"1. No hardcoded zinc-*/emerald-* Tailwind color classes remain in either file (except red for danger zone)\n2. All font-garamond references replaced with font-syne\n3. All card backgrounds use var(--card) or Card component\n4. All borders use var(--border)\n5. Onboarding stepper uses CSS variables for active/inactive states\n6. `npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T17:36:33.968542+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:54:17.9742+08:00","closed_at":"2026-02-18T17:54:17.9742+08:00","close_reason":"6033c8f feat: update settings and onboarding pages for dark mode","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.11","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:36:33.969326+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.11","depends_on_id":"hyperindex-0nk.4","type":"blocks","created_at":"2026-02-18T17:36:33.970574+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.11","depends_on_id":"hyperindex-0nk.6","type":"blocks","created_at":"2026-02-18T17:36:33.971536+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.12","title":"Docs: Update agent docs route — rename branding to Hyperindex","description":"## Files\n- client/src/app/docs/agents/route.ts (modify)\n\n## What to do\nUpdate the agent documentation markdown content to use consistent \"Hyperindex\" branding.\n\n### 1. Update title (line ~4)\nChange: `# Hyperindex (hi) API - Complete Integration Guide for AI Agents`\nTo: `# Hyperindex API - Complete Integration Guide for AI Agents`\n\n### 2. Update \"What is Hyperindex?\" section (lines ~6-15)\nChange:\n```\n**Hyperindex** (short: **hi**, formerly known as Hypergoat) is GainForest's AT Protocol AppView server...\nThe name \"hi\" stands for **H**yper**i**ndex -- it indexes...\n```\nTo:\n```\n**Hyperindex** is GainForest's AT Protocol AppView server for the Hypersphere ecosystem. It indexes Lexicon-defined records from the AT Protocol network and exposes them via a dynamically-generated GraphQL API.\n```\n\n### 3. Remove \"History\" line\nDelete: `- **History**: Formerly known as Hypergoat (Hypersphere Go ATProto AppView)`\n\n### 4. Update all remaining instances\nSearch for any remaining \"Hypergoat\" or \"hi\" alias references in the file and replace:\n- \"Hypergoat\" → \"Hyperindex\" (in text descriptions)\n- Remove any \"(hi)\" or \"short: hi\" references\n- Keep the string \"Hyperindex\" as-is where it already appears\n\n### 5. Keep URLs as-is\nDo NOT change the API_ENDPOINT or WS_ENDPOINT constants — these are deployment URLs.\n\n## Don't\n- Do NOT change the API_ENDPOINT or WS_ENDPOINT URL constants\n- Do NOT change the GraphQL query examples\n- Do NOT change the code examples\n- Do NOT change the response format for the GET handler\n- Do NOT change the Content-Type or Cache-Control headers","acceptance_criteria":"1. Title reads \"# Hyperindex API - Complete Integration Guide for AI Agents\" (no \"hi\" in parens)\n2. No \"Hypergoat\" string appears anywhere in the file\n3. No \"(hi)\" or \"short: hi\" references remain\n4. 
No \"formerly known as\" text remains\n5. API_ENDPOINT and WS_ENDPOINT URLs are unchanged\n6. `npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T17:36:51.204447+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:44:04.094028+08:00","closed_at":"2026-02-18T17:44:04.094028+08:00","close_reason":"3e6b656 docs: rename branding to Hyperindex in agents route","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-0nk.12","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:36:51.205271+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.13","title":"Backend: Rename Hypergoat → Hyperindex in Go user-visible strings + client config","description":"## Files\n- cmd/hypergoat/main.go (modify)\n- internal/server/graphiql.go (modify — package doc comment only)\n- internal/server/handlers_test.go (modify)\n- client/src/lib/env.ts (modify)\n- client/src/lib/graphql/client.ts (modify)\n- client/src/app/api/admin/graphql/route.ts (modify)\n- client/.env.example (modify)\n\n## What to do\n\n### Go files\n\n#### cmd/hypergoat/main.go\n1. Line ~109: Change `\"Starting Hypergoat - AT Protocol AppView Server\"` → `\"Starting Hyperindex - AT Protocol AppView Server\"`\n2. Line ~354: Change `\"name\": \"Hypergoat\"` → `\"name\": \"Hyperindex\"` in the root endpoint JSON response\n3. Line ~408: Change `ClientName: \"Hypergoat\"` → `ClientName: \"Hyperindex\"` in OAuth metadata\n4. Line ~487: Change `Title: \"Hypergoat GraphQL\"` → `Title: \"Hyperindex GraphQL\"` \n5. Line ~488: Change `# Hypergoat GraphQL API` → `# Hyperindex GraphQL API`\n6. Line ~506: Change `Title: \"Hypergoat Admin\"` → `Title: \"Hyperindex Admin\"`\n7. Line ~507: Change `# Hypergoat Admin API` → `# Hyperindex Admin API`\n\n#### internal/server/graphiql.go\n1. Line 1: Change package doc comment from `// Package server contains HTTP handlers for the hypergoat server.` → `// Package server contains HTTP handlers for the Hyperindex server.`\n\n#### internal/server/handlers_test.go\nSearch for test assertions containing \"Hypergoat\" and update them:\n1. Any test title like `\"Hypergoat GraphiQL\"` → `\"Hyperindex GraphiQL\"` (or whatever the test expects)\n\n### Client config files\n\n#### client/src/lib/env.ts\n1. Line ~28: Change comment from `// Hypergoat backend URL` → `// Hyperindex backend URL`\n2. 
Line ~29: Change `HYPERGOAT_URL: getEnv(\"HYPERGOAT_URL\", ...)` → `HYPERINDEX_URL: getEnv(\"HYPERINDEX_URL\", \"http://127.0.0.1:8080\")`\n Also support fallback: `getEnv(\"HYPERINDEX_URL\", getEnv(\"HYPERGOAT_URL\", \"http://127.0.0.1:8080\"))` for backward compatibility\n\n#### client/src/lib/graphql/client.ts\n1. Line ~13: Rename function `getHypergoatUrl` → `getHyperindexUrl`\n2. Line ~15: Change `HYPERGOAT_URL` → `HYPERINDEX_URL` in process.env reference\n3. Update comment on line ~13: `// Get Hyperindex URL for direct backend access`\n\n#### client/src/app/api/admin/graphql/route.ts\n1. Update all comments from \"Hypergoat\" → \"Hyperindex\":\n - Line ~9: `Checks session authentication and passes user DID to Hyperindex.`\n - Line ~16: `// Build headers for Hyperindex`\n2. Line ~30: Change `env.HYPERGOAT_URL` → `env.HYPERINDEX_URL`\n3. Line ~38: Change `// Log errors from Hypergoat` → `// Log errors from Hyperindex`\n\n#### client/.env.example\n1. Line ~2: Change `# Hypergoat Client Configuration` → `# Hyperindex Client Configuration`\n2. Line ~11: Change `HYPERGOAT_URL=http://127.0.0.1:8080` → `HYPERINDEX_URL=http://127.0.0.1:8080`\n\n## Don't\n- Do NOT change the Go module path (github.com/GainForest/hypergoat) — changing that would break all import paths\n- Do NOT rename the binary name (hypergoat) in Makefile/Dockerfile\n- Do NOT change any package-level doc comments EXCEPT the graphiql.go one specified above\n- Do NOT change any non-user-visible internal variable names in Go code\n- Do NOT change the CI workflow or Docker configuration","acceptance_criteria":"1. `go build ./...` succeeds\n2. `go test ./...` passes (especially handlers_test.go)\n3. grep -r \"Hypergoat\" cmd/hypergoat/main.go returns NO matches for user-visible strings (startup log, JSON, GraphiQL titles)\n4. client/src/lib/env.ts exports HYPERINDEX_URL (not HYPERGOAT_URL)\n5. client/src/app/api/admin/graphql/route.ts references env.HYPERINDEX_URL\n6. 
client/.env.example uses HYPERINDEX_URL\n7. `npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T17:37:14.846706+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:47:45.915489+08:00","closed_at":"2026-02-18T17:47:45.915489+08:00","close_reason":"d39a509 rename Hypergoat → Hyperindex in user-visible strings and client config","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.13","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:37:14.847467+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.2","title":"Foundation: Rewrite globals.css with OKLCH design tokens + dark mode","description":"## Files\n- client/src/app/globals.css (modify)\n\n## What to do\nReplace the entire contents of globals.css with the new design system CSS. The file must contain:\n\n### 1. Tailwind import + dark mode variant\n```css\n@import \"tailwindcss\";\n\n@custom-variant dark (\u0026:where(.dark, .dark *));\n```\n\n### 2. Root CSS custom properties (light mode)\n```css\n:root {\n --font-sans: system-ui, sans-serif;\n --font-display: var(--font-syne);\n --font-body: var(--font-outfit);\n --radius: 0.625rem;\n\n --background: oklch(0.985 0.002 260);\n --foreground: oklch(0.16 0.005 260);\n --card: oklch(0.995 0.001 260);\n --card-foreground: oklch(0.16 0.005 260);\n --primary: oklch(0.20 0.005 260);\n --primary-foreground: oklch(0.98 0.005 260);\n --secondary: oklch(0.96 0.005 260);\n --secondary-foreground: oklch(0.20 0.005 260);\n --muted: oklch(0.96 0.005 260);\n --muted-foreground: oklch(0.50 0.01 260);\n --accent: oklch(0.94 0.005 260);\n --accent-foreground: oklch(0.20 0.005 260);\n --destructive: oklch(0.577 0.245 27.325);\n --border: oklch(0.91 0.005 260);\n --input: oklch(0.91 0.005 260);\n --ring: oklch(0.35 0.01 260);\n}\n```\n\n### 3. Dark mode custom properties\n```css\n.dark {\n --background: oklch(0.13 0.005 260);\n --foreground: oklch(0.95 0.005 260);\n --card: oklch(0.17 0.005 260);\n --card-foreground: oklch(0.95 0.005 260);\n --primary: oklch(0.82 0.01 260);\n --primary-foreground: oklch(0.13 0.005 260);\n --secondary: oklch(0.22 0.01 260);\n --secondary-foreground: oklch(0.95 0.005 260);\n --muted: oklch(0.22 0.008 260);\n --muted-foreground: oklch(0.65 0.01 260);\n --accent: oklch(0.25 0.008 260);\n --accent-foreground: oklch(0.95 0.005 260);\n --destructive: oklch(0.577 0.245 27.325);\n --border: oklch(0.95 0 0 / 10%);\n --input: oklch(0.95 0 0 / 15%);\n --ring: oklch(0.55 0.01 260);\n}\n```\n\n### 4. 
Base body/heading styles\n```css\nbody {\n font-family: var(--font-body), var(--font-sans);\n background-color: var(--background);\n color: var(--foreground);\n}\n\nh1, h2, h3, h4, h5, h6 {\n font-family: var(--font-display), var(--font-sans);\n}\n```\n\n### 5. Glass panel utility class\n```css\n.glass-panel {\n background: oklch(1 0 0 / 0.7);\n backdrop-filter: blur(20px) saturate(1.4);\n border: 1px solid oklch(0.92 0.005 260 / 0.6);\n}\n\n.dark .glass-panel {\n background: oklch(0.18 0.005 260 / 0.7);\n border: 1px solid oklch(0.95 0 0 / 0.08);\n}\n```\n\n### 6. Noise background class\n```css\n.noise-bg {\n position: relative;\n}\n\n.noise-bg::before {\n content: \"\";\n position: absolute;\n inset: 0;\n opacity: 0.03;\n background-image: url(\"data:image/svg+xml,%3Csvg viewBox='0 0 256 256' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='noise'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.65' numOctaves='3' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23noise)'/%3E%3C/svg%3E\");\n background-size: 256px 256px;\n pointer-events: none;\n z-index: 0;\n}\n\n.dark .noise-bg::before {\n opacity: 0.06;\n}\n```\n\n### 7. Gradient mesh class\n```css\n.gradient-mesh {\n background:\n radial-gradient(ellipse at 20% 50%, oklch(0.40 0.01 260 / 0.08), transparent 50%),\n radial-gradient(ellipse at 80% 20%, oklch(0.50 0.01 250 / 0.06), transparent 50%),\n radial-gradient(ellipse at 50% 80%, oklch(0.55 0.008 240 / 0.05), transparent 50%);\n}\n```\n\n### 8. 
Animation keyframes\n```css\n@keyframes fadeInUp {\n from { opacity: 0; transform: translateY(12px); }\n to { opacity: 1; transform: translateY(0); }\n}\n\n@keyframes fadeIn {\n from { opacity: 0; }\n to { opacity: 1; }\n}\n\n.animate-fade-in-up {\n animation: fadeInUp 0.5s ease-out both;\n}\n\n.animate-fade-in {\n animation: fadeIn 0.4s ease-out both;\n}\n\n.stagger-children \u003e *:nth-child(1) { animation-delay: 0ms; }\n.stagger-children \u003e *:nth-child(2) { animation-delay: 60ms; }\n.stagger-children \u003e *:nth-child(3) { animation-delay: 120ms; }\n.stagger-children \u003e *:nth-child(4) { animation-delay: 180ms; }\n.stagger-children \u003e *:nth-child(5) { animation-delay: 240ms; }\n.stagger-children \u003e *:nth-child(6) { animation-delay: 300ms; }\n```\n\n### 9. Keep existing scrollbar styles but add dark mode\nKeep the scrollbar styles from the original file but add dark mode variants:\n```css\n.overflow-y-auto { scrollbar-width: thin; scrollbar-color: #e4e4e7 transparent; }\n.dark .overflow-y-auto { scrollbar-color: #3f3f46 transparent; }\n/* ... same webkit scrollbar styles + dark variants */\n```\n\n### 10. Keep focus and scrollbar-none utilities\nKeep the existing `scrollbar-none` and focus utilities from the original file.\n\n## Don't\n- Do NOT add any @import for fonts (fonts are loaded via next/font in layout.tsx)\n- Do NOT add Tailwind @theme directive — we use CSS custom properties directly\n- Do NOT remove the existing keyframe animation for float (may still be used temporarily)","acceptance_criteria":"1. globals.css contains `@custom-variant dark` directive\n2. globals.css defines all OKLCH CSS custom properties for both :root and .dark\n3. globals.css defines .glass-panel with backdrop-filter blur\n4. globals.css defines .noise-bg with SVG fractalNoise background\n5. globals.css defines .gradient-mesh with 3 radial gradients\n6. globals.css defines .animate-fade-in-up and .animate-fade-in keyframe animations\n7. 
body uses var(--background) and var(--foreground) for base colors\n8. Headings use var(--font-display) font-family\n9. `npm run build` in client/ succeeds","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T17:33:18.327031+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:45:13.574186+08:00","closed_at":"2026-02-18T17:45:13.574186+08:00","close_reason":"dbacf8f feat: rewrite globals.css with OKLCH design tokens + dark mode","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.2","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:33:18.327888+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.3","title":"Theme: Create ThemeProvider and ThemeToggle components","description":"## Files\n- client/src/components/ThemeProvider.tsx (create)\n- client/src/components/ThemeToggle.tsx (create)\n\n## What to do\n\n### 1. Create ThemeProvider.tsx\nCreate `client/src/components/ThemeProvider.tsx` with this exact content:\n```tsx\n\"use client\"\n\nimport { ThemeProvider as NextThemesProvider } from \"next-themes\"\n\nexport function ThemeProvider({ children }: { children: React.ReactNode }) {\n return (\n \u003cNextThemesProvider attribute=\"class\" defaultTheme=\"light\" enableSystem={false}\u003e\n {children}\n \u003c/NextThemesProvider\u003e\n )\n}\n```\n\n### 2. Create ThemeToggle.tsx\nCreate `client/src/components/ThemeToggle.tsx`:\n```tsx\n\"use client\"\n\nimport { useEffect, useState } from \"react\"\nimport { useTheme } from \"next-themes\"\n\nexport function ThemeToggle() {\n const { theme, setTheme } = useTheme()\n const [mounted, setMounted] = useState(false)\n\n useEffect(() =\u003e setMounted(true), [])\n\n if (!mounted) {\n return \u003cdiv className=\"w-9 h-9\" /\u003e\n }\n\n const isDark = theme === \"dark\"\n\n return (\n \u003cbutton\n onClick={() =\u003e setTheme(isDark ? \"light\" : \"dark\")}\n className=\"p-2 rounded-full transition-colors cursor-pointer\"\n style={{ color: \"var(--muted-foreground)\" }}\n aria-label={isDark ? \"Switch to light mode\" : \"Switch to dark mode\"}\n title={isDark ? \"Switch to light mode\" : \"Switch to dark mode\"}\n \u003e\n {isDark ? 
(\n \u003csvg className=\"w-5 h-5\" fill=\"none\" stroke=\"currentColor\" viewBox=\"0 0 24 24\"\u003e\n \u003cpath strokeLinecap=\"round\" strokeLinejoin=\"round\" strokeWidth={1.5} d=\"M12 3v2.25m6.364.386l-1.591 1.591M21 12h-2.25m-.386 6.364l-1.591-1.591M12 18.75V21m-4.773-4.227l-1.591 1.591M5.25 12H3m4.227-4.773L5.636 5.636M15.75 12a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0z\" /\u003e\n \u003c/svg\u003e\n ) : (\n \u003csvg className=\"w-5 h-5\" fill=\"none\" stroke=\"currentColor\" viewBox=\"0 0 24 24\"\u003e\n \u003cpath strokeLinecap=\"round\" strokeLinejoin=\"round\" strokeWidth={1.5} d=\"M21.752 15.002A9.718 9.718 0 0118 15.75c-5.385 0-9.75-4.365-9.75-9.75 0-1.33.266-2.597.748-3.752A9.753 9.753 0 003 11.25C3 16.635 7.365 21 12.75 21a9.753 9.753 0 009.002-5.998z\" /\u003e\n \u003c/svg\u003e\n )}\n \u003c/button\u003e\n )\n}\n```\n\nKey implementation details:\n- Uses `mounted` state to prevent SSR hydration mismatch\n- Renders a 9x9 placeholder div on server to prevent layout shift\n- Sun icon shows in dark mode (click → light), Moon icon shows in light mode (click → dark)\n- Uses inline style for color via CSS custom property (theme-aware)\n- Has aria-label and title for accessibility\n\n## Don't\n- Do NOT wire these into the layout yet (that is a separate task)\n- Do NOT add any other theme-related files\n- Do NOT use Tailwind dark: classes for the toggle button styling — use CSS custom properties via inline style since the component needs to work before Tailwind processes the dark variant","acceptance_criteria":"1. client/src/components/ThemeProvider.tsx exists and exports ThemeProvider\n2. client/src/components/ThemeToggle.tsx exists and exports ThemeToggle\n3. ThemeProvider wraps children with NextThemesProvider with attribute=\"class\", defaultTheme=\"light\"\n4. ThemeToggle uses useTheme() from next-themes\n5. ThemeToggle renders sun icon in dark mode, moon icon in light mode\n6. ThemeToggle has mounted guard to prevent hydration mismatch\n7. 
`npm run build` in client/ succeeds","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T17:33:38.319238+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:49:45.533058+08:00","closed_at":"2026-02-18T17:49:45.533058+08:00","close_reason":"812315d feat: add ThemeProvider and ThemeToggle components","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-0nk.3","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:33:38.319974+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.3","depends_on_id":"hyperindex-0nk.1","type":"blocks","created_at":"2026-02-18T17:33:38.321003+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.4","title":"Layout: Wire ThemeProvider, update body/html, replace logo, update footer","description":"## Files\n- client/src/app/layout.tsx (modify)\n\n## What to do\n\n### 1. Import ThemeProvider\nAdd import at top:\n```tsx\nimport { ThemeProvider } from \"@/components/ThemeProvider\";\n```\n\n### 2. Add suppressHydrationWarning to html tag\nChange `\u003chtml lang=\"en\"\u003e` to `\u003chtml lang=\"en\" suppressHydrationWarning\u003e`.\nThis prevents React warnings when next-themes injects the dark class before hydration.\n\n### 3. Wrap content with ThemeProvider\nInside the body tag, wrap everything with ThemeProvider (inside Providers):\n```tsx\n\u003cProviders\u003e\n \u003cThemeProvider\u003e\n \u003cdiv className=\"relative min-h-screen overflow-hidden flex flex-col noise-bg\"\u003e\n \u003cdiv className=\"gradient-mesh fixed inset-0 -z-10 pointer-events-none\" /\u003e\n \u003cGeometricBackground /\u003e\n \u003cHeader /\u003e\n \u003cmain className=\"relative flex-1 max-w-3xl w-full mx-auto px-4 sm:px-6 pb-8 z-10\"\u003e\n {children}\n \u003c/main\u003e\n {/* Footer */}\n \u003cfooter className=\"relative py-6 mt-auto z-10\"\u003e\n \u003cdiv className=\"max-w-3xl mx-auto px-4 sm:px-6\"\u003e\n \u003ca href=\"https://gainforest.earth\" target=\"_blank\" rel=\"noopener noreferrer\"\n className=\"flex items-center justify-center gap-1.5 hover:opacity-80 transition-opacity\"\u003e\n \u003cspan className=\"text-[11px] tracking-wide\" style={{ color: \"var(--muted-foreground)\" }}\u003eMade by\u003c/span\u003e\n \u003cImage src=\"/gainforest-logo.png\" alt=\"GainForest\" width={14} height={14} className=\"inline-block\" /\u003e\n \u003cspan className=\"text-[11px] font-medium tracking-wide\" style={{ color: \"var(--muted-foreground)\" }}\u003eGainForest\u003c/span\u003e\n \u003c/a\u003e\n \u003c/div\u003e\n \u003c/footer\u003e\n \u003c/div\u003e\n \u003c/ThemeProvider\u003e\n\u003c/Providers\u003e\n```\n\n### 4. 
Update metadata\nChange the icon references:\n```tsx\nexport const metadata: Metadata = {\n title: \"Hyperindex\",\n description: \"AT Protocol AppView Server\",\n icons: {\n icon: \"/hypercerts_logo.png\",\n apple: \"/hypercerts_logo.png\",\n },\n};\n```\n\n### 5. Add noise-bg and gradient-mesh classes\nThe outermost div inside body should have `noise-bg` class and a sibling gradient-mesh div (see layout structure above). These CSS classes are defined in globals.css (task 2).\n\n## Don't\n- Do NOT modify the Header component (separate task)\n- Do NOT modify GeometricBackground (separate task)\n- Do NOT remove the GeometricBackground import (will be updated separately)\n- Do NOT change the Providers component","acceptance_criteria":"1. html tag has suppressHydrationWarning attribute\n2. ThemeProvider wraps the content inside Providers\n3. The outermost content div has noise-bg class\n4. A gradient-mesh div exists as a fixed background layer\n5. Footer text color uses var(--muted-foreground) instead of hardcoded zinc colors\n6. Metadata icons point to /hypercerts_logo.png\n7. 
`npm run build` in client/ succeeds","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T17:33:56.040485+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:51:27.059161+08:00","closed_at":"2026-02-18T17:51:27.059161+08:00","close_reason":"a6db65d Layout: Wire ThemeProvider, update body/html, replace logo, update footer","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-0nk.4","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:33:56.041328+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.4","depends_on_id":"hyperindex-0nk.1","type":"blocks","created_at":"2026-02-18T17:33:56.042407+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.4","depends_on_id":"hyperindex-0nk.2","type":"blocks","created_at":"2026-02-18T17:33:56.043242+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.4","depends_on_id":"hyperindex-0nk.3","type":"blocks","created_at":"2026-02-18T17:33:56.044356+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.5","title":"Header: Redesign with glass-panel, hypercerts logo, Hyperindex branding, theme toggle","description":"## Files\n- client/src/components/layout/Header.tsx (modify)\n\n## What to do\nRedesign the Header component to match the hypercerts-scaffold design language.\n\n### 1. Add imports\n```tsx\nimport { ThemeToggle } from \"@/components/ThemeToggle\"\n```\n\n### 2. Replace the nav wrapper\nChange from:\n```tsx\n\u003cnav className=\"relative z-10 py-6\"\u003e\n \u003cdiv className=\"max-w-3xl mx-auto px-4 sm:px-6\"\u003e\n```\nto a sticky glass-panel navbar:\n```tsx\n\u003cnav className=\"sticky top-0 z-50 glass-panel border-b\" style={{ borderColor: \"var(--border)\" }}\u003e\n \u003cdiv className=\"h-16 max-w-3xl mx-auto px-4 sm:px-6 flex items-center\"\u003e\n```\n\n### 3. Replace Logo\nChange from:\n```tsx\n\u003cImage src=\"/logo.png\" alt=\"Hyperindex\" width={20} height={20} className=\"opacity-80\" /\u003e\n\u003cspan className=\"text-lg font-medium text-zinc-800 tracking-tight\"\u003ehi\u003c/span\u003e\n```\nto:\n```tsx\n\u003cImage src=\"/hypercerts_logo.png\" alt=\"Hyperindex\" width={22} height={22} /\u003e\n\u003cspan className=\"text-lg font-[family-name:var(--font-syne)] font-bold tracking-tight\" style={{ color: \"var(--foreground)\" }}\u003e\n Hyperindex\n\u003c/span\u003e\n```\n\n### 4. Update nav link styles\nReplace hardcoded zinc colors with CSS variable-based styles:\n- Active: `style={{ color: \"var(--foreground)\" }}` with `font-medium`\n- Inactive: `style={{ color: \"var(--muted-foreground)\" }}`\n- Use `font-[family-name:var(--font-outfit)]` for nav link text\n\n### 5. Add ThemeToggle to the right side\nAdd `\u003cThemeToggle /\u003e` between the nav links and the user menu area:\n```tsx\n{/* Right side */}\n\u003cdiv className=\"flex items-center gap-2 ml-auto\"\u003e\n \u003cThemeToggle /\u003e\n {/* existing user menu button/dropdown */}\n\u003c/div\u003e\n```\n\n### 6. 
Update dropdown styles for dark mode\nThe dropdown menu currently uses hardcoded `bg-white`, `border-zinc-200/60`, `text-zinc-*` classes. Replace with CSS variable-based styles:\n- Dropdown container: `className=\"glass-panel rounded-xl shadow-lg py-2 z-50\"` with `style={{ borderColor: \"var(--border)\" }}`\n- Text colors: Use `style={{ color: \"var(--foreground)\" }}` and `style={{ color: \"var(--muted-foreground)\" }}`\n- Hover backgrounds: Use `hover:bg-[var(--accent)]` or inline style\n- Active link indicator: Use `style={{ color: \"var(--primary)\" }}` instead of emerald\n\n### 7. Update login modal for dark mode\nReplace hardcoded colors in the login modal:\n- Modal backdrop: keep `bg-black/20 backdrop-blur-sm`\n- Modal card: `className=\"glass-panel rounded-xl shadow-lg p-6\"`\n- Input: Use CSS variables for border/focus colors\n- Heading: Use `font-[family-name:var(--font-syne)]`\n- Primary button: `style={{ backgroundColor: \"var(--primary)\", color: \"var(--primary-foreground)\" }}`\n- Secondary button: `style={{ backgroundColor: \"var(--secondary)\", color: \"var(--secondary-foreground)\" }}`\n\n### 8. Update user avatar fallback\nChange from `bg-emerald-100 text-emerald-700` to:\n```tsx\nstyle={{ backgroundColor: \"var(--accent)\", color: \"var(--accent-foreground)\" }}\nclassName=\"font-[family-name:var(--font-syne)] font-semibold\"\n```\n\n## Don't\n- Do NOT change the navigation links array (Dashboard, Lexicons, Backfill, API Docs)\n- Do NOT change the auth logic (login, logout, session handling)\n- Do NOT change the dropdown behavior (click outside to close, etc.)\n- Do NOT use Tailwind dark: prefix — use CSS custom properties via style prop or the glass-panel class","acceptance_criteria":"1. Nav is sticky top-0 z-50 with glass-panel class\n2. Logo shows hypercerts_logo.png (not logo.png) at 22x22\n3. Logo text reads \"Hyperindex\" (not \"hi\") in Syne bold font\n4. ThemeToggle component is rendered in the header\n5. 
No hardcoded zinc/emerald color classes remain (all replaced with CSS variables)\n6. Login modal uses glass-panel and CSS variables for colors\n7. Dropdown menu uses glass-panel and CSS variables\n8. `npm run build` in client/ succeeds","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T17:34:20.456786+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:52:50.122481+08:00","closed_at":"2026-02-18T17:52:50.122481+08:00","close_reason":"78649fb redesign Header with glass-panel, hypercerts logo, Hyperindex branding, theme toggle","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.5","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:34:20.457778+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.5","depends_on_id":"hyperindex-0nk.3","type":"blocks","created_at":"2026-02-18T17:34:20.459026+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.5","depends_on_id":"hyperindex-0nk.4","type":"blocks","created_at":"2026-02-18T17:34:20.459904+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.6","title":"Components: Update Card, Button, Input, Alert for dark mode + new tokens","description":"## Files\n- client/src/components/ui/Card.tsx (modify)\n- client/src/components/ui/Button.tsx (modify)\n- client/src/components/ui/Input.tsx (modify)\n- client/src/components/ui/Alert.tsx (modify)\n\n## What to do\nUpdate all four UI primitive components to use CSS custom properties instead of hardcoded zinc/emerald Tailwind classes. This makes them theme-aware for dark mode.\n\n### Card.tsx\nReplace hardcoded color classes with CSS variable equivalents:\n\n```tsx\n// Card container — replace:\n\"rounded-xl border border-zinc-200/60 bg-white shadow-sm\"\n// with:\n\"rounded-xl border shadow-sm\"\n// and add style prop: style={{ backgroundColor: \"var(--card)\", borderColor: \"var(--border)\", color: \"var(--card-foreground)\" }}\n\n// CardTitle — replace:\n\"font-[family-name:var(--font-garamond)] text-xl text-zinc-900 leading-none tracking-tight\"\n// with:\n\"font-[family-name:var(--font-syne)] text-xl leading-none tracking-tight\"\n// and add: style={{ color: \"var(--card-foreground)\" }}\n\n// CardDescription — replace:\n\"text-sm text-zinc-400\"\n// with:\n\"text-sm\"\n// and add: style={{ color: \"var(--muted-foreground)\" }}\n```\n\n### Button.tsx\nReplace the button variant colors:\n```tsx\nconst buttonVariants = {\n default: \"\", // will use style prop\n primary: \"\", // will use style prop\n outline: \"border bg-transparent\", // will use style prop for border/text\n ghost: \"bg-transparent\", // will use style prop\n destructive: \"\", // will use style prop\n};\n```\n\nFor each variant, apply colors via a `variantStyles` object that returns inline styles:\n```tsx\nconst variantStyles: Record\u003cstring, React.CSSProperties\u003e = {\n default: { backgroundColor: \"var(--primary)\", color: \"var(--primary-foreground)\" },\n primary: { backgroundColor: \"var(--primary)\", color: \"var(--primary-foreground)\" },\n outline: { 
borderColor: \"var(--border)\", color: \"var(--foreground)\" },\n ghost: { color: \"var(--foreground)\" },\n destructive: { backgroundColor: \"var(--destructive)\", color: \"#fff\" },\n};\n```\n\nApply the inline style from variantStyles in the component render. Keep hover via opacity: `hover:opacity-90`.\n\nReplace the focus ring from `focus-visible:ring-emerald-500/30 focus-visible:border-emerald-400` to `focus-visible:ring-2` with `style` using `var(--ring)`.\n\nUpdate font: replace implicit font with `font-[family-name:var(--font-outfit)]`.\n\n### Input.tsx\nReplace hardcoded colors:\n```tsx\n// Replace:\n\"bg-white/50 border border-zinc-200/60\"\n\"text-zinc-800 placeholder:text-zinc-300\"\n\"focus:ring-emerald-500/30 focus:border-emerald-400\"\n\"focus:bg-white/70\"\n\n// With CSS variable equivalents via style prop:\nstyle={{\n backgroundColor: \"var(--card)\",\n borderColor: error ? \"var(--destructive)\" : \"var(--input)\",\n color: \"var(--foreground)\",\n}}\n// Keep: \"w-full px-3 py-2 text-sm border rounded-lg focus:outline-none focus:ring-2 transition-all disabled:opacity-50 disabled:cursor-not-allowed\"\n```\n\nUpdate label: `style={{ color: \"var(--foreground)\" }}` instead of `text-zinc-600`\nUpdate hint: `style={{ color: \"var(--muted-foreground)\" }}` instead of `text-zinc-300`\n\n### Alert.tsx\nReplace hardcoded variant colors. 
Keep the semantic colors (blue, emerald/green, amber, red) but add dark mode support via opacity patterns:\n```tsx\nconst variants = {\n info: \"border\",\n success: \"border\",\n warning: \"border\",\n error: \"border\",\n};\n\nconst variantStyles: Record\u003cstring, React.CSSProperties\u003e = {\n info: { backgroundColor: \"oklch(0.60 0.15 250 / 0.08)\", color: \"oklch(0.45 0.15 250)\", borderColor: \"oklch(0.60 0.15 250 / 0.2)\" },\n success: { backgroundColor: \"oklch(0.65 0.15 155 / 0.08)\", color: \"oklch(0.45 0.15 155)\", borderColor: \"oklch(0.65 0.15 155 / 0.2)\" },\n warning: { backgroundColor: \"oklch(0.75 0.15 75 / 0.08)\", color: \"oklch(0.55 0.15 75)\", borderColor: \"oklch(0.75 0.15 75 / 0.2)\" },\n error: { backgroundColor: \"oklch(0.60 0.20 25 / 0.08)\", color: \"oklch(0.50 0.20 25)\", borderColor: \"oklch(0.60 0.20 25 / 0.2)\" },\n};\n```\nThese OKLCH colors with alpha channel work in both light and dark mode since they are semi-transparent overlays.\n\n## Don't\n- Do NOT change component APIs (props, exported names, displayName)\n- Do NOT remove the cn() utility usage — keep it for className merging\n- Do NOT change the Alert icons or Button loading spinner logic\n- Do NOT add new dependencies","acceptance_criteria":"1. Card, Button, Input, Alert all compile without errors\n2. No hardcoded zinc-* or emerald-* color classes remain in any of the four files\n3. All components use CSS custom properties (via style prop) for colors\n4. CardTitle uses font-syne (not font-garamond)\n5. Button variants use var(--primary), var(--destructive) etc.\n6. Input uses var(--input) for border, var(--foreground) for text\n7. Alert variants use OKLCH colors with alpha for cross-theme support\n8. 
`npm run build` in client/ succeeds","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":40,"created_at":"2026-02-18T17:34:48.882113+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:50:21.329807+08:00","closed_at":"2026-02-18T17:50:21.329807+08:00","close_reason":"e0029c7 feat: update Card, Button, Input, Alert for dark mode + CSS tokens","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.6","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:34:48.882955+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.6","depends_on_id":"hyperindex-0nk.2","type":"blocks","created_at":"2026-02-18T17:34:48.884128+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.7","title":"Background: Replace GeometricBackground with simplified noise-bg version","description":"## Files\n- client/src/components/layout/GeometricBackground.tsx (modify)\n\n## What to do\nSimplify the GeometricBackground component. The current version has emerald-colored flowing geometric shapes. Replace it with a much simpler component that complements the noise-bg and gradient-mesh already applied in layout.tsx.\n\nReplace the entire file content with:\n\n```tsx\n\"use client\"\n\nimport { useState, useEffect } from \"react\"\n\nexport function GeometricBackground() {\n const [showLogo, setShowLogo] = useState(false)\n\n useEffect(() =\u003e {\n const interval = setInterval(() =\u003e {\n setShowLogo(true)\n setTimeout(() =\u003e setShowLogo(false), 8000)\n }, 30000)\n\n const initialTimeout = setTimeout(() =\u003e {\n setShowLogo(true)\n setTimeout(() =\u003e setShowLogo(false), 8000)\n }, 2000)\n\n return () =\u003e {\n clearInterval(interval)\n clearTimeout(initialTimeout)\n }\n }, [])\n\n return (\n \u003cdiv\n className=\"fixed inset-0 pointer-events-none select-none overflow-hidden z-0\"\n aria-hidden=\"true\"\n \u003e\n {/* Subtle flowing lines using theme-aware colors */}\n \u003cdiv className=\"absolute right-[220px] top-0\"\u003e\n \u003cdiv className=\"w-0.5 h-20 rounded-full animate-[flowDown_9s_linear_infinite] [animation-fill-mode:backwards]\"\n style={{ background: \"linear-gradient(to bottom, transparent, var(--border), transparent)\" }} /\u003e\n \u003c/div\u003e\n \u003cdiv className=\"absolute right-[150px] top-0\"\u003e\n \u003cdiv className=\"w-0.5 h-16 rounded-full animate-[flowDown_7s_linear_infinite_2s] [animation-fill-mode:backwards]\"\n style={{ background: \"linear-gradient(to bottom, transparent, var(--border), transparent)\" }} /\u003e\n \u003c/div\u003e\n \u003cdiv className=\"absolute right-[80px] top-0\"\u003e\n \u003cdiv className=\"w-0.5 h-[70px] rounded-full animate-[flowDown_11s_linear_infinite_4s] 
[animation-fill-mode:backwards]\"\n style={{ background: \"linear-gradient(to bottom, transparent, var(--border), transparent)\" }} /\u003e\n \u003c/div\u003e\n\n {/* Hypercerts logo flow - appears every 30s */}\n {showLogo \u0026\u0026 (\n \u003cdiv className=\"absolute right-[140px] top-0 animate-[flowDownLogo_8s_ease-in-out_forwards] opacity-20\"\u003e\n {/* eslint-disable-next-line @next/next/no-img-element */}\n \u003cimg src=\"/hypercerts_logo.png\" alt=\"\" width={40} height={40} className=\"opacity-40\" /\u003e\n \u003c/div\u003e\n )}\n\n \u003cstyle jsx\u003e{`\n @keyframes flowDown {\n 0% { transform: translateY(-100px); }\n 100% { transform: translateY(100vh); }\n }\n @keyframes flowDownLogo {\n 0% { transform: translateY(-60px); opacity: 0; }\n 10% { opacity: 1; }\n 90% { opacity: 1; }\n 100% { transform: translateY(100vh); opacity: 0; }\n }\n `}\u003c/style\u003e\n \u003c/div\u003e\n )\n}\n```\n\nKey changes from current version:\n1. Replace all `emerald-400` colors with `var(--border)` CSS variable (theme-aware)\n2. Remove the geometric shapes (squares, triangles) — just keep subtle flowing lines\n3. Replace the inline GainForest SVG logo with the hypercerts_logo.png image\n4. Keep the same timing/animation behavior\n5. Add z-0 to ensure it stays behind content\n\n## Don't\n- Do NOT change the export name (still `GeometricBackground`)\n- Do NOT change the animation timing (30s interval, 8s display)\n- Do NOT remove the component entirely (layout.tsx imports it)","acceptance_criteria":"1. GeometricBackground no longer contains any emerald/green color references\n2. Flowing lines use var(--border) for theme-aware colors\n3. Logo animation uses hypercerts_logo.png instead of inline SVG\n4. Component still exports as GeometricBackground\n5. 
`npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T17:35:09.329559+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:49:42.486326+08:00","closed_at":"2026-02-18T17:49:42.486326+08:00","close_reason":"4143af6 simplify GeometricBackground with theme-aware colors and hypercerts logo","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-0nk.7","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:35:09.330405+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.7","depends_on_id":"hyperindex-0nk.2","type":"blocks","created_at":"2026-02-18T17:35:09.331693+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.8","title":"Pages: Update Dashboard page + sub-components for dark mode","description":"## Files\n- client/src/app/page.tsx (modify)\n- client/src/components/dashboard/StatsCards.tsx (modify)\n- client/src/components/dashboard/ActivityChart.tsx (modify)\n- client/src/components/dashboard/RecentActivity.tsx (modify)\n\n## What to do\nUpdate the Dashboard page and its three sub-components to use CSS custom properties instead of hardcoded zinc/emerald Tailwind classes.\n\n### page.tsx (Dashboard)\nReplace all hardcoded color classes:\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-500` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-800` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-emerald-600`, `text-emerald-700` → `style={{ color: \"var(--primary)\" }}`\n- `group-hover:text-emerald-600`, `group-hover:text-emerald-700` → keep hover but use CSS variable\n- `hover:bg-zinc-50` → `hover:opacity-80` or similar\n- `bg-emerald-50` references → `style={{ backgroundColor: \"var(--accent)\" }}`\n- `font-[family-name:var(--font-garamond)]` → `font-[family-name:var(--font-syne)]`\n\nThe heading \"Dashboard\" should use `font-[family-name:var(--font-syne)]` instead of `font-garamond`.\n\n### StatsCards.tsx\nReplace:\n- `text-emerald-600` → `style={{ color: \"oklch(0.65 0.15 155)\" }}` (keep green for Records semantic)\n- `text-blue-600` → keep as is (blue for Actors is fine)\n- `text-purple-600` → keep as is (purple for Lexicons is fine)\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-200` → `style={{ color: \"var(--border)\" }}`\n- `bg-zinc-100` → `style={{ backgroundColor: \"var(--muted)\" }}`\n\n### ActivityChart.tsx\nReplace:\n- `font-[family-name:var(--font-garamond)]` → `font-[family-name:var(--font-syne)]`\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-400` → 
`style={{ color: \"var(--muted-foreground)\" }}`\n- `bg-emerald-50 text-emerald-600` → `style={{ backgroundColor: \"var(--accent)\", color: \"var(--primary)\" }}`\n- `border-zinc-200/60 bg-white` → `style={{ backgroundColor: \"var(--card)\", borderColor: \"var(--border)\" }}`\n- `bg-zinc-50` (skeleton) → `style={{ backgroundColor: \"var(--muted)\" }}`\n- Update Recharts Tooltip `contentStyle` to use CSS variables:\n ```tsx\n contentStyle={{\n backgroundColor: \"var(--card)\",\n border: \"1px solid var(--border)\",\n borderRadius: \"0.75rem\",\n fontSize: \"12px\",\n color: \"var(--foreground)\",\n }}\n ```\n- Update CartesianGrid stroke: `stroke=\"var(--border)\"`\n- Update XAxis/YAxis stroke: `stroke=\"var(--muted-foreground)\"`\n\n### RecentActivity.tsx\nReplace:\n- `font-[family-name:var(--font-garamond)]` → `font-[family-name:var(--font-syne)]`\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-800` → `style={{ color: \"var(--foreground)\" }}`\n- `border-zinc-200/60 bg-white` → `style={{ backgroundColor: \"var(--card)\", borderColor: \"var(--border)\" }}`\n- `divide-zinc-100` → style with `var(--border)` for divider color\n- `bg-zinc-50` → style with `var(--muted)` \n- `hover:bg-zinc-50` → `hover:opacity-90`\n- Operation badges: Keep the semantic colors (emerald=create, blue=update, amber=delete) but use OKLCH with alpha for cross-theme:\n - create: `style={{ backgroundColor: \"oklch(0.65 0.15 155 / 0.1)\", color: \"oklch(0.55 0.15 155)\" }}`\n - update: `style={{ backgroundColor: \"oklch(0.60 0.15 250 / 0.1)\", color: \"oklch(0.50 0.15 250)\" }}`\n - delete: `style={{ backgroundColor: \"oklch(0.75 0.15 75 / 0.1)\", color: \"oklch(0.60 0.15 75)\" }}`\n\n## Don't\n- Do NOT change component props or APIs\n- Do NOT change the data fetching logic\n- Do NOT change the Recharts Area colors (keep green/blue/amber for creates/updates/deletes)\n- Do NOT change functionality 
— only visual styling","acceptance_criteria":"1. No hardcoded zinc-* or emerald-* Tailwind color classes remain in any of the four files\n2. All text uses CSS variables for color (var(--foreground), var(--muted-foreground))\n3. All backgrounds use CSS variables (var(--card), var(--muted), var(--accent))\n4. font-garamond references replaced with font-syne\n5. Recharts Tooltip and grid use CSS variables\n6. `npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T17:35:33.327352+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:53:58.353165+08:00","closed_at":"2026-02-18T17:53:58.353165+08:00","close_reason":"4b86216 update Dashboard page and sub-components for dark mode","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.8","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:35:33.328235+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.8","depends_on_id":"hyperindex-0nk.4","type":"blocks","created_at":"2026-02-18T17:35:33.329516+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.8","depends_on_id":"hyperindex-0nk.6","type":"blocks","created_at":"2026-02-18T17:35:33.330641+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-0nk.9","title":"Pages: Update Docs page — rename branding + dark mode styles","description":"## Files\n- client/src/app/docs/page.tsx (modify)\n\n## What to do\nUpdate the API docs page for new branding and dark mode support.\n\n### 1. Update API_ENDPOINT constant\nThe constant `API_ENDPOINT` is currently hardcoded to `https://hypergoat-app-production.up.railway.app`. Leave it as is for now — this is a deployment URL that should be configured via env vars, but that is out of scope. Just note: do NOT rename this URL.\n\n### 2. Update branding text\nOn line 631 (approximately), change:\n```tsx\n\u003cstrong className=\"text-zinc-700\"\u003eHyperindex\u003c/strong\u003e (\u003cem\u003ehi\u003c/em\u003e, formerly Hypergoat) is{\" \"}\n```\nto:\n```tsx\n\u003cstrong style={{ color: \"var(--foreground)\" }}\u003eHyperindex\u003c/strong\u003e is{\" \"}\n```\nRemove the \"(hi, formerly Hypergoat)\" parenthetical entirely.\n\n### 3. Replace all font-garamond references\nReplace every instance of `font-[family-name:var(--font-garamond)]` with `font-[family-name:var(--font-syne)]`.\nThis appears in all `\u003ch2\u003e` and `\u003ch3\u003e` headings throughout the page.\n\n### 4. 
Replace all hardcoded color classes\nApply the same pattern as other pages — replace Tailwind color classes with CSS custom properties:\n\n**Text colors:**\n- `text-zinc-900` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-800` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-700` → `style={{ color: \"var(--foreground)\" }}`\n- `text-zinc-600` → `style={{ color: \"var(--secondary-foreground)\" }}`\n- `text-zinc-500` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-400` → `style={{ color: \"var(--muted-foreground)\" }}`\n- `text-zinc-300` → `style={{ color: \"var(--border)\" }}`\n- `text-emerald-600` → `style={{ color: \"var(--primary)\" }}`\n\n**Backgrounds:**\n- `bg-white` → `style={{ backgroundColor: \"var(--card)\" }}`\n- `bg-zinc-50` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-zinc-100` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-emerald-50` → `style={{ backgroundColor: \"var(--accent)\" }}`\n- `bg-emerald-100` → `style={{ backgroundColor: \"var(--accent)\" }}`\n\n**Borders:**\n- `border-zinc-200/60` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-200` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-zinc-100` → `style={{ borderColor: \"var(--border)\" }}`\n- `border-emerald-200/60` → `style={{ borderColor: \"var(--border)\" }}`\n\n**Code blocks (CodeBlock component):**\n- `bg-zinc-800` (header bar) → keep dark for code blocks — they look good dark in both themes\n- `bg-zinc-900` (code content) → keep dark\n- `bg-zinc-700/50` (copy button) → keep dark\n- `text-zinc-100` (code text) → keep light text for dark code blocks\n- These code blocks should stay dark-themed regardless of page theme (like most code editors)\n\n**LanguageTabs component:**\n- `bg-zinc-100` → `style={{ backgroundColor: \"var(--muted)\" }}`\n- `bg-white text-zinc-900` (active) → `style={{ backgroundColor: \"var(--card)\", color: \"var(--foreground)\" }}`\n- `text-zinc-500` → `style={{ color: 
\"var(--muted-foreground)\" }}`\n\n**Protocol Details cards:**\n- `bg-emerald-100` → `style={{ backgroundColor: \"oklch(0.65 0.15 155 / 0.15)\" }}`\n- `text-emerald-600` → `style={{ color: \"oklch(0.55 0.15 155)\" }}`\n- `bg-blue-100` → `style={{ backgroundColor: \"oklch(0.60 0.15 250 / 0.15)\" }}`\n- `text-blue-600` → `style={{ color: \"oklch(0.50 0.15 250)\" }}`\n\n**Inline code elements:**\n- `bg-zinc-100 text-zinc-700` → `style={{ backgroundColor: \"var(--muted)\", color: \"var(--foreground)\" }}`\n- `bg-emerald-100` → `style={{ backgroundColor: \"var(--accent)\" }}`\n\n**Tips section gradient:**\n- `border-emerald-200/60 bg-gradient-to-br from-emerald-50/50 to-white` → `style={{ borderColor: \"var(--border)\", backgroundColor: \"var(--card)\" }}`\n\n**Tab buttons:**\n- Active: `style={{ backgroundColor: \"var(--card)\", color: \"var(--foreground)\" }}`\n- Inactive: `style={{ color: \"var(--muted-foreground)\" }}`\n\n### 5. Endpoint code blocks\nThe endpoint display code blocks (`bg-zinc-900 text-emerald-400`, `text-blue-400`, `text-purple-400`) should stay as dark-themed blocks. These look good in both light and dark mode since they are code snippets. Leave the colors of the endpoint URL text as-is.\n\n## Don't\n- Do NOT change the API_ENDPOINT URL value\n- Do NOT change the code examples content (only their wrapping component styles)\n- Do NOT change the CodeBlock highlighting logic (highlightJS, highlightPython, highlightShell)\n- Do NOT change the syntax highlighting token colors inside code blocks (those are fine as-is on dark backgrounds)\n- Do NOT change the component structure or data flow","acceptance_criteria":"1. No \"(hi, formerly Hypergoat)\" text remains\n2. All headings use font-syne instead of font-garamond\n3. No hardcoded zinc-*/emerald-* classes remain except inside CodeBlock dark backgrounds\n4. LanguageTabs use CSS variables for colors\n5. Protocol detail cards use OKLCH colors\n6. 
Code blocks remain dark-themed in both light and dark mode\n7. `npm run build` in client/ succeeds","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T17:36:02.714257+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:55:17.39867+08:00","closed_at":"2026-02-18T17:55:17.39867+08:00","close_reason":"0198d61 docs page: update branding, font-syne, dark mode CSS vars","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-0nk.9","depends_on_id":"hyperindex-0nk","type":"parent-child","created_at":"2026-02-18T17:36:02.715349+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.9","depends_on_id":"hyperindex-0nk.4","type":"blocks","created_at":"2026-02-18T17:36:02.716722+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-0nk.9","depends_on_id":"hyperindex-0nk.6","type":"blocks","created_at":"2026-02-18T17:36:02.717721+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm","title":"Epic: Replace Jetstream+Backfill with Tap sidecar","description":"## Why\nHyperindex currently has ~2,500 lines of complex data ingestion infrastructure across two subsystems:\n- **Jetstream consumer** (810 lines) — WebSocket client to Bluesky's Jetstream service\n- **Backfill worker** (1,329 lines) — 3-phase pipeline with DID resolution, CAR/CBOR parsing, 3 levels of concurrency semaphores\n\nCritical gaps in current implementation:\n1. **No cryptographic verification** — we accept unverified data from Jetstream\n2. **No ordering guarantees** — backfill and live events race (backfill may arrive after live events)\n3. **No at-least-once delivery** — events lost on crash between receive and DB write\n4. **Identity events ignored** — handle changes, account status not tracked\n\nBluesky's official **Tap** utility (https://github.com/bluesky-social/indigo/tree/main/cmd/tap) handles all of this as a sidecar process. It verifies repo structure + MST integrity + identity signatures, manages backfill automatically, provides strict per-repo ordering, and outputs simple JSON over WebSocket with acks.\n\n## What Success Looks Like\n- Tap runs as a sidecar (Docker or binary) alongside Hyperindex\n- Hyperindex connects to Tap's WebSocket at ws://localhost:2480/channel\n- All record events (create/update/delete) stored in RecordsRepository\n- Identity events update ActorsRepository (handle, status)\n- GraphQL subscriptions receive events from Tap\n- Admin backfill callbacks use Tap's /repos/add API instead of custom backfill code\n- internal/jetstream/ and internal/backfill/ packages deleted\n- Heavy deps removed: bluesky-social/indigo, fxamacker/cbor, ipfs/go-cid, hashicorp/go-retryablehttp\n- All existing tests still pass\n- ~2,100 lines deleted, ~500 lines added\n\n## Key Constraints\n- All changes on the tap-feature branch (branched from filter-feature)\n- Tap event format: {id, type: \"record\"|\"identity\", record: {live, rev, did, 
collection, rkey, action, cid, record}, identity: {did, handle, isActive, status}}\n- Must support ack-based delivery (send ack after DB write succeeds)\n- Must handle Tap reconnection gracefully\n- Admin single-actor backfill → POST /repos/add to Tap\n- Admin full-network backfill → configure TAP_SIGNAL_COLLECTION or TAP_FULL_NETWORK on Tap side\n- Config env vars: TAP_URL (default ws://localhost:2480), TAP_ADMIN_PASSWORD, TAP_DISABLE_ACKS\n- Keep gorilla/websocket dep (already used) or use nhooyr/websocket (already used for subscriptions)","status":"closed","priority":1,"issue_type":"epic","owner":"einstein.climateai.org","created_at":"2026-02-18T16:11:26.42114+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:22:46.227411+08:00","closed_at":"2026-02-18T16:22:46.227411+08:00","close_reason":"5dfe636 All 8 tasks complete — Tap sidecar integration with TAP_ENABLED feature flag","labels":["scope:medium"]}
+{"id":"hyperindex-2hm.1","title":"Add Tap config env vars to config package","description":"## Files\n- internal/config/config.go (modify)\n- internal/config/config_test.go (modify)\n\n## What to do\nAdd Tap-related configuration fields to the Config struct and Load function. These replace the Jetstream and Backfill env vars (but do NOT remove the old ones yet — that happens in a later task).\n\n1. Add fields to Config struct (after the Backfill section, around line 57):\n\\`\\`\\`go\n// Tap (replaces Jetstream + Backfill)\nTapURL string // Tap WebSocket URL (default: ws://localhost:2480)\nTapAdminPassword string // Tap admin API password for Basic auth\nTapDisableAcks bool // Fire-and-forget mode (default: false)\nTapEnabled bool // Use Tap instead of Jetstream+Backfill (default: false)\n\\`\\`\\`\n\n2. Add env var loading in Load() (after the Backfill section):\n\\`\\`\\`go\n// Tap\nTapURL: getEnv(\"TAP_URL\", \"ws://localhost:2480\"),\nTapAdminPassword: getEnv(\"TAP_ADMIN_PASSWORD\", \"\"),\nTapDisableAcks: getEnvBool(\"TAP_DISABLE_ACKS\", false),\nTapEnabled: getEnvBool(\"TAP_ENABLED\", false),\n\\`\\`\\`\n\n3. Add Tap fields to LogConfig() (redact TapAdminPassword):\n\\`\\`\\`go\n\"tap_enabled\", c.TapEnabled,\n\"tap_url\", c.TapURL,\n\"tap_admin_password_set\", c.TapAdminPassword != \"\",\n\"tap_disable_acks\", c.TapDisableAcks,\n\\`\\`\\`\n\n## Dont\n- Do NOT remove existing Jetstream/Backfill config fields yet\n- Do NOT modify main.go\n- Do NOT add validation that prevents running without Tap","acceptance_criteria":"1. Config struct has TapURL, TapAdminPassword, TapDisableAcks, TapEnabled fields\n2. TAP_URL env var defaults to \\\"ws://localhost:2480\\\"\n3. TAP_ENABLED defaults to false\n4. LogConfig logs tap_enabled, tap_url, tap_admin_password_set (not the password itself), tap_disable_acks\n5. Existing Jetstream/Backfill config fields still present and working\n6. go test ./internal/config/... passes\n7. go test ./... 
passes","status":"closed","priority":1,"issue_type":"task","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T16:11:42.034827+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:15:41.766867+08:00","closed_at":"2026-02-18T16:15:41.766867+08:00","close_reason":"68efb07 Add Tap config env vars to config package","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-2hm.1","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:11:42.036029+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.2","title":"Define Tap event types and JSON parser","description":"## Files\n- internal/tap/event.go (create)\n- internal/tap/event_test.go (create)\n\n## What to do\nCreate the internal/tap/ package with event type definitions matching Tap's JSON output format. These are the Go structs that the Tap client will parse incoming WebSocket messages into.\n\n1. Create internal/tap/event.go:\n\\`\\`\\`go\n// Package tap provides a client for Bluesky's Tap sync utility.\npackage tap\n\nimport (\n \"encoding/json\"\n \"fmt\"\n)\n\n// EventType is the top-level event type from Tap.\ntype EventType string\n\nconst (\n EventTypeRecord EventType = \"record\"\n EventTypeIdentity EventType = \"identity\"\n)\n\n// ActionType is the record action type.\ntype ActionType string\n\nconst (\n ActionCreate ActionType = \"create\"\n ActionUpdate ActionType = \"update\"\n ActionDelete ActionType = \"delete\"\n)\n\n// Event is the top-level Tap event envelope.\ntype Event struct {\n ID int64 \\`json:\\\"id\\\"\\`\n Type EventType \\`json:\\\"type\\\"\\`\n Record *RecordEvent \\`json:\\\"record,omitempty\\\"\\`\n Identity *IdentityEvent \\`json:\\\"identity,omitempty\\\"\\`\n}\n\n// RecordEvent is a record change event from Tap.\ntype RecordEvent struct {\n Live bool \\`json:\\\"live\\\"\\`\n Rev string \\`json:\\\"rev\\\"\\`\n DID string \\`json:\\\"did\\\"\\`\n Collection string \\`json:\\\"collection\\\"\\`\n RKey string \\`json:\\\"rkey\\\"\\`\n Action ActionType \\`json:\\\"action\\\"\\`\n CID string \\`json:\\\"cid,omitempty\\\"\\`\n Record json.RawMessage \\`json:\\\"record,omitempty\\\"\\` // Only for create/update\n}\n\n// URI returns the AT-URI for this record event.\nfunc (r *RecordEvent) URI() string {\n return fmt.Sprintf(\"at://%s/%s/%s\", r.DID, r.Collection, r.RKey)\n}\n\n// IdentityEvent is an identity change event from Tap.\ntype IdentityEvent struct {\n DID string \\`json:\\\"did\\\"\\`\n Handle string \\`json:\\\"handle\\\"\\`\n IsActive bool 
\\`json:\\\"isActive\\\"\\`\n Status string \\`json:\\\"status\\\"\\` // active, takendown, suspended, deactivated, deleted\n}\n\n// ParseEvent parses a Tap event from JSON bytes.\nfunc ParseEvent(data []byte) (*Event, error) {\n var event Event\n if err := json.Unmarshal(data, \u0026event); err != nil {\n return nil, fmt.Errorf(\"failed to parse tap event: %w\", err)\n }\n if event.Type == \"\" {\n return nil, fmt.Errorf(\"tap event missing type field\")\n }\n return \u0026event, nil\n}\n\n// IsRecord returns true if this is a record event.\nfunc (e *Event) IsRecord() bool {\n return e.Type == EventTypeRecord \u0026\u0026 e.Record != nil\n}\n\n// IsIdentity returns true if this is an identity event.\nfunc (e *Event) IsIdentity() bool {\n return e.Type == EventTypeIdentity \u0026\u0026 e.Identity != nil\n}\n\\`\\`\\`\n\n2. Create event_test.go with table-driven tests:\n- Parse valid record create event (with record body)\n- Parse valid record delete event (no record body, no cid)\n- Parse valid identity event\n- Parse event with missing type field → error\n- Parse invalid JSON → error\n- RecordEvent.URI() returns correct AT-URI\n- IsRecord/IsIdentity helpers\n\n## Dont\n- Do NOT create the WebSocket client yet — that is a separate task\n- Do NOT import any external packages — only stdlib\n- Do NOT duplicate jetstream event types — this is a clean replacement","acceptance_criteria":"1. internal/tap/event.go exists with Event, RecordEvent, IdentityEvent types\n2. ParseEvent correctly parses Tap JSON format: {id: 12345, type: \"record\", record: {live: true, rev: \"...\", did: \"did:plc:abc\", collection: \"app.bsky.feed.post\", rkey: \"abc\", action: \"create\", cid: \"bafyrei...\", record: {\"text\": \"hello\"}}}\n3. ParseEvent correctly parses identity events: {id: 12346, type: \"identity\", identity: {did: \"did:plc:abc\", handle: \"alice.bsky.social\", isActive: true, status: \"active\"}}\n4. ParseEvent returns error for missing type field\n5. 
RecordEvent.URI() returns \"at://did:plc:abc/app.bsky.feed.post/abc\"\n6. go test -v ./internal/tap/... passes with all test cases\n7. go test ./... passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T16:12:02.155562+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:15:55.244406+08:00","closed_at":"2026-02-18T16:15:55.244406+08:00","close_reason":"7e1d850 feat: add internal/tap package with event types and JSON parser","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.2","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:12:02.156673+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.3","title":"Implement Tap HTTP admin client for /repos/add and /repos/remove","description":"## Files\n- internal/tap/admin.go (create)\n- internal/tap/admin_test.go (create)\n\n## What to do\nCreate an HTTP client for Tap's admin API endpoints. This is used by the admin backfill callbacks to add/remove repos from Tap's tracking list.\n\n1. Create internal/tap/admin.go:\n\\`\\`\\`go\n// AdminClient communicates with Tap's HTTP API for repo management.\ntype AdminClient struct {\n baseURL string // e.g., \"http://localhost:2480\"\n password string // Basic auth password (empty = no auth)\n client *http.Client\n}\n\n// NewAdminClient creates a new Tap admin HTTP client.\nfunc NewAdminClient(baseURL, password string) *AdminClient\n\n// AddRepos adds DIDs to Tap's tracking list, triggering backfill.\n// POST /repos/add with body {\"dids\": [\"did:plc:abc\", ...]}\nfunc (c *AdminClient) AddRepos(ctx context.Context, dids []string) error\n\n// RemoveRepos removes DIDs from Tap's tracking list.\n// POST /repos/remove with body {\"dids\": [\"did:plc:abc\", ...]}\nfunc (c *AdminClient) RemoveRepos(ctx context.Context, dids []string) error\n\n// Health checks if Tap is healthy.\n// GET /health — expects {\"status\":\"ok\"}\nfunc (c *AdminClient) Health(ctx context.Context) error\n\n// RepoInfo gets info about a tracked repo.\n// GET /info/:did\nfunc (c *AdminClient) RepoInfo(ctx context.Context, did string) (*RepoInfoResponse, error)\n\\`\\`\\`\n\n2. Basic auth: if password is set, add header `Authorization: Basic base64(\"admin:\" + password)` to all requests.\n\n3. Error handling: return wrapped errors with HTTP status code context.\n\n4. HTTP client: use stdlib http.Client with 30s timeout.\n\n## Dont\n- Do NOT add WebSocket logic here — that is the separate client task\n- Do NOT import any external HTTP packages — use stdlib net/http\n- Do NOT add retry logic — keep it simple, Tap handles its own retries","acceptance_criteria":"1. 
AdminClient.AddRepos sends POST to /repos/add with JSON body {\"dids\": [...]}\n2. AdminClient.RemoveRepos sends POST to /repos/remove with JSON body {\"dids\": [...]}\n3. AdminClient.Health sends GET to /health and verifies response\n4. Basic auth header is included when password is configured\n5. No auth header when password is empty\n6. HTTP errors return wrapped error with status code\n7. Table-driven tests using httptest.NewServer for all methods\n8. go test -v ./internal/tap/... passes\n9. go test ./... passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:12:17.345313+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:16:10.59648+08:00","closed_at":"2026-02-18T16:16:10.59648+08:00","close_reason":"85ed81f implement Tap HTTP admin client","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.3","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:12:17.346524+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.4","title":"Implement Tap WebSocket consumer with ack support","description":"## Files\n- internal/tap/consumer.go (create)\n- internal/tap/consumer_test.go (create)\n\n## What to do\nCreate the core Tap WebSocket consumer that connects to Tap's /channel endpoint, receives events, dispatches them, and sends acks. This replaces internal/jetstream/client.go + consumer.go.\n\n1. Create internal/tap/consumer.go:\n\\`\\`\\`go\n// ConsumerConfig configures the Tap consumer.\ntype ConsumerConfig struct {\n TapURL string // WebSocket URL (e.g., \"ws://localhost:2480\")\n DisableAcks bool // Fire-and-forget mode\n}\n\n// EventHandler processes Tap events. Return nil to ack, error to nack.\ntype EventHandler interface {\n HandleRecord(ctx context.Context, event *RecordEvent) error\n HandleIdentity(ctx context.Context, event *IdentityEvent) error\n}\n\n// Consumer connects to Tap's WebSocket and dispatches events.\ntype Consumer struct {\n config ConsumerConfig\n handler EventHandler\n // ... internal state\n}\n\nfunc NewConsumer(config ConsumerConfig, handler EventHandler) *Consumer\n\n// Start connects to Tap and begins processing events.\n// Blocks until context is cancelled or fatal error.\n// Automatically reconnects on connection loss with exponential backoff.\nfunc (c *Consumer) Start(ctx context.Context) error\n\n// Stop gracefully shuts down the consumer.\nfunc (c *Consumer) Stop()\n\\`\\`\\`\n\n2. WebSocket connection:\n - Connect to {TapURL}/channel\n - Use gorilla/websocket (already in go.mod)\n - Read text messages (JSON events)\n - Parse with tap.ParseEvent()\n - Dispatch to EventHandler.HandleRecord or HandleIdentity\n - After successful handling, send ack: write text message with the event ID as string (e.g., \"12345\")\n - If DisableAcks is true, skip ack sending\n\n3. Reconnection:\n - On connection loss, exponential backoff: 1s → 2s → 4s → ... → 2min cap\n - Reset backoff on successful connection\n - Log reconnection attempts\n\n4. 
Stats tracking (same pattern as current jetstream consumer):\n\\`\\`\\`go\ntype Stats struct {\n EventsReceived int64\n RecordsCreated int64\n RecordsUpdated int64\n RecordsDeleted int64\n IdentityEvents int64\n Errors int64\n}\nfunc (c *Consumer) Stats() Stats\n\\`\\`\\`\n\n## Dont\n- Do NOT implement EventHandler here — that is the next task\n- Do NOT handle cursor management — Tap manages cursors via acks\n- Do NOT use nhooyr/websocket — use gorilla/websocket for consistency with existing code\n- Do NOT import any repository packages","acceptance_criteria":"1. Consumer connects to {TapURL}/channel via WebSocket\n2. Consumer parses incoming JSON messages as Tap events\n3. Record events dispatched to handler.HandleRecord, identity events to handler.HandleIdentity\n4. After successful handler return, consumer sends event ID as text message (ack)\n5. When DisableAcks=true, no ack is sent\n6. On connection loss, consumer reconnects with exponential backoff (1s to 2min)\n7. Consumer.Stop() gracefully closes the WebSocket connection\n8. Stats() returns event counts\n9. Tests use a mock WebSocket server (httptest) to verify connect, receive, ack, reconnect\n10. go test -v ./internal/tap/... passes\n11. go test ./... 
passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T16:12:35.965149+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:18:53.850762+08:00","closed_at":"2026-02-18T16:18:53.850762+08:00","close_reason":"8469c62 implement Tap WebSocket consumer with ack support","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.4","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:12:35.966442+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.4","depends_on_id":"hyperindex-2hm.2","type":"blocks","created_at":"2026-02-18T16:12:35.967988+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.5","title":"Implement Tap EventHandler that stores records and publishes to PubSub","description":"## Files\n- internal/tap/handler.go (create)\n- internal/tap/handler_test.go (create)\n\n## What to do\nImplement the EventHandler interface from the consumer task. This is the bridge between Tap events and the existing repository/subscription layer. It mirrors what internal/jetstream/consumer.go's handleCommit() does today.\n\n1. Create internal/tap/handler.go:\n\\`\\`\\`go\n// IndexHandler implements EventHandler and stores events in the database.\ntype IndexHandler struct {\n records *repositories.RecordsRepository\n actors *repositories.ActorsRepository\n activity *repositories.JetstreamActivityRepository // reuse existing activity repo\n pubsub *subscription.PubSub\n}\n\nfunc NewIndexHandler(\n records *repositories.RecordsRepository,\n actors *repositories.ActorsRepository,\n activity *repositories.JetstreamActivityRepository,\n pubsub *subscription.PubSub,\n) *IndexHandler\n\\`\\`\\`\n\n2. 
HandleRecord implementation:\n\\`\\`\\`go\nfunc (h *IndexHandler) HandleRecord(ctx context.Context, event *RecordEvent) error {\n uri := event.URI()\n\n switch event.Action {\n case ActionCreate, ActionUpdate:\n // Ensure actor exists\n h.actors.Upsert(ctx, event.DID, \"\") // empty handle, identity events update it\n \n // Store record\n _, err := h.records.Insert(ctx, uri, event.CID, event.DID, event.Collection, string(event.Record))\n if err != nil {\n return fmt.Errorf(\"failed to insert record: %w\", err)\n }\n \n // Log activity (if activity repo available)\n if h.activity != nil {\n h.activity.LogActivity(ctx, time.Now(), string(event.Action), event.Collection, event.DID, event.RKey, string(event.Record))\n }\n \n // Publish to GraphQL subscriptions\n eventType := subscription.EventCreate\n if event.Action == ActionUpdate {\n eventType = subscription.EventUpdate\n }\n h.pubsub.PublishRecord(eventType, uri, event.CID, event.DID, event.Collection, event.Record)\n \n case ActionDelete:\n h.records.Delete(ctx, uri)\n h.pubsub.PublishRecord(subscription.EventDelete, uri, \"\", event.DID, event.Collection, nil)\n if h.activity != nil {\n h.activity.LogActivity(ctx, time.Now(), \"delete\", event.Collection, event.DID, event.RKey, \"\")\n }\n }\n return nil\n}\n\\`\\`\\`\n\n3. HandleIdentity implementation:\n\\`\\`\\`go\nfunc (h *IndexHandler) HandleIdentity(ctx context.Context, event *IdentityEvent) error {\n // Update actor with handle from identity event\n return h.actors.Upsert(ctx, event.DID, event.Handle)\n}\n\\`\\`\\`\n\nThis is a key improvement over the current system: we now process identity events and store handles.\n\n## Dont\n- Do NOT modify the existing repositories — use their existing Insert/Delete/Upsert methods\n- Do NOT add new database tables or migrations\n- Do NOT modify the PubSub interface\n- Do NOT log every event at Info level in production — use Debug for individual records, Info for batches","acceptance_criteria":"1. 
HandleRecord with ActionCreate calls records.Insert and pubsub.PublishRecord(EventCreate, ...)\n2. HandleRecord with ActionUpdate calls records.Insert and pubsub.PublishRecord(EventUpdate, ...)\n3. HandleRecord with ActionDelete calls records.Delete and pubsub.PublishRecord(EventDelete, ...)\n4. HandleRecord ensures actor exists via actors.Upsert for create/update\n5. HandleIdentity calls actors.Upsert with DID and handle from the event\n6. Activity is logged when activity repo is non-nil\n7. Tests use mock repositories (or real SQLite) to verify correct calls\n8. go test -v ./internal/tap/... passes\n9. go test ./... passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":40,"created_at":"2026-02-18T16:12:55.65983+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:18:36.125908+08:00","closed_at":"2026-02-18T16:18:36.125908+08:00","close_reason":"5ac3f2e Implement Tap IndexHandler that stores records and publishes to PubSub","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.5","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:12:55.662054+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.5","depends_on_id":"hyperindex-2hm.2","type":"blocks","created_at":"2026-02-18T16:12:55.663845+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.6","title":"Wire Tap consumer into main.go with TAP_ENABLED feature flag","description":"## Files\n- cmd/hypergoat/main.go (modify)\n\n## What to do\nAdd a conditional branch in main.go that uses the Tap consumer instead of Jetstream+Backfill when TAP_ENABLED=true. The old code paths remain for backward compatibility.\n\n1. Add import for internal/tap package.\n\n2. Modify the run() function flow. After setupGraphQL returns collections:\n\\`\\`\\`go\nif cfg.TapEnabled {\n startTap(cfg, svc, pubsub, adminHandler, bg)\n} else {\n // Existing code\n startJetstream(cfg, svc, pubsub, collections, adminHandler, bg)\n startBackfill(cfg, svc)\n}\n\\`\\`\\`\n\n3. Create startTap function:\n\\`\\`\\`go\nfunc startTap(cfg *config.Config, svc *services, pubsub *subscription.PubSub, adminHandler *admin.Handler, bg *backgroundServices) {\n tapURL := cfg.TapURL\n \n // Create handler that stores records and publishes to subscriptions\n handler := tap.NewIndexHandler(svc.records, svc.actors, svc.activity, pubsub)\n \n // Create and start consumer\n consumer := tap.NewConsumer(tap.ConsumerConfig{\n TapURL: tapURL,\n DisableAcks: cfg.TapDisableAcks,\n }, handler)\n \n // Store consumer reference for clean shutdown\n // Add tapConsumer field to backgroundServices struct\n bg.tapConsumer = consumer\n \n tapCtx, tapCancel := context.WithCancel(context.Background())\n bg.tapCancel = tapCancel\n \n go func() {\n slog.Info(\"Starting Tap consumer\", \"url\", tapURL, \"disable_acks\", cfg.TapDisableAcks)\n if err := consumer.Start(tapCtx); err != nil {\n slog.Error(\"Tap consumer error\", \"error\", err)\n }\n }()\n \n // Wire admin backfill callbacks to use Tap's /repos/add API\n if adminHandler != nil {\n tapHTTPURL := strings.Replace(strings.Replace(tapURL, \"ws://\", \"http://\", 1), \"wss://\", \"https://\", 1)\n adminClient := tap.NewAdminClient(tapHTTPURL, cfg.TapAdminPassword)\n \n adminHandler.Resolver().SetBackfillCallback(func(ctx context.Context, did 
string) error {\n return adminClient.AddRepos(ctx, []string{did})\n })\n \n adminHandler.Resolver().SetFullBackfillCallback(func(ctx context.Context) error {\n return fmt.Errorf(\"full network backfill not supported via Tap admin API — configure TAP_SIGNAL_COLLECTION or TAP_FULL_NETWORK on the Tap sidecar instead\")\n })\n }\n \n slog.Info(\"Tap consumer started (replaces Jetstream + Backfill)\")\n}\n\\`\\`\\`\n\n4. Add tapConsumer and tapCancel to backgroundServices struct:\n\\`\\`\\`go\ntype backgroundServices struct {\n // ... existing fields ...\n tapConsumer *tap.Consumer\n tapCancel context.CancelFunc\n}\n\\`\\`\\`\n\n5. Update backgroundServices.Stop() to stop Tap consumer:\n\\`\\`\\`go\nif bg.tapConsumer != nil {\n bg.tapConsumer.Stop()\n}\nif bg.tapCancel != nil {\n bg.tapCancel()\n}\n\\`\\`\\`\n\n## Dont\n- Do NOT remove the existing startJetstream or startBackfill functions\n- Do NOT remove the Jetstream/Backfill imports\n- Do NOT change behavior when TAP_ENABLED=false (default)\n- Do NOT remove the lexicon change callback for dynamic collection updates (that stays with Jetstream path only — Tap uses collection filters configured on the Tap side)","acceptance_criteria":"1. When TAP_ENABLED=false (default), existing Jetstream+Backfill behavior is unchanged\n2. When TAP_ENABLED=true, startTap is called instead of startJetstream+startBackfill\n3. Tap consumer is started in a goroutine with proper context and cancellation\n4. backgroundServices.Stop() cleanly shuts down the Tap consumer\n5. Admin single-actor backfill calls Tap's /repos/add API\n6. Admin full-network backfill returns informative error directing user to configure Tap\n7. go build ./... succeeds\n8. go test ./... 
passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T16:13:18.554294+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:20:25.020387+08:00","closed_at":"2026-02-18T16:20:25.020387+08:00","close_reason":"f781a70 Wire Tap consumer into main.go with TAP_ENABLED feature flag","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.6","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:13:18.555453+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.6","depends_on_id":"hyperindex-2hm.1","type":"blocks","created_at":"2026-02-18T16:13:18.55691+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.6","depends_on_id":"hyperindex-2hm.3","type":"blocks","created_at":"2026-02-18T16:13:18.557962+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.6","depends_on_id":"hyperindex-2hm.4","type":"blocks","created_at":"2026-02-18T16:13:18.558989+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.6","depends_on_id":"hyperindex-2hm.5","type":"blocks","created_at":"2026-02-18T16:13:18.560021+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.7","title":"Add Tap deployment docs and docker-compose sidecar config","description":"## Files\n- README.md (modify)\n- docker-compose.tap.yml (create)\n- .env.example (modify)\n\n## What to do\nAdd documentation and docker-compose configuration for running Tap as a sidecar.\n\n1. Update README.md — add a new section \"Using Tap (Recommended)\" under the existing data ingestion docs:\n - Explain what Tap is and why it's recommended (crypto verification, ordering guarantees, simplified architecture)\n - Show how to run Tap alongside Hyperindex\n - Document TAP_ENABLED, TAP_URL, TAP_ADMIN_PASSWORD, TAP_DISABLE_ACKS env vars\n - Show example: docker-compose up with Tap sidecar\n - Show example: adding repos to track via Tap admin API\n - Mention TAP_SIGNAL_COLLECTION for auto-discovery\n - Keep the existing Jetstream+Backfill docs but mark them as \"Legacy Mode\"\n\n2. Create docker-compose.tap.yml:\n\\`\\`\\`yaml\nversion: \"3.8\"\nservices:\n tap:\n image: ghcr.io/bluesky-social/indigo/tap:latest\n ports:\n - \"2480:2480\"\n volumes:\n - tap-data:/data\n environment:\n TAP_DATABASE_URL: \"sqlite:///data/tap.db\"\n TAP_COLLECTION_FILTERS: \"${JETSTREAM_COLLECTIONS}\"\n TAP_SIGNAL_COLLECTION: \"${TAP_SIGNAL_COLLECTION:-}\"\n TAP_DISABLE_ACKS: \"false\"\n TAP_ADMIN_PASSWORD: \"${TAP_ADMIN_PASSWORD:-}\"\n restart: unless-stopped\n\n hyperindex:\n build: .\n ports:\n - \"8080:8080\"\n depends_on:\n - tap\n environment:\n TAP_ENABLED: \"true\"\n TAP_URL: \"ws://tap:2480\"\n TAP_ADMIN_PASSWORD: \"${TAP_ADMIN_PASSWORD:-}\"\n DATABASE_URL: \"${DATABASE_URL:-sqlite:data/hypergoat.db}\"\n # ... other env vars\n restart: unless-stopped\n\nvolumes:\n tap-data:\n\\`\\`\\`\n\n3. Update .env.example with Tap env vars.\n\n## Dont\n- Do NOT remove existing docker-compose.yml or Dockerfile\n- Do NOT change the Dockerfile\n- Do NOT make Tap the only option — keep backward compatibility with Jetstream mode","acceptance_criteria":"1. 
README.md has a \"Using Tap (Recommended)\" section explaining the setup\n2. README.md marks Jetstream+Backfill as \"Legacy Mode\"\n3. docker-compose.tap.yml is valid YAML and runnable\n4. .env.example includes TAP_ENABLED, TAP_URL, TAP_ADMIN_PASSWORD, TAP_DISABLE_ACKS\n5. Documentation explains TAP_SIGNAL_COLLECTION for auto-discovery\n6. go test ./... passes (no code changes, just docs)","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:13:37.056187+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:21:47.007541+08:00","closed_at":"2026-02-18T16:21:47.007541+08:00","close_reason":"6b6f4c0 Add Tap deployment docs and docker-compose sidecar config","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.7","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:13:37.057297+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.7","depends_on_id":"hyperindex-2hm.6","type":"blocks","created_at":"2026-02-18T16:13:37.058872+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2hm.8","title":"Add /health proxy endpoint to check Tap sidecar status","description":"## Files\n- cmd/hypergoat/main.go (modify) — setupRouter or startTap function\n- internal/tap/admin.go (uses existing Health method)\n\n## What to do\nWhen TAP_ENABLED=true, enhance the existing /health endpoint to also check Tap's health. If Tap is unreachable, the health check should indicate degraded status.\n\n1. In the existing /health handler (or a new /health/tap endpoint), add a Tap health check:\n\\`\\`\\`go\n// If Tap is enabled, also check Tap's health\nif cfg.TapEnabled {\n tapHTTPURL := strings.Replace(strings.Replace(cfg.TapURL, \"ws://\", \"http://\", 1), \"wss://\", \"https://\", 1)\n adminClient := tap.NewAdminClient(tapHTTPURL, cfg.TapAdminPassword)\n if err := adminClient.Health(r.Context()); err != nil {\n // Return 200 but with degraded status\n json.NewEncoder(w).Encode(map[string]interface{}{\n \"status\": \"degraded\",\n \"tap\": \"unreachable\",\n \"error\": err.Error(),\n })\n return\n }\n}\n\\`\\`\\`\n\n2. Add a /stats enhancement: when Tap is enabled, include Tap consumer stats (events received, records created, etc.) in the /stats response.\n\n## Dont\n- Do NOT make the main /health fail when Tap is temporarily down — use \"degraded\" status\n- Do NOT create a separate admin client for each health check — store it on backgroundServices or similar\n- Do NOT add this when TAP_ENABLED=false","acceptance_criteria":"1. When TAP_ENABLED=true, /health includes Tap status in the response\n2. When Tap is reachable, health shows \"ok\" with \"tap\": \"ok\"\n3. When Tap is unreachable, health shows \"degraded\" with \"tap\": \"unreachable\"\n4. /stats includes Tap consumer stats when TAP_ENABLED=true\n5. When TAP_ENABLED=false, /health behaves exactly as before\n6. go build ./... succeeds\n7. go test ./... 
passes","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T16:13:50.18969+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:22:11.051009+08:00","closed_at":"2026-02-18T16:22:11.051009+08:00","close_reason":"5dfe636 feat: add Tap health check to /health and /stats endpoints","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-2hm.8","depends_on_id":"hyperindex-2hm","type":"parent-child","created_at":"2026-02-18T16:13:50.190924+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-2hm.8","depends_on_id":"hyperindex-2hm.6","type":"blocks","created_at":"2026-02-18T16:13:50.19256+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-2rz","title":"Support batch lexicon registration via comma/newline-separated NSIDs","description":"## Files\n- client/src/app/lexicons/page.tsx (modify)\n\n## What to do\nUpdate the lexicon registration form to accept multiple NSIDs at once, separated by commas, spaces, or newlines.\n\n### Changes to `handleRegister`:\n1. Split `nsidInput` by commas, newlines, or whitespace: `nsidInput.split(/[,\\n\\s]+/).map(s =\u003e s.trim()).filter(Boolean)`\n2. Validate each NSID with the existing `isValidNsid()` function. If any are invalid, show an error listing the invalid ones and don't submit.\n3. Call `registerMutation.mutate()` sequentially for each valid NSID. Use a loop with `mutateAsync` so they run one at a time.\n4. Show progress: update the success message as each one completes, e.g. \"Registered 3/5 lexicons...\" and then \"Registered 5 lexicons\" when done.\n5. Clear the input only after all succeed.\n\n### Changes to the input element:\n1. Replace the single-line `\u003cinput\u003e` with a `\u003ctextarea\u003e` so users can paste multi-line lists.\n2. Keep the same styling (font-mono, rounded-lg, border, etc). Set `rows={1}` as default but allow it to grow.\n3. Update placeholder to: `\"Enter NSIDs (comma or newline separated)...\"`\n\n### Edge cases:\n- Empty entries after splitting should be filtered out\n- Duplicate NSIDs in the input should be deduplicated before submitting\n- If one NSID fails mid-batch, show the error but keep already-registered ones (don't roll back)\n- The Register button should show loading state during the batch and be disabled\n\n## Don't\n- Change the GraphQL mutation (backend stays as-is, one NSID per call)\n- Modify the tree view, search, or delete functionality\n- Change any other file","acceptance_criteria":"1. `npm run build` succeeds in client/ directory\n2. 
Pasting `app.gainforest.dwc.occurrence, app.gainforest.dwc.taxon, app.gainforest.dwc.event` into the input and clicking Register calls registerLexicon 3 times\n3. Pasting a newline-separated list works the same way\n4. Invalid NSIDs in a batch are caught and shown in the error message before any mutations fire\n5. Duplicate NSIDs in the input are deduplicated (only one mutation per unique NSID)\n6. The input field is a textarea that accepts multi-line paste\n7. Progress feedback is shown during batch registration","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-19T12:09:36.692704+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T12:10:48.788943+08:00","closed_at":"2026-02-19T12:10:48.788943+08:00","close_reason":"6275449 feat: support batch lexicon registration via comma/newline-separated NSIDs","labels":["scope:small"]}
+{"id":"hyperindex-3gm","title":"Test gap: coerceRequiredFields not tested via single-record (ByUri) resolver path (from hyperindex-vz7.3)","description":"## Files\n- internal/graphql/schema/builder_test.go (modify)\n\n## What to do\nAdd a test `TestCoerceRequiredFields_SingleRecordResolver` that verifies the `ByUri` (single record) resolver path also coerces missing required fields.\n\nUse the existing `setupCoercionTestDB` and `buildActivitySchema` helpers already in the file. The test should:\n\n1. Set up a DB with a record missing `title` and `shortDescription`: `{\"createdAt\":\"2025-01-01T00:00:00Z\"}`\n2. Build the activity schema\n3. Execute a GraphQL query using the single-record query field:\n ```graphql\n {\n orgHypercertsClaimActivityByUri(uri: \"at://did:plc:test/org.hypercerts.claim.activity/rkey1\") {\n title\n shortDescription\n }\n }\n ```\n4. Assert: no GraphQL errors, `title` is `\"\"`, `shortDescription` is `\"\"`\n\nFollow the exact same assertion pattern used in `TestCoerceRequiredFields_MissingFields` but adapted for the single-record response shape (no edges/nodes, just direct fields).\n\n## Dont\n- Do not modify production code\n- Do not duplicate the collection resolver tests — only test the ByUri path\n- Do not use external test frameworks","acceptance_criteria":"1. go test -v -run TestCoerceRequiredFields_SingleRecordResolver ./internal/graphql/schema/... passes\n2. The test queries via orgHypercertsClaimActivityByUri (not the collection query)\n3. The test asserts that missing required string fields are coerced to empty string\n4. go test ./... 
passes (no regressions)","status":"closed","priority":3,"issue_type":"bug","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-19T20:20:38.520059+08:00","created_by":"einstein.climateai.org","updated_at":"2026-03-05T15:27:57.55139+08:00","closed_at":"2026-02-19T20:23:58.003339+08:00","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-3gm","depends_on_id":"hyperindex-vz7","type":"discovered-from","created_at":"2026-02-19T20:20:41.357178+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-3jy","title":"Fix: spurious 'closed unexpectedly' warn logged on clean shutdown (from hyperindex-md3.10)","description":"Review of hyperindex-md3.10 (commit b28087c) found: the ctx.Err() guard was only added to the err != nil branch. The else branch (when runOnce returns nil, i.e. server sent a clean close frame) still logs slog.Warn('Tap connection closed unexpectedly, will reconnect') without checking ctx.Err(). If the server sends a clean close frame at the same moment the context is cancelled, this warning fires during intentional shutdown. Fix: add ctx.Err() check to the else branch too: } else { if ctx.Err() != nil { return ctx.Err() } slog.Warn(...) }. Also: TestConsumer_ShutdownNoSpuriousLog only checks for Error-level messages; it would not catch this spurious Warn. Evidence: consumer.go lines 133-137.","status":"open","priority":3,"issue_type":"bug","owner":"einstein.climateai.org","created_at":"2026-02-18T16:49:47.686073+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:49:47.686073+08:00","dependencies":[{"issue_id":"hyperindex-3jy","depends_on_id":"hyperindex-md3.10","type":"discovered-from","created_at":"2026-02-18T16:49:49.968822+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s","title":"Epic: GraphQL Filtering, Sorting \u0026 Query Completeness","description":"## Why\nThe public GraphQL API currently lacks essential query capabilities that developers need to efficiently consume the API. There are no WHERE filters, no sorting control, no DID/author filtering, no backward pagination, no totalCount, no max page size, and no search. The only way to find specific records is by exact URI lookup or paginating through entire collections newest-first. This makes the API unusable for any non-trivial frontend or data integration.\n\n## What Success Looks Like\n- Developers can filter collection queries by any scalar field (eq, neq, gt, lt, gte, lte, in, contains, startsWith, isNull)\n- Developers can filter by DID (author) on any collection query\n- Developers can sort by any scalar field ASC or DESC\n- Connections return totalCount when requested (opt-in)\n- Pagination has a max page size (100) to prevent abuse\n- Backward pagination (last/before) works per Relay spec\n- A basic LIKE-based search query exists for cross-collection text search\n- Typed records expose did and rkey metadata fields\n\n## Key Constraints\n- All changes must work on both SQLite and PostgreSQL (use Executor interface)\n- JSON field filtering uses existing JSONExtract/JSONExtractPath helpers\n- Field name validation via existing validJSONFieldName regex (SQL injection prevention)\n- No breaking changes to existing queries (additive only)\n- All existing tests must continue to pass (go test ./...)\n- Performance: totalCount must be opt-in (only computed when selected)","status":"open","priority":1,"issue_type":"epic","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","created_at":"2026-02-18T14:22:55.632055+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:22:43.275042+08:00","labels":["scope:medium"]}
+{"id":"hyperindex-49s.1","title":"Scaffold shared filter input types (StringFilter, IntFilter, etc.)","description":"## Files\n- internal/graphql/types/filters.go (create)\n- internal/graphql/types/filters_test.go (create)\n\n## What to do\nCreate a new file `internal/graphql/types/filters.go` that defines shared GraphQL InputObject types for field-level filtering. These are reused across all collections.\n\nDefine these 5 input types using `graphql.NewInputObject()`:\n\n1. **StringFilterInput** — fields: `eq: String`, `neq: String`, `in: [String!]`, `contains: String`, `startsWith: String`, `isNull: Boolean`\n2. **IntFilterInput** — fields: `eq: Int`, `neq: Int`, `gt: Int`, `lt: Int`, `gte: Int`, `lte: Int`, `in: [Int!]`, `isNull: Boolean`\n3. **FloatFilterInput** — fields: `eq: Float`, `neq: Float`, `gt: Float`, `lt: Float`, `gte: Float`, `lte: Float`, `isNull: Boolean`\n4. **BooleanFilterInput** — fields: `eq: Boolean`, `isNull: Boolean`\n5. **DateTimeFilterInput** — fields: `eq: DateTime`, `neq: DateTime`, `gt: DateTime`, `lt: DateTime`, `gte: DateTime`, `lte: DateTime`, `isNull: Boolean` (use the existing `DateTimeScalar` from mapper.go)\n\nExport each as a package-level `var` (e.g., `var StringFilterInput = graphql.NewInputObject(...)`).\n\nAlso export a function `FilterInputForLexiconType(lexiconType, format string) *graphql.InputObject` that maps lexicon property types to the correct filter input:\n- `\"string\"` + `\"datetime\"` format → `DateTimeFilterInput`\n- `\"string\"` (no format or other format) → `StringFilterInput`\n- `\"integer\"` → `IntFilterInput`\n- `\"number\"` → `FloatFilterInput`\n- `\"boolean\"` → `BooleanFilterInput`\n- All other types → return `nil` (not filterable)\n\nWrite table-driven tests in `filters_test.go` that verify:\n- Each input type has the correct fields with correct GraphQL types\n- `FilterInputForLexiconType` returns the right input for each lexicon type/format combo\n- Non-filterable types return nil\n\n## Dont\n- Do 
not import or modify any existing files\n- Do not define per-collection types here (that is a separate task)\n- Do not add resolver logic or SQL generation","acceptance_criteria":"1. `go build ./...` passes\n2. `go test -v ./internal/graphql/types/...` passes with all new tests green\n3. Each of the 5 InputObject types has exactly the fields listed above\n4. `FilterInputForLexiconType` returns correct type for string, string+datetime, integer, number, boolean\n5. `FilterInputForLexiconType` returns nil for blob, bytes, unknown, ref, union, array, object, record\n6. Package doc comment exists on filters.go","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T14:23:13.541776+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:55:10.627075+08:00","closed_at":"2026-02-18T14:55:10.627075+08:00","close_reason":"f3de82e feat: scaffold shared filter input types","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.1","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:23:13.543048+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.10","title":"Wire sorting into resolvers and generalize cursor encoding","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nConnect the `sortBy` and `sortDirection` arguments from typed collection queries through the resolver to the sorted repository method. Generalize cursor encoding to support sort-key-aware cursors.\n\n### 1. Generalize cursor encoding/decoding\n\nThe current `encodeCursor(indexedAt, uri)` and `decodeCursor(cursor)` functions (builder.go lines ~636-652) encode `base64(timestamp|uri)`. Generalize to encode N values:\n\n```go\n// encodeCursorValues encodes multiple cursor component values.\nfunc encodeCursorValues(values ...string) string {\n return base64.URLEncoding.EncodeToString([]byte(strings.Join(values, \"|\")))\n}\n\n// decodeCursorValues decodes a cursor into its component values.\nfunc decodeCursorValues(cursor string) ([]string, error) {\n data, err := base64.URLEncoding.DecodeString(cursor)\n if err != nil { return nil, fmt.Errorf(\"invalid cursor\") }\n return strings.Split(string(data), \"|\"), nil\n}\n```\n\nKeep the old `encodeCursor`/`decodeCursor` functions as wrappers for backward compatibility, calling the new generalized functions.\n\n### 2. Modify `resolveRecordConnection` to extract sort args\n\nAfter extracting `first`, `after`, and `where` args, also extract sort:\n\n```go\nvar sortOpt *repositories.SortOption\nif sortByArg, ok := p.Args[\"sortBy\"].(string); ok \u0026\u0026 sortByArg != \"\" {\n direction := \"DESC\" // default\n if dirArg, ok := p.Args[\"sortDirection\"].(string); ok {\n direction = dirArg\n }\n sortOpt = \u0026repositories.SortOption{Field: sortByArg, Direction: direction}\n}\n```\n\n### 3. Switch to the sorted repository method\n\nReplace the call to `GetByCollectionFilteredWithKeysetCursor` (from task 4) with `GetByCollectionSortedWithKeysetCursor`, passing the sort option and cursor values.\n\n### 4. 
Build sort-aware cursors for each edge\n\nWhen building edge cursors in the results loop:\n- If sorting by default (indexed_at), cursor = `encodeCursorValues(rec.IndexedAt.Format(...), rec.URI)` (same as before)\n- If sorting by a JSON field, extract the sort field value from the parsed JSON data map and use it: `encodeCursorValues(sortFieldValue, rec.URI)`\n- If sorting by a column (did, collection, etc.), extract from the Record struct\n\n### 5. Decode sort-aware cursors\n\nWhen an `after` cursor is provided:\n- Decode with `decodeCursorValues(after)` → get `[sortFieldValue, uri]`\n- Pass these as `afterCursorValues` to the repository method\n\n## Dont\n- Do not modify the repository layer (done in task 9)\n- Do not modify sort enum generation (done in task 8)\n- Do not add sorting to the generic `records` query\n- Do not break existing cursor format — old 2-value cursors still decode correctly with the generalized decoder","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. A query with `sortBy: \"indexed_at\", sortDirection: \"ASC\"` returns records oldest-first\n4. A query with sortBy set to a JSON field correctly sorts by that field\n5. Pagination cursors work correctly with custom sort fields\n6. A query WITHOUT sortBy/sortDirection returns records in default order (indexed_at DESC) — backward compatible\n7. Old-format cursors (from before this change) still decode correctly\n8. 
The generic `records` query still works without sort arguments","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T14:25:58.388179+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:08:11.608991+08:00","closed_at":"2026-02-18T15:08:11.608991+08:00","close_reason":"7d5f062 Wire sorting into resolvers and generalize cursor encoding","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.10","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:25:58.389195+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.10","depends_on_id":"hyperindex-49s.8","type":"blocks","created_at":"2026-02-18T14:25:58.390942+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.10","depends_on_id":"hyperindex-49s.9","type":"blocks","created_at":"2026-02-18T14:25:58.392103+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.10","depends_on_id":"hyperindex-49s.4","type":"blocks","created_at":"2026-02-18T14:25:58.393164+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.11","title":"Populate totalCount on public connections (opt-in)","description":"## Files\n- internal/database/repositories/records.go (modify)\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nThe `totalCount` field exists on typed connection types but always returns null. Populate it when the client requests it, using field selection introspection.\n\n### 1. Add GetCollectionCount to repository (records.go)\n\n```go\n// GetCollectionCount returns the total record count for a collection.\nfunc (r *RecordsRepository) GetCollectionCount(ctx context.Context, collection string) (int64, error) {\n sqlStr := fmt.Sprintf(\"SELECT COUNT(*) FROM record WHERE collection = %s\", r.db.Placeholder(1))\n // ... execute and scan\n}\n```\n\nAlso add a filtered version:\n```go\n// GetCollectionCountFiltered returns the count with optional DID and field filters applied.\nfunc (r *RecordsRepository) GetCollectionCountFiltered(\n ctx context.Context, collection string, filters []FieldFilter, did string,\n) (int64, error)\n```\n\n### 2. Add field selection check in builder.go\n\n```go\nfunc isTotalCountRequested(p graphql.ResolveParams) bool {\n for _, field := range p.Info.FieldASTs {\n if field.SelectionSet == nil { continue }\n for _, sel := range field.SelectionSet.Selections {\n if f, ok := sel.(*ast.Field); ok \u0026\u0026 f.Name.Value == \"totalCount\" {\n return true\n }\n }\n }\n return false\n}\n```\n\nImport `\"github.com/graphql-go/graphql/language/ast\"` for the Field type.\n\n### 3. Populate totalCount in resolveRecordConnection\n\nAt the end of `resolveRecordConnection` (around line 462), add:\n\n```go\nresult := map[string]interface{}{\n \"edges\": edges,\n \"pageInfo\": pageInfo,\n}\nif isTotalCountRequested(p) {\n count, err := repos.Records.GetCollectionCountFiltered(ctx, collection, filters, didFilter)\n if err == nil {\n result[\"totalCount\"] = int(count)\n }\n}\nreturn result, nil\n```\n\n### 4. 
Fix emptyConnection inconsistency\n\nThe `emptyConnection()` helper currently sets `totalCount: 0`. Keep this (it is correct for empty results).\n\n### 5. Add totalCount to GenericRecordConnection too\n\nThe `genericRecordConnectionType` (builder.go line ~300) is missing the `totalCount` field entirely. Add it:\n```go\n\"totalCount\": \u0026graphql.Field{\n Type: graphql.Int,\n Description: \"Total number of items (if known)\",\n},\n```\n\n## Dont\n- Do not make totalCount NonNull (keep it nullable Int)\n- Do not always compute totalCount — only when the field is selected in the query\n- Do not modify admin API totalCount (it already works correctly)","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. A query that selects `totalCount` returns the correct integer count\n4. A query that does NOT select `totalCount` does not execute the COUNT SQL\n5. `GetCollectionCount` returns accurate count for a collection\n6. `GetCollectionCountFiltered` respects DID and field filters\n7. `emptyConnection()` still returns `totalCount: 0`\n8. `GenericRecordConnection` now has a `totalCount` field in its schema\n9. Admin API totalCount behavior is unchanged","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":40,"created_at":"2026-02-18T14:26:16.300672+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:04:00.384628+08:00","closed_at":"2026-02-18T15:04:00.384628+08:00","close_reason":"140f601 feat: populate totalCount on public connections opt-in","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.11","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:26:16.301865+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.11","depends_on_id":"hyperindex-49s.2","type":"blocks","created_at":"2026-02-18T14:26:16.303383+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.12","title":"Add backward pagination support (last/before)","description":"## Files\n- internal/graphql/query/connection.go (modify)\n- internal/graphql/schema/builder.go (modify)\n- internal/database/repositories/records.go (modify)\n- internal/database/repositories/records_test.go (modify)\n\n## What to do\nImplement backward pagination (`last`/`before`) per the Relay Connection Spec.\n\n### 1. Add `last` and `before` arguments to ConnectionArgs() (connection.go)\n\n```go\n\"last\": \u0026graphql.ArgumentConfig{\n Type: graphql.Int,\n Description: \"Number of items to return from the end\",\n},\n\"before\": \u0026graphql.ArgumentConfig{\n Type: graphql.String,\n Description: \"Cursor to paginate before (backward pagination)\",\n},\n```\n\n### 2. Add reverse-order query to repository (records.go)\n\nAdd a new method or extend the sorted method to handle backward pagination:\n\n```go\nfunc (r *RecordsRepository) GetByCollectionReversedWithKeysetCursor(\n ctx context.Context,\n collection string,\n filters []FieldFilter,\n did string,\n sort *SortOption,\n limit int,\n beforeCursorValues []string,\n) ([]*Record, error)\n```\n\nThis method:\n- Reverses the sort direction (DESC→ASC, ASC→DESC) in the ORDER BY\n- Reverses the cursor comparison operator (\u003c becomes \u003e, \u003e becomes \u003c)\n- After fetching, reverses the result slice in-memory so edges are in the correct order\n- Fetches `limit+1` to detect `hasPreviousPage`\n\n### 3. 
Update resolveRecordConnection in builder.go\n\n- Extract `last` and `before` from `p.Args`\n- Validate: if both `first`/`after` AND `last`/`before` are provided, return an error: \"cannot use both first/after and last/before\"\n- If `last`/`before` are used, call the reverse query method\n- Apply `ClampPageSize` to `last` as well\n- Set `hasPreviousPage` correctly: for backward pagination, use the N+1 technique (same as hasNextPage for forward)\n- Set `hasNextPage` correctly: for backward pagination, `before != \"\"` indicates there are next pages\n\n### 4. Update generic records query args (builder.go inline args)\n\nAdd `last` and `before` arguments to the inline args for the `records` query too.\n\n## Dont\n- Do not support using first/after AND last/before simultaneously (return error)\n- Do not modify the admin API pagination\n- Do not change cursor encoding format","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. A query with `last: 5` returns the last 5 records (oldest 5 in default DESC order)\n4. A query with `last: 5, before: cursor` returns 5 records before the cursor\n5. `hasPreviousPage` is correctly computed using N+1 technique for backward pagination\n6. `hasNextPage` works correctly for backward pagination\n7. Using `first` and `last` together returns a GraphQL error\n8. `last` is clamped to MaxPageSize (100)\n9. Forward pagination (first/after) still works identically to before\n10. 
The generic `records` query also supports last/before","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":60,"created_at":"2026-02-18T14:26:34.962048+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:17:06.03473+08:00","closed_at":"2026-02-18T15:17:06.03473+08:00","close_reason":"a4a38bc feat: add backward pagination support (last/before)","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.12","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:26:34.96307+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.12","depends_on_id":"hyperindex-49s.6","type":"blocks","created_at":"2026-02-18T14:26:34.964567+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.12","depends_on_id":"hyperindex-49s.9","type":"blocks","created_at":"2026-02-18T14:26:34.965704+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.13","title":"Add LIKE-based cross-collection search query","description":"## Files\n- internal/database/repositories/records.go (modify)\n- internal/database/repositories/records_test.go (modify)\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nAdd a basic text search capability using LIKE/ILIKE on the JSON column. This is Phase 1 of search — simple but functional. FTS can be added later.\n\n### 1. Add Search method to RecordsRepository (records.go)\n\n```go\n// Search performs a LIKE-based text search on record JSON content.\n// On PostgreSQL, uses case-insensitive ILIKE. On SQLite, LIKE is already case-insensitive for ASCII.\nfunc (r *RecordsRepository) Search(\n ctx context.Context,\n query string,\n collection string, // optional, empty = cross-collection\n limit int,\n afterTimestamp string,\n afterURI string,\n) ([]*Record, error)\n```\n\nSQL construction:\n- Base: `SELECT ... FROM record WHERE json LIKE %s` (SQLite) or `WHERE json::text ILIKE %s` (PostgreSQL) \n- Value: wrap query in `%`: `\"%\"+query+\"%\"`\n- If collection non-empty, add `AND collection = ?`\n- Keyset cursor: same `AND (indexed_at \u003c ? OR (indexed_at = ? AND uri \u003c ?))` pattern\n- ORDER BY: `indexed_at DESC, uri DESC`\n- LIMIT: `limit + 1`\n\nFor PostgreSQL, cast json to text: `json::text ILIKE ?`. Check if the Executor has a helper for this or just use dialect branching.\n\n### 2. 
Add `search` field to GraphQL query type (builder.go)\n\nIn `buildQueryType()`, add a new field:\n\n```go\nfields[\"search\"] = \u0026graphql.Field{\n Type: genericRecordConnectionType,\n Description: \"Search records by text content\",\n Args: graphql.FieldConfigArgument{\n \"query\": \u0026graphql.ArgumentConfig{\n Type: graphql.NewNonNull(graphql.String),\n Description: \"Search text (matched against record JSON content)\",\n },\n \"collection\": \u0026graphql.ArgumentConfig{\n Type: graphql.String,\n Description: \"Optional collection NSID to restrict search\",\n },\n \"first\": \u0026graphql.ArgumentConfig{\n Type: graphql.Int,\n DefaultValue: 20,\n },\n \"after\": \u0026graphql.ArgumentConfig{\n Type: graphql.String,\n },\n },\n Resolve: b.createSearchResolver(),\n}\n```\n\n### 3. Add search resolver (builder.go)\n\n```go\nfunc (b *Builder) createSearchResolver() graphql.FieldResolveFn\n```\n\n- Extract `query`, `collection`, `first`, `after` from args\n- Apply `ClampPageSize` to first\n- Validate: `query` must be non-empty, minimum 2 characters\n- Call `repos.Records.Search(ctx, query, collection, first+1, afterTimestamp, afterURI)`\n- Build generic record connection (same pattern as the generic records resolver)\n\n### 4. Tests\n\n- Test Search returns records containing the search term\n- Test Search with collection filter narrows results\n- Test Search with pagination\n- Test empty query returns error\n- Test very short query (1 char) returns error\n\n## Dont\n- Do not add FTS5 or tsvector (that is a future phase)\n- Do not add search to typed collection queries (this is a standalone query)\n- Do not allow empty or single-character queries (minimum 2 chars)\n- Do not add new database indexes (LIKE search is inherently O(n) — fine for Phase 1)","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. `search(query: \"hello\")` returns records whose JSON contains \"hello\"\n4. 
`search(query: \"hello\", collection: \"app.bsky.feed.post\")` only searches that collection\n5. Pagination works on search results (first/after)\n6. Search query shorter than 2 characters returns a GraphQL error\n7. Empty query string returns a GraphQL error\n8. Search is case-insensitive on both SQLite and PostgreSQL\n9. The search field returns `GenericRecordConnection` type (not typed records)\n10. LIKE value properly escapes % and _ in the user query to prevent wildcard injection","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T14:26:57.899071+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:59:18.423051+08:00","closed_at":"2026-02-18T14:59:18.423051+08:00","close_reason":"264d333 Add LIKE-based cross-collection search query","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.13","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:26:57.899953+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.13","depends_on_id":"hyperindex-49s.6","type":"blocks","created_at":"2026-02-18T14:26:57.901396+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.14","title":"Add integration tests for filter/sort/search end-to-end","description":"## Files\n- internal/integration/graphql_filter_test.go (create)\n\n## What to do\nWrite end-to-end integration tests that exercise the full filter/sort/search stack by sending GraphQL queries through the HTTP handler and verifying responses. These tests validate that the schema builder, resolvers, and repository work together correctly.\n\n### Test setup\nUse the existing integration test patterns from `internal/integration/` (if any exist) or from `internal/graphql/handler_test.go`. Set up:\n- An in-memory SQLite database\n- A registry with at least one test lexicon (create a minimal lexicon JSON fixture with string, integer, datetime, and boolean fields)\n- Insert several test records with known JSON content\n- Build the schema and create the GraphQL handler\n\n### Test cases\n\n1. **Filter by string eq**: Query with `where: {title: {eq: \"Test\"}}`, verify only matching records returned\n2. **Filter by string contains**: Query with `where: {title: {contains: \"est\"}}`, verify substring matching\n3. **Filter by integer gt/lt**: Query with `where: {score: {gt: 5, lt: 10}}`, verify range filtering\n4. **Filter by DID**: Query with `where: {did: {eq: \"did:plc:test123\"}}`, verify DID filtering\n5. **Filter by isNull**: Query with `where: {optionalField: {isNull: true}}`, verify null filtering\n6. **Sort by field ASC**: Query with `sortBy: \"title\", sortDirection: \"ASC\"`, verify ascending order\n7. **Sort by field DESC**: Query with `sortBy: \"indexed_at\", sortDirection: \"DESC\"`, verify descending order\n8. **Sort + pagination**: Sort by field, paginate with cursor, verify correct continuation\n9. **totalCount opt-in**: Query selecting totalCount, verify it returns correct integer\n10. **totalCount omitted**: Query NOT selecting totalCount (verify via test that no COUNT query runs — can check by verifying null/omitted)\n11. 
**Max page size**: Query with `first: 500`, verify at most 100 records returned\n12. **Search**: Query `search(query: \"searchterm\")`, verify matching records found\n13. **Search with collection filter**: Query `search(query: \"term\", collection: \"test.collection\")`, verify filtered\n14. **Backward compatibility**: Query without any where/sort args, verify same behavior as before\n\n## Dont\n- Do not test admin API\n- Do not test subscriptions\n- Do not test OAuth\n- Do not start a real HTTP server — use httptest if needed, or call graphql.Do directly","acceptance_criteria":"1. `go build ./...` passes\n2. `go test -v ./internal/integration/...` passes with all 14 test cases green\n3. Tests use table-driven test pattern\n4. Test setup creates a minimal but complete test environment (DB + lexicon + records + schema)\n5. Each test case is independent and does not depend on ordering\n6. Tests verify both the happy path and edge cases (empty results, invalid input)","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":60,"created_at":"2026-02-18T14:27:17.258793+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:21:53.440089+08:00","closed_at":"2026-02-18T15:21:53.440089+08:00","close_reason":"c179d86 Add integration tests for filter/sort/search 
end-to-end","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.14","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:27:17.259809+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.14","depends_on_id":"hyperindex-49s.4","type":"blocks","created_at":"2026-02-18T14:27:17.261276+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.14","depends_on_id":"hyperindex-49s.10","type":"blocks","created_at":"2026-02-18T14:27:17.262372+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.14","depends_on_id":"hyperindex-49s.11","type":"blocks","created_at":"2026-02-18T14:27:17.263445+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.14","depends_on_id":"hyperindex-49s.13","type":"blocks","created_at":"2026-02-18T14:27:17.264489+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.2","title":"Add FieldFilter struct and filtered query to RecordsRepository","description":"## Files\n- internal/database/repositories/records.go (modify)\n- internal/database/repositories/records_test.go (modify)\n\n## What to do\nAdd a `FieldFilter` struct and a new repository method that supports field-level filtering using JSON extraction.\n\n### 1. Define the FieldFilter struct (add near top of records.go, after Record struct)\n\n```go\n// FieldFilter represents a single filter condition on a JSON field.\ntype FieldFilter struct {\n Field string // JSON field name (e.g., \"title\", \"createdAt\"). Must be a valid field name.\n Operator string // One of: \"eq\", \"neq\", \"gt\", \"lt\", \"gte\", \"lte\", \"in\", \"contains\", \"startsWith\", \"isNull\"\n Value interface{} // The comparison value. For \"in\", must be []interface{}. For \"isNull\", must be bool.\n FieldType string // Lexicon type: \"string\", \"integer\", \"number\", \"boolean\", \"datetime\"\n}\n```\n\n### 2. Add a method to build WHERE clause fragments from filters\n\n```go\nfunc (r *RecordsRepository) buildFilterClause(filters []FieldFilter, startPlaceholder int) (string, []database.Value)\n```\n\nFor each filter:\n- Extract the JSON field: `r.db.JSONExtract(\"json\", filter.Field)` \n- Map operator to SQL: eq→`=`, neq→`!=`, gt→`\u003e`, lt→`\u003c`, gte→`\u003e=`, lte→`\u003c=`, contains→`LIKE` (wrap value in `%`), startsWith→`LIKE` (append `%`), isNull→`IS NULL`/`IS NOT NULL` (based on bool Value), in→`IN (...)`\n- For numeric types (integer, number), wrap the JSONExtract in CAST: SQLite `CAST(... AS REAL)`, PostgreSQL `(...)::numeric`\n- Use `r.db.Placeholder(n)` for parameterized values, incrementing from startPlaceholder\n- Join all conditions with ` AND `\n\n### 3. 
Add a new query method\n\n```go\nfunc (r *RecordsRepository) GetByCollectionFilteredWithKeysetCursor(\n ctx context.Context,\n collection string,\n filters []FieldFilter,\n did string, // optional, empty string means no DID filter\n limit int,\n afterTimestamp string, // empty means first page\n afterURI string,\n) ([]*Record, error)\n```\n\nThis builds on the existing `GetByCollectionWithKeysetCursor` pattern but:\n- Appends filter conditions from `buildFilterClause` to the WHERE clause\n- Adds `AND did = ?` if did is non-empty\n- Keeps the same `ORDER BY indexed_at DESC, uri DESC` and keyset cursor logic\n\n### 4. Tests\nAdd table-driven tests for `buildFilterClause` covering:\n- Each operator type (eq, neq, gt, lt, gte, lte, in, contains, startsWith, isNull true, isNull false)\n- Numeric type casting\n- Multiple filters combined with AND\n- Empty filter list returns empty string\n\nAdd integration-style tests for `GetByCollectionFilteredWithKeysetCursor`:\n- Filter by string eq\n- Filter by isNull\n- Filter with DID\n- Pagination with filters\n\n## Dont\n- Do not modify existing methods (GetByCollectionWithKeysetCursor etc.)\n- Do not change the Record struct\n- Do not add GraphQL code — this is DB layer only\n- Do not use raw string concatenation for field names — always go through JSONExtract which validates","acceptance_criteria":"1. `go build ./...` passes\n2. `go test -v ./internal/database/repositories/...` passes with all tests green\n3. `buildFilterClause` correctly generates SQL for all 10 operators\n4. Numeric fields (integer, number) are CAST appropriately per dialect\n5. `contains` wraps value in `%value%`, `startsWith` appends `value%`\n6. `isNull: true` generates `IS NULL`, `isNull: false` generates `IS NOT NULL`\n7. `in` operator generates `IN ($1, $2, ...)` with correct placeholder count\n8. Empty filters list returns empty clause string and nil params\n9. 
`GetByCollectionFilteredWithKeysetCursor` returns correct results when filtering by string field eq\n10. DID filter adds `AND did = ?` when non-empty, omits when empty","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":60,"created_at":"2026-02-18T14:23:37.993809+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:00:41.89451+08:00","closed_at":"2026-02-18T15:00:41.89451+08:00","close_reason":"8d786ca Add FieldFilter struct and filtered query to RecordsRepository","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.2","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:23:37.994792+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.2","depends_on_id":"hyperindex-49s.1","type":"blocks","created_at":"2026-02-18T14:23:37.996171+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.3","title":"Generate per-collection WhereInput types in schema builder","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nDuring schema build, generate a per-collection `WhereInput` GraphQL InputObject type for each registered lexicon, and add a `where` argument to each typed collection query field.\n\n### 1. Add a new build phase method: `buildWhereInputTypes()`\n\nCall this after Phase 2 (record types) but before Phase 4 (query type). For each collection lexicon:\n\n- Get the lexicon main definition properties (same iteration as in `buildRecordTypes`)\n- For each property, call `types.FilterInputForLexiconType(prop.Type, prop.Format)` from the new filters.go (task 1)\n- If the filter input is non-nil, add the property name as a field on the WhereInput with that filter type\n- Always add a `did` field with `types.StringFilterInput` (DID is a standard filterable metadata field)\n- Store the resulting InputObject in a new map: `b.whereInputTypes map[string]*graphql.InputObject`\n\nType naming: `{TypeName}WhereInput` (e.g., `AppBskyFeedPostWhereInput`)\n\n### 2. Modify `buildQueryType()` — add `where` argument to collection fields\n\nAt builder.go line ~366, where `query.ConnectionArgs()` is used, replace with custom args that include the where input:\n\n```go\nargs := query.ConnectionArgs()\nif whereInput, ok := b.whereInputTypes[lexiconID]; ok {\n args[\"where\"] = \u0026graphql.ArgumentConfig{\n Type: whereInput,\n Description: \"Filter conditions\",\n }\n}\n```\n\n### 3. Store whereInputTypes on the Builder struct\n\nAdd field: `whereInputTypes map[string]*graphql.InputObject`\nInitialize in `NewBuilder()`.\n\n## Dont\n- Do not add resolver logic to process the where argument (separate task)\n- Do not modify the generic `records` query\n- Do not add sort arguments (separate task)\n- Do not create new files — all changes go in builder.go","acceptance_criteria":"1. `go build ./...` passes\n2. 
`go test ./...` passes (all existing tests still green)\n3. For a lexicon with string/integer/datetime properties, the generated WhereInput has a field per filterable property\n4. Non-filterable properties (array, ref, union, blob, unknown) are NOT included in the WhereInput\n5. Every WhereInput includes a `did` field of type StringFilterInput\n6. The `where` argument appears on typed collection query fields in schema introspection\n7. The `where` argument does NOT appear on the generic `records` query or `*ByUri` queries\n8. The Builder struct has the new `whereInputTypes` map field initialized in NewBuilder","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T14:23:57.317521+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:58:26.465963+08:00","closed_at":"2026-02-18T14:58:26.465963+08:00","close_reason":"6050b63 Generate per-collection WhereInput types in schema builder","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.3","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:23:57.318642+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.3","depends_on_id":"hyperindex-49s.1","type":"blocks","created_at":"2026-02-18T14:23:57.320142+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.4","title":"Wire WHERE filters from GraphQL args into resolvers and repository","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nConnect the `where` argument from typed collection queries through the resolver to the new filtered repository method.\n\n### 1. Add a helper function to extract filters from the `where` argument\n\n```go\nfunc extractFilters(whereArg interface{}, lexiconID string, registry *lexicon.Registry) ([]repositories.FieldFilter, string)\n```\n\nThis function:\n- Type-asserts `whereArg` to `map[string]interface{}`\n- For each key-value pair in the map:\n - If key is `\"did\"`, extract the filter and return the DID value separately (or convert to a FieldFilter — but DID is a column, not a JSON field)\n - Otherwise, look up the property type in the lexicon registry to determine `FieldType`\n - The value will be a `map[string]interface{}` representing the filter input (e.g., `{\"eq\": \"hello\", \"contains\": \"world\"}`)\n - For each operator-value pair in the filter input, create a `repositories.FieldFilter{Field: key, Operator: op, Value: val, FieldType: lexiconType}`\n- Return the slice of FieldFilters and the DID string (empty if not filtered)\n\n### 2. Modify `resolveRecordConnection`\n\nCurrently at builder.go lines 396-471. 
The function signature stays the same but behavior changes:\n\n- After extracting `first` and `after` (lines 407-411), also extract the `where` argument:\n ```go\n var filters []repositories.FieldFilter\n var didFilter string\n if whereArg, ok := p.Args[\"where\"]; ok \u0026\u0026 whereArg != nil {\n filters, didFilter = extractFilters(whereArg, collection, b.registry)\n }\n ```\n- Replace the call to `repos.Records.GetByCollectionWithKeysetCursor(...)` (line ~424) with `repos.Records.GetByCollectionFilteredWithKeysetCursor(ctx, collection, filters, didFilter, first+1, afterTimestamp, afterURI)`\n- When filters is empty and didFilter is empty, the new method behaves identically to the old one\n\n### 3. Ensure backward compatibility\n\nThe `resolveRecordConnection` is also called by the generic `records` resolver (line 481) which does NOT have a `where` argument. Since `p.Args[\"where\"]` will be nil in that case, the filter extraction is skipped and the unfiltered path is taken. No changes needed to the generic records resolver.\n\n## Dont\n- Do not add sort logic (separate task)\n- Do not modify the repository layer (already done in task 2)\n- Do not modify the WhereInput type generation (already done in task 3)\n- Do not add new test files — add tests inline in existing builder test if one exists, or verify via integration test","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. A GraphQL query like `appBskyFeedPost(where: {text: {contains: \"hello\"}}) { edges { node { uri } } }` correctly filters results\n4. A query with `where: {did: {eq: \"did:plc:abc\"}}` correctly filters by DID\n5. Multiple filter fields are ANDed together\n6. Multiple operators on the same field are ANDed together (e.g., `{score: {gt: 5, lt: 10}}`)\n7. A query WITHOUT `where` argument returns the same results as before (backward compatible)\n8. The generic `records` query still works without where support\n9. 
No SQL injection possible — field names go through JSONExtract validation","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T14:24:19.056272+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:02:35.44457+08:00","closed_at":"2026-02-18T15:02:35.44457+08:00","close_reason":"110e36d Wire WHERE filters from GraphQL args into resolvers and repository","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.4","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:24:19.057295+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.4","depends_on_id":"hyperindex-49s.2","type":"blocks","created_at":"2026-02-18T14:24:19.059043+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.4","depends_on_id":"hyperindex-49s.3","type":"blocks","created_at":"2026-02-18T14:24:19.060135+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.5","title":"Add did and rkey fields to typed record GraphQL types","description":"## Files\n- internal/graphql/types/object.go (modify)\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nCurrently typed record types only expose `uri` and `cid` as standard metadata fields. The `did` (author DID) and `rkey` (record key) are available in the Record struct but never injected. GenericRecord already has these fields. Fix this inconsistency.\n\n### 1. Modify `buildRecordFields()` in object.go (around line 92-103)\n\nAdd two new standard fields alongside the existing `uri` and `cid`:\n\n```go\n\"did\": \u0026graphql.Field{\n Type: graphql.NewNonNull(graphql.String),\n Description: \"DID of the record author\",\n},\n\"rkey\": \u0026graphql.Field{\n Type: graphql.NewNonNull(graphql.String),\n Description: \"Record key (last segment of AT-URI)\",\n},\n```\n\n### 2. Modify `createCollectionResolver` in builder.go (around line 498-506)\n\nIn the loop that builds data maps from records, add:\n```go\ndata[\"did\"] = rec.DID\ndata[\"rkey\"] = rec.RKey\n```\nalongside the existing `data[\"uri\"] = rec.URI` and `data[\"cid\"] = rec.CID`.\n\n### 3. Modify `createSingleRecordResolver` in builder.go (around line 535-541)\n\nSame injection — add `data[\"did\"]` and `data[\"rkey\"]` to the single-record resolver output.\n\n## Dont\n- Do not modify GenericRecord (it already has these fields)\n- Do not modify subscription types (RecordEvent already has did)\n- Do not add actor resolution (separate future task)\n- Do not add any new files","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. Typed record types now have `did: String!` and `rkey: String!` fields visible in schema introspection\n4. A typed collection query returns `did` and `rkey` values in the response\n5. A single record ByUri query returns `did` and `rkey` values\n6. Existing `uri` and `cid` fields are unchanged\n7. 
Subscription event types are unchanged","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T14:24:33.139969+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:54:46.611508+08:00","closed_at":"2026-02-18T14:54:46.611508+08:00","close_reason":"c18791b Add did and rkey fields to typed record GraphQL types","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-49s.5","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:24:33.141069+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.6","title":"Add max page size limit and validate first argument","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n- internal/graphql/query/connection.go (modify)\n\n## What to do\nThere is currently no upper bound on the `first` pagination argument. A client can request `first: 999999` causing the server to fetch a million records. Add a max page size.\n\n### 1. Define constants in connection.go\n\n```go\nconst (\n DefaultPageSize = 20\n MaxPageSize = 100\n)\n```\n\n### 2. Add a helper function in connection.go\n\n```go\n// ClampPageSize returns a valid page size within [1, MaxPageSize], defaulting to DefaultPageSize.\nfunc ClampPageSize(first int) int {\n if first \u003c= 0 {\n return DefaultPageSize\n }\n if first \u003e MaxPageSize {\n return MaxPageSize\n }\n return first\n}\n```\n\n### 3. Apply in builder.go `resolveRecordConnection` (around line 407-410)\n\nReplace:\n```go\nfirst, _ := p.Args[\"first\"].(int)\nif first == 0 {\n first = 20\n}\n```\n\nWith:\n```go\nfirstArg, _ := p.Args[\"first\"].(int)\nfirst := query.ClampPageSize(firstArg)\n```\n\n### 4. Apply to the generic `records` resolver too\n\nThe generic records query resolver (around line 473-493) likely has similar default logic. Apply the same clamping there.\n\n## Dont\n- Do not change the default page size (keep 20)\n- Do not return an error for first \u003e 100 — silently clamp to 100\n- Do not modify admin API pagination (labels/reports)","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. `query.ClampPageSize(0)` returns 20\n4. `query.ClampPageSize(-5)` returns 20\n5. `query.ClampPageSize(50)` returns 50\n6. `query.ClampPageSize(200)` returns 100\n7. `query.ClampPageSize(100)` returns 100\n8. A GraphQL query with `first: 500` returns at most 100 records\n9. A GraphQL query with no `first` argument returns 20 records (default unchanged)\n10. 
Constants `DefaultPageSize` and `MaxPageSize` are exported","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T14:24:45.871588+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:55:02.306475+08:00","closed_at":"2026-02-18T14:55:02.306475+08:00","close_reason":"bbfdf23 Add max page size limit and ClampPageSize helper","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-49s.6","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:24:45.872519+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.7","title":"Add composite DB index for keyset pagination performance","description":"## Files\n- internal/database/migrations/sqlite/006_add_composite_index.up.sql (create)\n- internal/database/migrations/sqlite/006_add_composite_index.down.sql (create)\n- internal/database/migrations/postgres/006_add_composite_index.up.sql (create)\n- internal/database/migrations/postgres/006_add_composite_index.down.sql (create)\n\n## What to do\nThe current keyset pagination query uses `WHERE collection = ? AND (indexed_at \u003c ? OR (indexed_at = ? AND uri \u003c ?)) ORDER BY indexed_at DESC, uri DESC`. There is no composite index covering this query pattern. The existing `idx_record_indexed_at` is single-column and `idx_record_collection` is single-column. Add a composite covering index.\n\n### Migration 006 up (both dialects — SQL is identical):\n\n```sql\n-- Composite index for keyset pagination: covers WHERE collection = ? ORDER BY indexed_at DESC, uri DESC\nCREATE INDEX IF NOT EXISTS idx_record_collection_keyset ON record(collection, indexed_at DESC, uri DESC);\n```\n\n### Migration 006 down (both dialects):\n\n```sql\nDROP INDEX IF EXISTS idx_record_collection_keyset;\n```\n\n### Important notes\n- Check existing migration numbers first. The latest migration files should be numbered 005. If 006 already exists, use the next available number.\n- The migration filenames must follow the existing pattern exactly: `NNN_description.up.sql` and `NNN_description.down.sql`\n- Use `IF NOT EXISTS` / `IF EXISTS` for idempotency\n\n## Dont\n- Do not modify any Go code\n- Do not drop existing indexes\n- Do not add FTS indexes (separate future task)\n- Do not add GIN or expression indexes for JSON fields yet","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes (migrations run successfully)\n3. SQLite up migration creates the composite index\n4. PostgreSQL up migration creates the composite index\n5. 
Down migrations drop the index cleanly\n6. Migration file numbering is correct (no gaps, no conflicts with existing migrations)\n7. Running the server applies the migration without errors","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":10,"created_at":"2026-02-18T14:24:58.875985+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:54:39.728725+08:00","closed_at":"2026-02-18T14:54:39.728725+08:00","close_reason":"6b2c6ad feat: add composite DB index for keyset pagination performance","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-49s.7","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:24:58.876847+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.8","title":"Add per-collection sort enum and sortBy/sortDirection arguments","description":"## Files\n- internal/graphql/query/connection.go (modify)\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nAdd sorting arguments to typed collection queries. Each collection gets a generated enum of sortable fields. A shared SortDirection enum provides ASC/DESC.\n\n### 1. Define SortDirection enum in connection.go\n\n```go\nvar SortDirectionEnum = graphql.NewEnum(graphql.EnumConfig{\n Name: \"SortDirection\",\n Description: \"Sort direction\",\n Values: graphql.EnumValueConfigMap{\n \"ASC\": \u0026graphql.EnumValueConfig{Value: \"ASC\", Description: \"Ascending order\"},\n \"DESC\": \u0026graphql.EnumValueConfig{Value: \"DESC\", Description: \"Descending order (default)\"},\n },\n})\n```\n\n### 2. Add BuildSortFieldEnum function in connection.go (or a new sort.go in the query package)\n\n```go\n// BuildSortFieldEnum creates a per-collection enum of fields that can be used for sorting.\n// Only scalar types are sortable (string, integer, number, boolean, datetime).\n// Always includes \"indexed_at\" as a sortable meta-field.\nfunc BuildSortFieldEnum(typeName string, properties map[string]lexicon.Property) *graphql.Enum\n```\n\nFor each property, check if the type is sortable:\n- string (any format) → sortable\n- integer → sortable\n- number → sortable\n- boolean → sortable\n- datetime (string+datetime) → sortable\n- All others (array, ref, union, blob, bytes, unknown, object) → not sortable\n\nEnum value names should match the property names exactly.\n\n### 3. 
Store sort enums and add to query args in builder.go\n\n- Add a new map `b.sortFieldEnums map[string]*graphql.Enum` to the Builder struct, initialized in NewBuilder\n- In a new build phase (or alongside whereInputTypes), build sort enums for each collection\n- When building collection query fields in `buildQueryType()`, add `sortBy` and `sortDirection` arguments:\n\n```go\nargs[\"sortBy\"] = \u0026graphql.ArgumentConfig{\n Type: sortEnum,\n Description: \"Field to sort by (default: indexed_at)\",\n}\nargs[\"sortDirection\"] = \u0026graphql.ArgumentConfig{\n Type: SortDirectionEnum,\n Description: \"Sort direction (default: DESC)\",\n}\n```\n\n## Dont\n- Do not implement the resolver logic to actually use the sort args (separate task)\n- Do not modify cursor encoding (separate task)\n- Do not import lexicon package into connection.go — pass the needed data as params\n- Do not add sorting to the generic `records` query or admin queries","acceptance_criteria":"1. `go build ./...` passes\n2. `go test ./...` passes\n3. `SortDirectionEnum` has exactly two values: ASC and DESC\n4. `BuildSortFieldEnum` generates an enum with `indexed_at` plus one entry per sortable property\n5. Non-sortable properties (array, ref, union, blob, unknown) are excluded from the sort enum\n6. The `sortBy` argument appears on typed collection query fields in schema introspection\n7. The `sortDirection` argument appears on typed collection query fields\n8. 
Neither sort argument appears on the generic `records` query or `*ByUri` queries","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T14:25:17.754005+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T14:56:31.312978+08:00","closed_at":"2026-02-18T14:56:31.312978+08:00","close_reason":"2140d47 Add per-collection sort enum and sortBy/sortDirection arguments","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.8","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:25:17.754937+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-49s.9","title":"Add sorted query method to RecordsRepository","description":"## Files\n- internal/database/repositories/records.go (modify)\n- internal/database/repositories/records_test.go (modify)\n\n## What to do\nAdd a repository method that supports dynamic ORDER BY on a single field (either a DB column or a JSON-extracted field), with configurable sort direction.\n\n### 1. Define a SortOption struct\n\n```go\n// SortOption specifies a sort field and direction for record queries.\ntype SortOption struct {\n Field string // Field name. If \"indexed_at\", \"uri\", \"did\", \"collection\" — use column directly. Otherwise, use JSONExtract.\n Direction string // \"ASC\" or \"DESC\"\n}\n```\n\n### 2. Add helper to build ORDER BY expression\n\n```go\nfunc (r *RecordsRepository) buildSortExpr(sort *SortOption) string\n```\n\n- If `sort` is nil, return `\"indexed_at DESC, uri DESC\"` (default)\n- If `sort.Field` is one of `\"indexed_at\", \"uri\", \"did\", \"collection\", \"cid\", \"rkey\"`, use it as a direct column reference\n- Otherwise, use `r.db.JSONExtract(\"json\", sort.Field)` to build the sort expression\n- Always append `, uri DESC` as the tiebreaker (unless the field IS uri)\n- The direction comes from `sort.Direction`\n\n### 3. Add new query method\n\n```go\nfunc (r *RecordsRepository) GetByCollectionSortedWithKeysetCursor(\n ctx context.Context,\n collection string,\n filters []FieldFilter,\n did string,\n sort *SortOption, // nil means default sort\n limit int,\n afterCursorValues []string, // [sortFieldValue, uri] for keyset. Empty = first page.\n) ([]*Record, error)\n```\n\nThis method:\n- Builds the ORDER BY clause from sort option\n- Builds the keyset WHERE clause using the sort field and uri as the composite cursor key\n- The comparison operator flips based on direction: DESC uses `\u003c`, ASC uses `\u003e`\n- Applies field filters from `buildFilterClause` (from task 2)\n- Applies optional DID filter\n\n### 4. 
Tests\n\n- Test default sort (nil SortOption) returns records in indexed_at DESC order\n- Test sort by indexed_at ASC returns records in ascending order\n- Test sort by a JSON field (e.g., createdAt) DESC\n- Test keyset cursor works correctly with custom sort field\n- Test sort + filters combined\n\n## Dont\n- Do not support multi-field sorting (only one sort field + uri tiebreaker)\n- Do not modify existing methods\n- Do not add GraphQL code","acceptance_criteria":"1. `go build ./...` passes\n2. `go test -v ./internal/database/repositories/...` passes\n3. Nil SortOption produces `ORDER BY indexed_at DESC, uri DESC`\n4. SortOption{Field: \"indexed_at\", Direction: \"ASC\"} produces `ORDER BY indexed_at ASC, uri ASC`\n5. SortOption for a JSON field uses JSONExtract in the ORDER BY\n6. Keyset cursor with ASC direction uses `\u003e` comparator\n7. Keyset cursor with DESC direction uses `\u003c` comparator\n8. Combined sort + filters produces correct results\n9. The uri tiebreaker direction matches the primary sort direction","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T14:25:38.205489+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:06:00.990256+08:00","closed_at":"2026-02-18T15:06:00.990256+08:00","close_reason":"8986495 Add sorted query method to RecordsRepository","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-49s.9","depends_on_id":"hyperindex-49s","type":"parent-child","created_at":"2026-02-18T14:25:38.206562+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-49s.9","depends_on_id":"hyperindex-49s.2","type":"blocks","created_at":"2026-02-18T14:25:38.208305+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-5ar","title":"Fix: subscription per-collection resolver returns raw record map without null coercion (from hyperindex-vz7)","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nIn `buildSubscriptionType()`, find the per-collection subscription resolver that returns `event.Record` directly (around line 335). Add a call to `b.coerceRequiredFields()` before returning, matching the pattern used in `createCollectionResolver` and `createSingleRecordResolver`.\n\nThe current code looks like:\n```go\nreturn event.Record, nil\n```\n\nIt should become:\n```go\nif data, ok := event.Record.(map[string]interface{}); ok {\n b.coerceRequiredFields(data, lexiconID)\n}\nreturn event.Record, nil\n```\n\nMake sure to use the correct `lexiconID` variable that is in scope for that subscription field (the collection NSID like `org.hypercerts.claim.activity`). Check the loop variable name — it may be `id`, `lexiconID`, or `nsid` depending on how `buildSubscriptionType` iterates.\n\n## Dont\n- Do not change the subscription infrastructure or WebSocket handling\n- Do not modify `coerceRequiredFields` itself\n- Do not add new dependencies\n- Do not change any other resolver paths","acceptance_criteria":"1. go build ./... succeeds\n2. go test ./... succeeds (no regressions)\n3. The subscription resolver for typed collections calls b.coerceRequiredFields() on the record data before returning it\n4. 
The coercion uses the correct collection NSID (lexicon ID) for the subscription field being resolved","status":"open","priority":1,"issue_type":"bug","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-19T20:20:15.974813+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:22:26.90495+08:00","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-5ar","depends_on_id":"hyperindex-vz7","type":"discovered-from","created_at":"2026-02-19T20:20:19.381083+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-6bl","title":"Fix: README Tap curl uses wrong Basic auth username (from hyperindex-md3.9)","description":"Review of hyperindex-md3.9 found: the README curl example was changed to `-u \":${TAP_ADMIN_PASSWORD}\"` (empty username), but admin.go line 157 sends `\"admin:\" + c.password` as the credential. These produce different Authorization headers: `Basic base64(:pass)` vs `Basic base64(admin:pass)`. The correct fix is `-u \"admin:${TAP_ADMIN_PASSWORD}\"`. Evidence: admin.go:157 `creds := base64.StdEncoding.EncodeToString([]byte(\"admin:\" + c.password))`; TestAdminClient_BasicAuth line 280 confirms username is 'admin'. The spec description incorrectly stated the code used SetBasicAuth(\"\", password) — the worker faithfully implemented the wrong spec.","status":"open","priority":1,"issue_type":"bug","owner":"einstein.climateai.org","created_at":"2026-02-18T16:48:34.553314+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:48:34.553314+08:00","dependencies":[{"issue_id":"hyperindex-6bl","depends_on_id":"hyperindex-md3.9","type":"discovered-from","created_at":"2026-02-18T16:48:53.282601+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-73q","title":"Fix: docker-compose.yml and docker-compose.postgres.yml still have static SECRET_KEY_BASE default (from hyperindex-md3.8)","description":"Review of hyperindex-md3.8 found: the security fix was correctly applied to docker-compose.tap.yml but docker-compose.yml (line 12) and docker-compose.postgres.yml (line 12) still use the static default `development-secret-key-change-in-production-64chars`. Anyone deploying with these files without setting SECRET_KEY_BASE gets a known, forgeable session key. The B8 spec explicitly excluded these files, so the worker was correct to skip them — but the vulnerability remains. Both files need the same `:?` treatment: `${SECRET_KEY_BASE:?SECRET_KEY_BASE must be set - generate with: openssl rand -hex 32}`.","status":"open","priority":2,"issue_type":"bug","owner":"einstein.climateai.org","created_at":"2026-02-18T16:48:40.474421+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:48:40.474421+08:00","dependencies":[{"issue_id":"hyperindex-73q","depends_on_id":"hyperindex-md3.8","type":"discovered-from","created_at":"2026-02-18T16:48:53.380917+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-a10","title":"Make dashboard stats, activity chart, and recent activity public (no auth required)","description":"## Files\n- internal/graphql/admin/schema.go (modify)\n\n## What to do\nRemove the `requireAdmin(p.Context)` guard from three query field resolvers in `buildQueryType()`:\n\n1. **`statistics`** (around line 62-65): Remove the `if err := requireAdmin(...)` block. Update Description from `\"Get system statistics (admin only)\"` to `\"Get system statistics (public)\"`.\n\n2. **`activityBuckets`** (around line 118-121): Remove the `if err := requireAdmin(...)` block. Update Description from `\"Get aggregated activity data for a time range (admin only)\"` to `\"Get aggregated activity data for a time range (public)\"`.\n\n3. **`recentActivity`** (around line 136-139): Remove the `if err := requireAdmin(...)` block. Update Description from `\"Get recent activity entries (admin only)\"` to `\"Get recent activity entries (public)\"`.\n\nKeep the rest of each resolver body intact (args parsing, hour clamping, etc). Do NOT touch any other fields — `settings`, `lexicons`, `oauthClients`, `isBackfilling`, `labelDefinitions`, `labels` must stay admin-only.\n\n## Don't\n- Remove requireAdmin from any other field besides statistics, activityBuckets, recentActivity\n- Change resolver logic, argument handling, or return types\n- Modify any other file","acceptance_criteria":"1. `go build ./...` succeeds\n2. `go test ./...` passes\n3. `curl -s -X POST -H 'Content-Type: application/json' -d '{\"query\":\"{ statistics { recordCount } }\"}' http://localhost:8080/admin/graphql` returns data (no X-User-DID header, no auth) without 'admin privileges required' error\n4. `curl -s -X POST -H 'Content-Type: application/json' -d '{\"query\":\"{ activityBuckets(range: ONE_DAY) { timestamp } }\"}' http://localhost:8080/admin/graphql` returns data without auth\n5. 
`curl -s -X POST -H 'Content-Type: application/json' -d '{\"query\":\"{ recentActivity(hours: 1) { id } }\"}' http://localhost:8080/admin/graphql` returns data without auth\n6. `curl -s -X POST -H 'Content-Type: application/json' -d '{\"query\":\"{ settings { domainAuthority } }\"}' http://localhost:8080/admin/graphql` still returns 'admin privileges required' (settings remains protected)","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-19T10:09:54.01132+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T10:10:59.004248+08:00","closed_at":"2026-02-19T10:10:59.004248+08:00","close_reason":"9e8acfc Make statistics, activityBuckets, recentActivity public","labels":["scope:small"]}
+{"id":"hyperindex-bn7","title":"Fix: ZeroValueForType format parameter is accepted but never used (from hyperindex-vz7.1)","description":"## Files\n- internal/lexicon/types.go (modify)\n- internal/lexicon/types_test.go (modify)\n\n## What to do\nRemove the unused `format` parameter from `ZeroValueForType`. It currently accepts `format string` but never reads it.\n\n1. Change the signature from `ZeroValueForType(propType, format string) interface{}` to `ZeroValueForType(propType string) interface{}`\n2. Update all callers. There is one caller in `internal/graphql/schema/builder.go` inside `coerceRequiredFields()`:\n ```go\n // Before:\n zero := lexicon.ZeroValueForType(entry.Property.Type, entry.Property.Format)\n // After:\n zero := lexicon.ZeroValueForType(entry.Property.Type)\n ```\n3. Update tests in `internal/lexicon/types_test.go` — remove the `format` field from the test table and update the call sites\n\n## Dont\n- Do not change the behavior of the function\n- Do not add format-dependent logic — just remove the dead parameter","acceptance_criteria":"1. go build ./... succeeds\n2. go test ./... succeeds\n3. ZeroValueForType signature is ZeroValueForType(propType string) interface{}\n4. No callers pass a format parameter\n5. All existing tests still pass with updated signatures","status":"open","priority":3,"issue_type":"bug","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":10,"created_at":"2026-02-19T20:20:27.870624+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:22:35.280806+08:00","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-bn7","depends_on_id":"hyperindex-vz7","type":"discovered-from","created_at":"2026-02-19T20:20:30.798429+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-m1t","title":"Fix: README TAP_ADMIN_PASSWORD table entry still says default is empty after B8 made it required (from hyperindex-md3.8)","description":"Review of hyperindex-md3.8 found: README.md line 91 environment variable table lists TAP_ADMIN_PASSWORD default as '*(empty)*'. After hyperindex-md3.8, docker-compose.tap.yml uses ${TAP_ADMIN_PASSWORD:?...} which makes it required with no default. The table entry is now misleading — it should say '*(required)*' or '*(no default — must be set)*' to match the compose file behavior.","status":"open","priority":3,"issue_type":"bug","owner":"einstein.climateai.org","created_at":"2026-02-18T16:48:45.609562+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:48:45.609562+08:00","dependencies":[{"issue_id":"hyperindex-m1t","depends_on_id":"hyperindex-md3.8","type":"discovered-from","created_at":"2026-02-18T16:48:53.471005+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3","title":"Epic: Fix Tap integration bugs found by code review","description":"Ten bugs (B1-B10) were identified by specialized reviewers of the Tap sidecar integration (epic hyperindex-2hm). These range from HIGH severity (data race, static secret) to LOW (log noise, docs). All bugs are in internal/tap/ package files, docker-compose.tap.yml, and README.md. Success = all 10 bugs fixed, all existing tests still pass (go test ./...), new tests added for each fix, and go test -race ./internal/tap/... passes clean.","status":"closed","priority":1,"issue_type":"epic","owner":"einstein.climateai.org","created_at":"2026-02-18T16:30:49.657865+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:45:50.134402+08:00","closed_at":"2026-02-18T16:45:50.134402+08:00","close_reason":"b28087c all 10 Tap review bugs fixed, all tests green","labels":["scope:medium"]}
+{"id":"hyperindex-md3.1","title":"Fix data race: Stop() writes close frame concurrently with dispatch() acks","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nIn `Stop()` (line 280-297), the call to `conn.WriteMessage(CloseMessage, ...)` at line 290 races with `writeText()` calls from `dispatch()` because gorilla/websocket does not allow concurrent writers. The `connMu` mutex only protects the `c.conn` pointer swap, NOT the write.\n\nFix: Remove the `WriteMessage(CloseMessage, ...)` call from `Stop()`. Keep only `conn.Close()`. The gorilla library sends a close frame internally on `Close()` when the connection is not already closed.\n\nThe fixed `Stop()` should look like:\n```go\nfunc (c *Consumer) Stop() {\n c.stopOnce.Do(func() {\n close(c.done)\n c.connMu.Lock()\n conn := c.conn\n c.conn = nil\n c.connMu.Unlock()\n if conn != nil {\n _ = conn.Close()\n }\n })\n}\n```\n\nAdd a test `TestConsumer_StopDuringDispatch` that:\n1. Sends many events rapidly from the mock server\n2. Calls `Stop()` concurrently while events are being dispatched\n3. Passes with `go test -race`\n\n## Dont\n- Do NOT add a write mutex — the simpler fix of removing the close-frame write is sufficient\n- Do NOT change the `connMu` usage for the pointer swap (that is correct)\n- Do NOT change `writeText()` signature","acceptance_criteria":"1. `go test -race ./internal/tap/...` passes with no race detected\n2. `TestConsumer_StopDuringDispatch` exists and passes\n3. `Stop()` no longer calls `conn.WriteMessage(CloseMessage, ...)`\n4. 
All existing consumer tests still pass","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:31:04.408366+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:40:26.969429+08:00","closed_at":"2026-02-18T16:40:26.969429+08:00","close_reason":"788e1cf fix: remove concurrent WriteMessage in Stop() to eliminate data race","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-md3.1","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:31:04.40981+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.10","title":"Suppress spurious context.Canceled error log on graceful shutdown","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nIn `Start()` at lines 116-125, when the context is cancelled during shutdown, `runOnce()` returns `context.Canceled`. The code then logs it as:\n```\nslog.Error(\"Tap connection lost, will reconnect\", \"error\", err, \"backoff\", backoff)\n```\n\nThis is misleading — a context cancellation during shutdown is expected, not an error. The log message says \"will reconnect\" but the very next iteration of the for loop will hit the `\u003c-ctx.Done()` check and return.\n\nFix: Check for context cancellation before logging. Insert the ctx check BEFORE the error logging block:\n\n```go\nerr := c.runOnce(ctx)\n\n// Reset backoff after successful connection.\nbackoff = minBackoff\n\n// Check if we should stop after connection ended.\nselect {\ncase \u003c-ctx.Done():\n return ctx.Err()\ncase \u003c-c.done:\n return nil\ndefault:\n}\n\n// Only log reconnection messages for genuine errors (not shutdown).\nif err != nil {\n slog.Error(\"Tap connection lost, will reconnect\",\n \"error\", err,\n \"backoff\", backoff,\n )\n} else {\n slog.Warn(\"Tap connection closed unexpectedly, will reconnect\",\n \"backoff\", backoff,\n )\n}\n```\n\nThis is actually already the structure — the ctx/done checks at lines 108-114 run BEFORE the log. But the issue is that `runOnce()` can return with `ctx.Err()` when `ReadMessage` fails due to context cancellation, but the outer `select` on `ctx.Done()` might not trigger because the done channel fires first (race between done and ctx). 
\n\nThe real fix: After the stop/ctx checks at lines 108-114, add an explicit check for `context.Canceled` or `context.DeadlineExceeded`:\n\n```go\nif err != nil {\n if ctx.Err() != nil {\n return ctx.Err()\n }\n slog.Error(\"Tap connection lost, will reconnect\",\n \"error\", err,\n \"backoff\", backoff,\n )\n} else {\n slog.Warn(\"Tap connection closed unexpectedly, will reconnect\",\n \"backoff\", backoff,\n )\n}\n```\n\nAlso change the log level from `slog.Error` to `slog.Warn` — a lost connection that triggers reconnection is a warning, not an error. Errors should be reserved for unrecoverable situations.\n\nAdd a test `TestConsumer_ShutdownNoSpuriousLog` that:\n1. Uses a `slog.Handler` that captures log records\n2. Starts the consumer, then cancels the context\n3. Verifies no Error-level log messages are emitted during shutdown\n\n## Dont\n- Do NOT change the reconnection logic\n- Do NOT suppress ALL error logs — only context cancellation during shutdown\n- Do NOT change the Stop() method (B1 handles that)","acceptance_criteria":"1. When context is cancelled, `Start()` does NOT log an Error-level message about connection lost\n2. Genuine connection errors still log at Warn level (changed from Error)\n3. `TestConsumer_ShutdownNoSpuriousLog` exists and passes\n4. All existing consumer tests still pass\n5. 
`go test -race ./internal/tap/...` passes","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:33:17.579359+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:45:46.285687+08:00","closed_at":"2026-02-18T16:45:46.285687+08:00","close_reason":"b28087c suppress spurious context.Canceled error log on graceful shutdown","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-md3.10","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:33:17.580701+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-md3.10","depends_on_id":"hyperindex-md3.3","type":"blocks","created_at":"2026-02-18T16:33:32.999393+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.2","title":"Fix backoff never resetting after successful reconnection","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nIn `Start()` (lines 92-143), `backoff` is initialized to `minBackoff` at line 93, then doubled on each iteration at lines 136-140, but it is never reset after a successful `runOnce()` call. This means after one disconnection-reconnection cycle, the backoff stays escalated forever (e.g., after reaching 2 minutes, every subsequent reconnection waits 2 minutes even if the connection was healthy for hours).\n\nFix: Add `backoff = minBackoff` right after the `runOnce(ctx)` call returns successfully (i.e., before the error/stop checks). Insert at line ~106:\n\n```go\nerr := c.runOnce(ctx)\n\n// Reset backoff after a successful connection that processed events.\nbackoff = minBackoff\n```\n\nThe comment at line 170 (\"Reset backoff is implicit — caller resets on next successful connection\") is now wrong. Remove that comment.\n\nAdd a test `TestConsumer_BackoffResetsAfterSuccess` that:\n1. First connection: server closes immediately → triggers backoff\n2. Second connection: server stays open, sends one event, then closes cleanly\n3. Third connection: verify it reconnects within ~1.5s (not 2s+), confirming backoff was reset\n\n## Dont\n- Do NOT change the backoff doubling logic (lines 136-140)\n- Do NOT change minBackoff or maxBackoff constants\n- Do NOT add configurable backoff — keep it simple","acceptance_criteria":"1. After a successful connection, backoff resets to minBackoff (1s)\n2. The stale comment at line 170 is removed\n3. `TestConsumer_BackoffResetsAfterSuccess` exists and passes\n4. All existing consumer tests still pass\n5. 
`go test -race ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:31:17.44932+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:42:11.42544+08:00","closed_at":"2026-02-18T16:42:11.42544+08:00","close_reason":"692e95b fix: reset backoff to minBackoff after successful reconnection","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-md3.2","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:31:17.450448+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-md3.2","depends_on_id":"hyperindex-md3.1","type":"blocks","created_at":"2026-02-18T16:33:32.785261+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.3","title":"Add WebSocket SetReadLimit to prevent OOM from oversized messages","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nIn `runOnce()` at line 152, after `websocket.DefaultDialer.DialContext()` succeeds, there is no call to `conn.SetReadLimit()`. The gorilla/websocket default read limit is 0 (unlimited). A malicious or buggy Tap server could send a multi-GB message causing OOM.\n\nFix: Add a constant and a `SetReadLimit` call immediately after the successful dial:\n\n```go\nconst maxMessageSize = 4 * 1024 * 1024 // 4 MB\n```\n\nAdd this as a package-level constant in the `const` block at lines 14-29.\n\nThen in `runOnce()`, right after the successful dial and before the `connMu.Lock()`:\n\n```go\nconn, _, err := websocket.DefaultDialer.DialContext(ctx, channelURL, nil)\nif err != nil {\n return fmt.Errorf(\"failed to connect to Tap: %w\", err)\n}\nconn.SetReadLimit(maxMessageSize)\n```\n\nAdd a test `TestConsumer_LargeMessageRejected` that:\n1. Mock server sends a text message larger than 4MB\n2. Verify the consumer gets a read error (the connection closes with CloseMessageTooBig)\n3. The consumer reconnects (does not crash or OOM)\n\n## Dont\n- Do NOT set the limit lower than 4MB — AT Protocol records can be up to 1MB\n- Do NOT make the limit configurable — 4MB is a reasonable fixed cap","acceptance_criteria":"1. `maxMessageSize` constant (4*1024*1024) exists in consumer.go\n2. `conn.SetReadLimit(maxMessageSize)` is called after every successful dial\n3. `TestConsumer_LargeMessageRejected` exists and passes\n4. All existing consumer tests still pass\n5. 
`go test -race ./internal/tap/...` passes","status":"in_progress","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T16:31:29.214553+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:42:25.997348+08:00","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-md3.3","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:31:29.215597+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-md3.3","depends_on_id":"hyperindex-md3.2","type":"blocks","created_at":"2026-02-18T16:33:32.892499+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.4","title":"Add nil guard for PubSub in IndexHandler to prevent nil-pointer panic","description":"## Files\n- internal/tap/handler.go (modify)\n- internal/tap/handler_test.go (modify)\n\n## What to do\nIn `HandleRecord()`, the `h.activity` field is nil-checked at lines 55 and 73 before calling methods on it, but `h.pubsub` is NOT nil-checked at lines 66 and 72. If `NewIndexHandler()` is called with `pubsub: nil`, line 66 panics with a nil-pointer dereference.\n\nFix: Add nil guards before both `h.pubsub.PublishRecord(...)` calls:\n\nLine 66 (create/update case):\n```go\nif h.pubsub != nil {\n h.pubsub.PublishRecord(eventType, uri, event.CID, event.DID, event.Collection, event.Record)\n}\n```\n\nLine 72 (delete case):\n```go\nif h.pubsub != nil {\n h.pubsub.PublishRecord(subscription.EventDelete, uri, \"\", event.DID, event.Collection, nil)\n}\n```\n\nAdd a test `TestIndexHandler_HandleRecord_NilPubSub` that:\n1. Creates an IndexHandler with `pubsub: nil` (similar to existing `TestIndexHandler_HandleRecord_NilActivity`)\n2. Calls HandleRecord with a create event\n3. Verifies it does NOT panic and the record is still stored correctly\n4. Also test with a delete event\n\n## Dont\n- Do NOT make pubsub a required parameter or add validation in NewIndexHandler\n- Do NOT change the PubSub interface\n- Keep the same pattern as the existing nil-check for activity","acceptance_criteria":"1. `h.pubsub.PublishRecord(...)` calls at both lines 66 and 72 are wrapped in `if h.pubsub != nil`\n2. `TestIndexHandler_HandleRecord_NilPubSub` exists and passes (create + delete with nil pubsub)\n3. No panic when pubsub is nil\n4. All existing handler tests still pass\n5. 
`go test ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T16:31:41.815405+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:40:27.061154+08:00","closed_at":"2026-02-18T16:40:27.061154+08:00","close_reason":"ee9d022 Add nil guard for PubSub in IndexHandler to prevent nil-pointer panic","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-md3.4","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:31:41.820488+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.5","title":"Validate required fields in ParseEvent to prevent garbage URIs","description":"## Files\n- internal/tap/event.go (modify)\n- internal/tap/event_test.go (modify)\n\n## What to do\n`ParseEvent()` (lines 60-68) only validates that `event.Type` is non-empty. It does NOT validate that record events have non-empty DID, Collection, and RKey fields. Empty fields produce garbage URIs like `at:///` from `RecordEvent.URI()`.\n\nFix: Add field validation in `ParseEvent()` after unmarshaling, for record events:\n\n```go\nfunc ParseEvent(data []byte) (*Event, error) {\n var event Event\n if err := json.Unmarshal(data, \u0026event); err != nil {\n return nil, fmt.Errorf(\"failed to parse tap event: %w\", err)\n }\n if event.Type == \"\" {\n return nil, fmt.Errorf(\"tap event missing type field\")\n }\n // Validate required fields for record events.\n if event.Type == EventTypeRecord \u0026\u0026 event.Record != nil {\n if event.Record.DID == \"\" {\n return nil, fmt.Errorf(\"tap record event missing did field\")\n }\n if event.Record.Collection == \"\" {\n return nil, fmt.Errorf(\"tap record event missing collection field\")\n }\n if event.Record.RKey == \"\" {\n return nil, fmt.Errorf(\"tap record event missing rkey field\")\n }\n if event.Record.Action == \"\" {\n return nil, fmt.Errorf(\"tap record event missing action field\")\n }\n }\n // Validate required fields for identity events.\n if event.Type == EventTypeIdentity \u0026\u0026 event.Identity != nil {\n if event.Identity.DID == \"\" {\n return nil, fmt.Errorf(\"tap identity event missing did field\")\n }\n }\n return \u0026event, nil\n}\n```\n\nAdd table-driven tests in event_test.go for these new validation cases:\n- Record with empty DID → error\n- Record with empty Collection → error\n- Record with empty RKey → error\n- Record with empty Action → error\n- Identity with empty DID → error\n- Valid record → no error (already exists, verify still passes)\n- Valid identity → no error 
(already exists, verify still passes)\n\n## Dont\n- Do NOT validate DID format (e.g., did:plc: prefix) — just check non-empty\n- Do NOT validate in handler.go — keep validation at the parse layer\n- Do NOT change the Event/RecordEvent/IdentityEvent struct definitions","acceptance_criteria":"1. `ParseEvent` returns error for record events with empty DID, Collection, RKey, or Action\n2. `ParseEvent` returns error for identity events with empty DID\n3. At least 5 new test cases in event_test.go covering all validation paths\n4. All existing event tests still pass\n5. `go test ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T16:31:55.130608+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:40:27.151654+08:00","closed_at":"2026-02-18T16:40:27.151654+08:00","close_reason":"c763c4f validate required fields in ParseEvent to prevent garbage URIs","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-md3.5","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:31:55.131661+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.6","title":"Return error from records.Delete failure instead of silently swallowing","description":"## Files\n- internal/tap/handler.go (modify)\n- internal/tap/handler_test.go (modify)\n\n## What to do\nIn `HandleRecord()` at lines 69-71, when `records.Delete()` fails, the error is logged at Debug level and execution continues to send an ack (via the nil return at line 81). This means a failed delete is acked to Tap and the event is lost forever.\n\nCurrent code (lines 68-71):\n```go\ncase ActionDelete:\n if err := h.records.Delete(ctx, uri); err != nil {\n slog.Debug(\"Failed to delete record\", \"uri\", uri, \"error\", err)\n }\n```\n\nFix: Return the error so the consumer does NOT ack:\n\n```go\ncase ActionDelete:\n if err := h.records.Delete(ctx, uri); err != nil {\n return fmt.Errorf(\"failed to delete record: %w\", err)\n }\n```\n\nKeep the PubSub publish and activity logging AFTER the successful delete (they should only run if delete succeeded). The fixed block should be:\n\n```go\ncase ActionDelete:\n if err := h.records.Delete(ctx, uri); err != nil {\n return fmt.Errorf(\"failed to delete record: %w\", err)\n }\n if h.pubsub != nil {\n h.pubsub.PublishRecord(subscription.EventDelete, uri, \"\", event.DID, event.Collection, nil)\n }\n if h.activity != nil {\n if _, err := h.activity.LogActivity(ctx, time.Now(), \"delete\", event.Collection, event.DID, event.RKey, \"\"); err != nil {\n slog.Debug(\"Failed to log delete activity\", \"error\", err)\n }\n }\n```\n\nNOTE: If B4 (pubsub nil guard) lands first, the pubsub nil check will already be there. If not, add it here.\n\nAdd a test `TestIndexHandler_HandleRecord_DeleteError` that:\n1. Creates a handler with a real SQLite DB\n2. Attempts to delete a non-existent URI\n3. Verifies... actually `DELETE FROM record WHERE uri = X` succeeds even if nothing matches (SQL DELETE of 0 rows is not an error). So to test this, we need to cause an actual DB error. 
Instead, verify the behavioral change: the test should verify that the function properly wraps and returns errors from the delete path by checking the error message format.\n\nA simpler approach: Just verify the code path by confirming that for a valid delete (record exists), it returns nil and publishes correctly. The existing `TestIndexHandler_HandleRecord_Delete` already covers this.\n\nFocus the new test on verifying the error propagation by checking the `fmt.Errorf` wrapping is present in the code structure. Or add a test that verifies delete of a non-existent record does NOT return an error (since SQL DELETE of 0 rows is fine).\n\n## Dont\n- Do NOT change the records.Delete() repository method signature\n- Do NOT add a \"not found\" check for 0 affected rows — SQL DELETE of 0 rows is fine\n- Do NOT change the create/update error handling pattern (those already return errors correctly)","acceptance_criteria":"1. `HandleRecord` with ActionDelete returns `fmt.Errorf(\"failed to delete record: %w\", err)` when `records.Delete()` fails\n2. PubSub publish and activity logging only occur AFTER successful delete\n3. Existing `TestIndexHandler_HandleRecord_Delete` still passes\n4. All existing handler tests still pass\n5. 
`go test ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T16:32:14.758776+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:42:11.520776+08:00","closed_at":"2026-02-18T16:42:11.520776+08:00","close_reason":"9525b34 return error from records.Delete failure instead of silently swallowing","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-md3.6","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:32:14.760746+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-md3.6","depends_on_id":"hyperindex-md3.4","type":"blocks","created_at":"2026-02-18T16:33:33.695173+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.7","title":"Update activity status to completed/failed after record processing","description":"## Files\n- internal/tap/handler.go (modify)\n- internal/tap/handler_test.go (modify)\n\n## What to do\nIn `HandleRecord()`, `LogActivity()` is called at lines 56 and 74 which returns `(int64, error)` — the int64 is the activity row ID. Currently the ID is discarded (assigned to `_`). The activity entry is logged with status \"pending\" (the default in `LogActivity`) but never updated to \"completed\" or \"failed\". Entries stay stuck as \"pending\" forever.\n\nFix: For create/update (lines 54-59), capture the activity ID and update status:\n\n```go\n// Log activity (if activity repo available)\nif h.activity != nil {\n activityID, err := h.activity.LogActivity(ctx, time.Now(), string(event.Action), event.Collection, event.DID, event.RKey, string(event.Record))\n if err != nil {\n slog.Debug(\"Failed to log activity\", \"error\", err)\n } else {\n if err := h.activity.UpdateStatus(ctx, activityID, \"completed\", nil); err != nil {\n slog.Debug(\"Failed to update activity status\", \"error\", err)\n }\n }\n}\n```\n\nFor delete (lines 73-77), same pattern:\n\n```go\nif h.activity != nil {\n activityID, err := h.activity.LogActivity(ctx, time.Now(), \"delete\", event.Collection, event.DID, event.RKey, \"\")\n if err != nil {\n slog.Debug(\"Failed to log delete activity\", \"error\", err)\n } else {\n if err := h.activity.UpdateStatus(ctx, activityID, \"completed\", nil); err != nil {\n slog.Debug(\"Failed to update activity status\", \"error\", err)\n }\n }\n}\n```\n\nThe `UpdateStatus` method signature is: `func (r *JetstreamActivityRepository) UpdateStatus(ctx context.Context, id int64, status string, errorMessage *string) error`\n\nAdd/update the test `TestIndexHandler_HandleRecord_ActivityLogged` to also verify:\n1. The activity entry status is \"completed\" (not \"pending\")\n2. 
Query the activity entry by ID and check its status field\n\nThe `JetstreamActivityRepository` does not have a `GetByID` method. Instead, use `GetRecentActivity(ctx, 1)` and check the first entry status. Alternatively, the test can query the DB directly via testutil.\n\n## Dont\n- Do NOT change the LogActivity or UpdateStatus signatures\n- Do NOT make activity logging blocking for the main flow — keep errors as Debug-level log-and-continue\n- Do NOT add a LogActivityWithStatus(\"completed\") shortcut — use the existing two-step LogActivity+UpdateStatus pattern for consistency with the rest of the codebase","acceptance_criteria":"1. Activity ID from `LogActivity` is captured (not discarded) in both create/update and delete paths\n2. `UpdateStatus(ctx, activityID, \"completed\", nil)` is called after successful LogActivity\n3. Test verifies activity entries have status \"completed\" not \"pending\"\n4. Activity update errors are logged at Debug level (not returned)\n5. All existing handler tests still pass\n6. `go test ./internal/tap/...` passes","status":"in_progress","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T16:32:31.735235+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:42:27.741892+08:00","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-md3.7","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:32:31.73632+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-md3.7","depends_on_id":"hyperindex-md3.6","type":"blocks","created_at":"2026-02-18T16:33:33.799152+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.8","title":"Remove static SECRET_KEY_BASE default from docker-compose.tap.yml","description":"## Files\n- docker-compose.tap.yml (modify)\n\n## What to do\nAt line 31 in docker-compose.tap.yml:\n```yaml\nSECRET_KEY_BASE: \"${SECRET_KEY_BASE:-development-secret-key-change-in-production-64chars}\"\n```\n\nThe static default `development-secret-key-change-in-production-64chars` is a security risk — anyone who deploys with the default compose file without setting the env var gets a known secret key. Session tokens signed with this key would be forgeable.\n\nFix: Change to require the variable be set:\n```yaml\nSECRET_KEY_BASE: \"${SECRET_KEY_BASE:?SECRET_KEY_BASE must be set - generate with: openssl rand -hex 32}\"\n```\n\nThe `:?` syntax makes docker compose fail with an error message if the variable is not set.\n\nAlso fix the `TAP_ADMIN_PASSWORD` default on line 15 — it has `:-` (empty default) which means Tap admin API runs without auth:\n```yaml\nTAP_ADMIN_PASSWORD: \"${TAP_ADMIN_PASSWORD:?TAP_ADMIN_PASSWORD must be set}\"\n```\n\nAnd on line 29 (hyperindex service):\n```yaml\nTAP_ADMIN_PASSWORD: \"${TAP_ADMIN_PASSWORD:?TAP_ADMIN_PASSWORD must be set}\"\n```\n\nAlso while here, fix C4 — bind the Tap port to localhost only (line 7):\n```yaml\nports:\n - \"127.0.0.1:2480:2480\"\n```\n\n## Dont\n- Do NOT change any other environment variables\n- Do NOT change the Dockerfile or docker-compose.yml (only docker-compose.tap.yml)\n- Do NOT remove the ADMIN_DIDS default (empty is fine — it just means no admin access)","acceptance_criteria":"1. SECRET_KEY_BASE uses `${SECRET_KEY_BASE:?...}` syntax (no default value)\n2. TAP_ADMIN_PASSWORD uses `${TAP_ADMIN_PASSWORD:?...}` syntax on BOTH services (lines 15 and 29)\n3. Tap port is bound to localhost: `127.0.0.1:2480:2480`\n4. `docker compose -f docker-compose.tap.yml config` fails with clear error if SECRET_KEY_BASE or TAP_ADMIN_PASSWORD are unset\n5. 
File is valid YAML","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T16:32:46.578786+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:40:27.244735+08:00","closed_at":"2026-02-18T16:40:27.244735+08:00","close_reason":"7b362ca security: require SECRET_KEY_BASE and TAP_ADMIN_PASSWORD, bind tap to localhost","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-md3.8","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:32:46.580309+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-md3.9","title":"Fix README Tap curl example: Bearer should be Basic auth","description":"## Files\n- README.md (modify)\n\n## What to do\nAt line 72 in README.md, the Tap admin API curl example uses `Bearer` auth:\n```bash\ncurl -X POST http://localhost:2480/repos/add \\\n -H \"Authorization: Bearer ${TAP_ADMIN_PASSWORD}\" \\\n```\n\nBut the Tap admin client in `internal/tap/admin.go` sends Basic auth (password-only, no username). Looking at the Tap source code, it actually accepts either format depending on version, but the code in admin.go uses:\n```go\nreq.SetBasicAuth(\"\", c.password)\n```\n\nFix: Update the README curl example to match what the code sends:\n```bash\ncurl -X POST http://localhost:2480/repos/add \\\n -u \":${TAP_ADMIN_PASSWORD}\" \\\n -H \"Content-Type: application/json\" \\\n -d '{\"dids\": [\"did:plc:your-did-here\"]}'\n```\n\nThe `-u \":password\"` syntax sends Basic auth with empty username, which is equivalent to what `req.SetBasicAuth(\"\", password)` does.\n\n## Dont\n- Do NOT change the admin.go code — the README should match the code\n- Do NOT change any other README sections\n- Do NOT add additional curl examples","acceptance_criteria":"1. README curl example for Tap admin uses `-u \":${TAP_ADMIN_PASSWORD}\"` (Basic auth) instead of `-H \"Authorization: Bearer ...\"`\n2. The curl command is syntactically correct and would work with a running Tap instance\n3. 
No other README sections are changed","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":10,"created_at":"2026-02-18T16:32:57.530897+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:40:27.336004+08:00","closed_at":"2026-02-18T16:40:27.336004+08:00","close_reason":"db029dc fix: use Basic auth in README Tap curl example","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-md3.9","depends_on_id":"hyperindex-md3","type":"parent-child","created_at":"2026-02-18T16:32:57.532476+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00","title":"Epic: Fix bugs identified by code review of filter-feature","description":"## Why\nThe filter-feature branch (epic hyperindex-49s) has 14 completed tasks implementing GraphQL filtering, sorting, search, and pagination. Eight specialized code reviewers identified 10 bugs ranging from data corruption (cursor encoding) to security issues (LIKE wildcard injection) to DoS vectors (unbounded IN clauses, no query timeout). These must be fixed before merging to main.\n\n## What Success Looks Like\n- All 4 HIGH priority bugs fixed: cursor encoding, backward pagination, IN clause limit, query timeout\n- All 6 MEDIUM priority bugs fixed: LIKE escape, DID filter, field name collisions, JSON error logging, search min length, filter cap\n- Missing unit tests added for helper functions\n- All existing tests still pass (go test ./...)\n- No breaking changes to the GraphQL API\n\n## Key Constraints\n- All changes on the filter-feature branch\n- Must work on both SQLite and PostgreSQL\n- Each fix must include a regression test\n- No breaking changes to existing queries","status":"closed","priority":1,"issue_type":"epic","owner":"einstein.climateai.org","created_at":"2026-02-18T15:32:53.927543+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:59:22.255246+08:00","closed_at":"2026-02-18T15:59:22.255246+08:00","close_reason":"56a3c6f All 11 bug fixes from code review complete, all tests passing","labels":["scope:medium"]}
+{"id":"hyperindex-q00.1","title":"Fix cursor pipe delimiter collision in encodeCursorValues/decodeCursorValues","description":"## Files\n- internal/graphql/schema/builder.go (modify) — lines 1086-1098\n\n## What to do\nReplace the `|`-delimited cursor encoding with JSON array encoding to prevent corruption when sort field values contain `|`.\n\n**Current code (broken):**\n```go\nfunc encodeCursorValues(values ...string) string {\n return base64.URLEncoding.EncodeToString([]byte(strings.Join(values, \"|\")))\n}\nfunc decodeCursorValues(cursor string) ([]string, error) {\n data, err := base64.URLEncoding.DecodeString(cursor)\n if err != nil { return nil, fmt.Errorf(\"invalid cursor\") }\n return strings.Split(string(data), \"|\"), nil\n}\n```\n\n**New implementation:**\n```go\nfunc encodeCursorValues(values ...string) string {\n jsonBytes, _ := json.Marshal(values)\n return base64.URLEncoding.EncodeToString(jsonBytes)\n}\nfunc decodeCursorValues(cursor string) ([]string, error) {\n data, err := base64.URLEncoding.DecodeString(cursor)\n if err != nil { return nil, fmt.Errorf(\"invalid cursor\") }\n var parts []string\n if err := json.Unmarshal(data, \u0026parts); err != nil {\n // Backward compatibility: try legacy pipe-delimited format\n parts = strings.Split(string(data), \"|\")\n if len(parts) \u003c 2 {\n return nil, fmt.Errorf(\"invalid cursor format\")\n }\n }\n return parts, nil\n}\n```\n\nThe backward-compatibility fallback ensures existing cursors from clients still work during the transition. The `encodeCursor` and `decodeCursor` wrapper functions remain unchanged — they delegate to these.\n\n## Dont\n- Do NOT change the `encodeCursor`/`decodeCursor` wrapper functions (lines 1100-1117) — they delegate to these and should keep working\n- Do NOT change the base64 encoding scheme (URLEncoding)\n- Do NOT remove the `strings` import — it is still used elsewhere in the file","acceptance_criteria":"1. 
`encodeCursorValues(\"hello|world\", \"at://did:plc:abc/col/rkey\")` produces a cursor that `decodeCursorValues` correctly decodes back to `[\"hello|world\", \"at://did:plc:abc/col/rkey\"]`\n2. Legacy cursors (pipe-delimited, no pipes in values) still decode correctly via the fallback path\n3. `decodeCursorValues(\"!!!invalid!!!\")` returns an error\n4. `go test -v -run TestEncodeDecode ./internal/graphql/schema/...` passes — add a table-driven test with cases: normal values, values containing pipes, empty strings, single value, legacy format\n5. All existing tests pass: `go test ./...`","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T15:33:11.786051+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:44:03.485106+08:00","closed_at":"2026-02-18T15:44:03.485106+08:00","close_reason":"6c2350d Fix cursor pipe delimiter collision with JSON encoding","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.1","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:33:11.787209+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.10","title":"Add MaxFilterConditions cap to prevent abuse","description":"## Files\n- internal/graphql/schema/builder.go (modify) — extractFilters function, around line 540\n- internal/database/repositories/records.go (modify) — add constant\n\n## What to do\nUsers can pass an unlimited number of filter conditions in a single query, which could generate extremely complex SQL. Add a cap.\n\n1. Add constant in `records.go` near the other constants:\n```go\n// MaxFilterConditions is the maximum number of individual filter conditions allowed per query.\nconst MaxFilterConditions = 20\n```\n\n2. In `extractFilters` in `builder.go`, after building the filters slice, validate:\n```go\nfunc extractFilters(whereArg interface{}, collection string, registry *lexicon.Registry) ([]repositories.FieldFilter, string) {\n```\n\nThe function currently returns `([]repositories.FieldFilter, string)`. Change it to return an error:\n```go\nfunc extractFilters(whereArg interface{}, collection string, registry *lexicon.Registry) ([]repositories.FieldFilter, string, error) {\n```\n\nAfter the loop that builds filters, add:\n```go\nif len(filters) \u003e repositories.MaxFilterConditions {\n return nil, \"\", fmt.Errorf(\"too many filter conditions: %d (maximum %d)\", len(filters), repositories.MaxFilterConditions)\n}\n```\n\n3. Update the caller in `resolveRecordConnection` (line 673) to handle the error:\n```go\nif whereArg, ok := p.Args[\"where\"]; ok \u0026\u0026 whereArg != nil {\n var err error\n filters, didFilter, err = extractFilters(whereArg, collection, b.registry)\n if err != nil {\n return nil, err\n }\n}\n```\n\n## Dont\n- Do NOT count the DID filter toward the cap (it is a single column condition)\n- Do NOT change the filter operators themselves\n- Do NOT set the cap lower than 20 (some legitimate queries may have many fields)","acceptance_criteria":"1. A query with 21 filter conditions returns an error containing \"too many filter conditions\"\n2. 
A query with 20 filter conditions succeeds\n3. A query with 0 filter conditions succeeds\n4. The DID filter does not count toward the cap\n5. `MaxFilterConditions` constant is exported and set to 20\n6. `extractFilters` now returns `([]FieldFilter, string, error)` and the caller handles the error\n7. All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T15:35:25.881969+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:55:54.683671+08:00","closed_at":"2026-02-18T15:55:54.683671+08:00","close_reason":"fff01bd Add MaxFilterConditions cap to prevent filter abuse","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.10","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:35:25.883409+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.11","title":"Add missing unit tests for helper functions","description":"## Files\n- internal/graphql/schema/builder_test.go (create) — new test file\n- internal/graphql/query/connection_test.go (create) — new test file\n\n## What to do\nAdd table-driven unit tests for the following helper functions that currently have zero test coverage:\n\n### 1. ClampPageSize (internal/graphql/query/connection.go)\nCreate `internal/graphql/query/connection_test.go`:\n```go\nfunc TestClampPageSize(t *testing.T) {\n // Cases: 0 → 20 (default), -1 → 20, 50 → 50, 100 → 100, 101 → 100 (max), 200 → 100\n}\n```\n\n### 2. isTotalCountRequested (internal/graphql/schema/builder.go)\nThis is harder to test in isolation because it requires a `graphql.ResolveParams` with AST. If too complex to mock, skip and note in a comment.\n\n### 3. sortFieldValueForRecord (internal/graphql/schema/builder.go)\nCreate `internal/graphql/schema/builder_test.go`:\n```go\nfunc TestSortFieldValueForRecord(t *testing.T) {\n // Cases: nil sortOpt → indexed_at, direct column fields (uri, did, cid, rkey, collection, indexed_at),\n // JSON field present, JSON field missing → \"\"\n}\n```\n\n### 4. extractFilters (internal/graphql/schema/builder.go)\n```go\nfunc TestExtractFilters(t *testing.T) {\n // Cases: nil whereArg, empty map, single eq filter, multiple operators,\n // did filter extraction, unknown field type → defaults to string\n}\n```\nNote: extractFilters requires a lexicon.Registry. Create a minimal one in the test or pass nil and test the default behavior.\n\n### 5. 
emptyConnection (internal/graphql/schema/builder.go)\n```go\nfunc TestEmptyConnection(t *testing.T) {\n // Verify structure: edges is empty slice, pageInfo has all false/nil, totalCount is 0\n}\n```\n\n## Dont\n- Do NOT test private functions that are already covered by integration tests\n- Do NOT add tests that require a running database (those are integration tests)\n- Do NOT modify any production code in this task — only add test files\n- If a function is unexported and hard to test from the _test package, use the same package (not _test suffix)","acceptance_criteria":"1. `go test -v -run TestClampPageSize ./internal/graphql/query/...` passes with at least 5 test cases\n2. `go test -v -run TestSortFieldValueForRecord ./internal/graphql/schema/...` passes with at least 6 test cases\n3. `go test -v -run TestExtractFilters ./internal/graphql/schema/...` passes with at least 4 test cases\n4. `go test -v -run TestEmptyConnection ./internal/graphql/schema/...` passes\n5. All tests are table-driven with descriptive case names\n6. 
All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T15:35:43.714828+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:58:31.003592+08:00","closed_at":"2026-02-18T15:58:31.003592+08:00","close_reason":"56a3c6f test: add unit tests for ClampPageSize, sortFieldValueForRecord, emptyConnection","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:35:43.715871+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.1","type":"blocks","created_at":"2026-02-18T15:35:53.411072+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.2","type":"blocks","created_at":"2026-02-18T15:35:53.523573+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.3","type":"blocks","created_at":"2026-02-18T15:35:53.636488+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.4","type":"blocks","created_at":"2026-02-18T15:35:53.747742+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.5","type":"blocks","created_at":"2026-02-18T15:35:53.865242+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.6","type":"blocks","created_at":"2026-02-18T15:35:53.978956+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.7","type":"blocks","created_at":"2026-02-18T15:35:54.092679+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.8","type":"blocks","created_at":"2026-02-18T15:35:54.208203
+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.9","type":"blocks","created_at":"2026-02-18T15:35:54.331806+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.11","depends_on_id":"hyperindex-q00.10","type":"blocks","created_at":"2026-02-18T15:35:54.44539+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.2","title":"Fix records query backward pagination broken by DefaultValue on first arg","description":"## Files\n- internal/graphql/schema/builder.go (modify) — lines 400-404\n\n## What to do\nRemove `DefaultValue: 20` from the `first` argument of the generic `records` query. The problem: `DefaultValue: 20` means `p.Args[\"first\"].(int)` always succeeds (returns 20), so `hasFirst` at line 659 is always true. When a user sends `records(last: 5, collection: \"...\")`, the validation at line 665 fires \"cannot use both first/after and last/before\" because `hasFirst` is true.\n\n**Current code (broken):**\n```go\n\"first\": \u0026graphql.ArgumentConfig{\n Type: graphql.Int,\n DefaultValue: 20,\n Description: \"Number of records to return\",\n},\n```\n\n**Fix:**\n```go\n\"first\": \u0026graphql.ArgumentConfig{\n Type: graphql.Int,\n Description: \"Number of records to return (default 20, max 100)\",\n},\n```\n\nThis is safe because `resolveRecordConnection` at line 775 already calls `query.ClampPageSize(firstArg)` which returns 20 when firstArg is 0 (the zero value when the type assertion fails).\n\n## Dont\n- Do NOT change `ClampPageSize` behavior\n- Do NOT add DefaultValue to the `last` argument\n- Do NOT modify any other query definitions (typed collection queries do not have this bug — verify they also lack DefaultValue)","acceptance_criteria":"1. GraphQL query `{ records(collection: \"test.collection\", last: 5) { edges { node { uri } } } }` does NOT return error \"cannot use both first/after and last/before\"\n2. GraphQL query `{ records(collection: \"test.collection\") { edges { node { uri } } } }` still returns up to 20 records (ClampPageSize default)\n3. GraphQL query `{ records(collection: \"test.collection\", first: 50) { edges { node { uri } } } }` returns up to 50 records\n4. 
All existing tests pass: `go test ./...`","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T15:33:23.861269+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:38:44.479379+08:00","closed_at":"2026-02-18T15:38:44.479379+08:00","close_reason":"09fd2a3 fix: remove DefaultValue from records query first arg to fix backward pagination","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-q00.2","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:33:23.862324+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.3","title":"Add MaxINListSize limit to prevent SQLite parameter overflow","description":"## Files\n- internal/database/repositories/records.go (modify) — lines 413-426, and near line 16\n\n## What to do\nAdd a `MaxINListSize` constant and validate the `in` operator list length in `buildFilterClause` before building the query.\n\n1. Add constant near the existing batch size constants (line 16-26):\n```go\n// MaxINListSize is the maximum number of values allowed in an IN filter clause.\n// SQLite has a hard 999 parameter limit (SQLITE_MAX_VARIABLE_NUMBER).\n// We cap well below that to leave room for other query parameters.\nconst MaxINListSize = 100\n```\n\n2. In `buildFilterClause`, in the `case \"in\":` block (line 413), add validation:\n```go\ncase \"in\":\n inVals, _ := f.Value.([]interface{})\n if len(inVals) == 0 {\n conditions = append(conditions, \"1 = 0\")\n continue\n }\n if len(inVals) \u003e MaxINListSize {\n return \"\", nil, fmt.Errorf(\"IN filter on field %q exceeds maximum of %d values\", f.Field, MaxINListSize)\n }\n // ... rest unchanged\n```\n\n3. Update `buildFilterClause` signature to return an error:\n```go\nfunc (r *RecordsRepository) buildFilterClause(filters []FieldFilter, startPlaceholder int) (string, []database.Value, error)\n```\n\n4. Update ALL callers of `buildFilterClause` to handle the new error return. Search for `buildFilterClause(` — there should be callers in `GetByCollectionFilteredWithKeysetCursor`, `GetByCollectionSortedWithKeysetCursor`, `GetByCollectionReversedWithKeysetCursor`, and `GetCollectionCountFiltered`.\n\n## Dont\n- Do NOT change the existing `SQLParamBatchSize` constant\n- Do NOT change the behavior for empty IN lists (keep returning \"1 = 0\")\n- Do NOT silently truncate — return an error so the GraphQL layer can report it to the user","acceptance_criteria":"1. `buildFilterClause` with an IN list of 101 values returns a non-nil error containing \"exceeds maximum\"\n2. 
`buildFilterClause` with an IN list of 100 values succeeds\n3. `buildFilterClause` with an empty IN list still returns \"1 = 0\" condition\n4. Add a table-driven test `TestBuildFilterClause_INLimit` in `records_filter_test.go` covering: 0 values, 1 value, 100 values (boundary), 101 values (error)\n5. All callers of `buildFilterClause` propagate the error correctly\n6. All existing tests pass: `go test ./...`","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-18T15:33:40.428928+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:46:10.57729+08:00","closed_at":"2026-02-18T15:46:10.57729+08:00","close_reason":"ff5e7f5 Add MaxINListSize limit to prevent SQLite parameter overflow","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.3","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:33:40.429954+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.4","title":"Add search query timeout and concurrency control","description":"## Files\n- internal/database/repositories/records.go (modify) — Search method (~line 1095)\n- internal/database/repositories/records.go (modify) — add constants near line 16\n\n## What to do\nThe LIKE-based `Search` method does full table scans with no per-query timeout. SQLite uses `SetMaxOpenConns(1)`, so one expensive query blocks everything. Add a search-specific context timeout.\n\n1. Add constant:\n```go\n// SearchTimeout is the maximum duration for a search query.\nconst SearchTimeout = 10 * time.Second\n```\n\n2. At the top of the `Search` method, wrap the context with a timeout:\n```go\nfunc (r *RecordsRepository) Search(ctx context.Context, ...) ([]*Record, error) {\n ctx, cancel := context.WithTimeout(ctx, SearchTimeout)\n defer cancel()\n // ... rest of method unchanged\n}\n```\n\nThis is the minimal, safe fix. The `context.WithTimeout` will cause the SQL query to be cancelled if it exceeds 10 seconds, and the database/sql package will return a context deadline exceeded error.\n\n## Dont\n- Do NOT add a semaphore or concurrency limiter — that is a larger architectural change for a separate task\n- Do NOT change the Search method signature\n- Do NOT add timeout to other repository methods (only Search does full table scans)\n- Do NOT import any new packages beyond what is already imported (time and context are already imported)","acceptance_criteria":"1. The `Search` method creates a derived context with `context.WithTimeout(ctx, SearchTimeout)` and defers cancel\n2. `SearchTimeout` constant is exported and set to 10 seconds\n3. A unit test `TestSearchTimeout` verifies that the timeout is applied (can check that the context passed to the DB has a deadline set, or test with a very short timeout)\n4. 
All existing tests pass: `go test ./...`","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T15:33:52.911528+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:59:18.948483+08:00","closed_at":"2026-02-18T15:59:18.948483+08:00","close_reason":"0af843c Add search query timeout and concurrency control","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-q00.4","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:33:52.912629+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.5","title":"Fix LIKE wildcard injection in contains/startsWith filter operators","description":"## Files\n- internal/database/repositories/records.go (modify) — lines 396-405\n\n## What to do\nThe `contains` and `startsWith` operators in `buildFilterClause` do NOT call `escapeLIKE()` on user input. The `escapeLIKE` function already exists at line 1084 and is used by the `Search` method, but the filter operators skip it. Values containing `%` or `_` act as SQL wildcards.\n\n**Current code (broken):**\n```go\ncase \"contains\":\n conditions = append(conditions, fmt.Sprintf(\"%s LIKE %s\", extract, r.db.Placeholder(placeholderIdx)))\n val := fmt.Sprintf(\"%%%v%%\", f.Value)\n params = append(params, database.Text(val))\ncase \"startsWith\":\n conditions = append(conditions, fmt.Sprintf(\"%s LIKE %s\", extract, r.db.Placeholder(placeholderIdx)))\n val := fmt.Sprintf(\"%v%%\", f.Value)\n params = append(params, database.Text(val))\n```\n\n**Fix — apply escapeLIKE and add ESCAPE clause:**\n```go\ncase \"contains\":\n conditions = append(conditions, fmt.Sprintf(\"%s LIKE %s ESCAPE \\\", extract, r.db.Placeholder(placeholderIdx)))\n val := fmt.Sprintf(\"%%%s%%\", escapeLIKE(fmt.Sprintf(\"%v\", f.Value)))\n params = append(params, database.Text(val))\ncase \"startsWith\":\n conditions = append(conditions, fmt.Sprintf(\"%s LIKE %s ESCAPE \\\", extract, r.db.Placeholder(placeholderIdx)))\n val := fmt.Sprintf(\"%s%%\", escapeLIKE(fmt.Sprintf(\"%v\", f.Value)))\n params = append(params, database.Text(val))\n```\n\nAlso handle PostgreSQL dialect: if `r.db.Dialect() == database.PostgreSQL`, use `ILIKE` instead of `LIKE` for case-insensitive matching (matching the Search method behavior). Actually, check the existing Search method — if it uses ILIKE for Postgres, do the same here. 
If not, keep LIKE for both.\n\n## Dont\n- Do NOT modify the `escapeLIKE` function itself\n- Do NOT change the Search method\n- Do NOT add ESCAPE clause to operators other than contains/startsWith (eq, neq, etc. use = not LIKE)","acceptance_criteria":"1. A filter `{title: {contains: \"100%\"}}` matches only records where title literally contains \"100%\", not records where title contains \"100\" followed by anything\n2. A filter `{title: {startsWith: \"test_\"}}` matches only records where title starts with literal \"test_\", not \"test\" + any single char\n3. The LIKE clause includes `ESCAPE \\` \n4. Add test cases in `records_filter_test.go`: `TestBuildFilterClause_LIKEEscape` with values containing %, _, and \\\n5. All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T15:34:09.353004+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:48:21.269548+08:00","closed_at":"2026-02-18T15:48:21.269548+08:00","close_reason":"c9d2dc3 fix: escape LIKE wildcards in contains/startsWith filter operators","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.5","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:34:09.354258+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-q00.5","depends_on_id":"hyperindex-q00.3","type":"blocks","created_at":"2026-02-18T15:35:59.089925+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.6","title":"Expand DID filter to support all StringFilter operators, not just eq","description":"## Files\n- internal/graphql/schema/builder.go (modify) — extractFilters function, lines 550-557\n- internal/database/repositories/records.go (modify) — query methods that accept didFilter string\n\n## What to do\nThe WhereInput exposes full `StringFilterInput` for `did` (eq, neq, in, contains, startsWith, etc.) but `extractFilters` only handles `eq`. Other operators are silently ignored.\n\n**Option A (recommended — simpler):** Restrict the GraphQL type to only accept `eq` and `in` for DID. Replace `types.StringFilterInput` with a new `DIDFilterInput` that only has `eq` and `in` fields. This is the safer approach since DID is a column, not a JSON field, and operators like `contains` on DIDs are not meaningful.\n\n**Option B:** Expand `extractFilters` to convert DID filter operators into SQL WHERE conditions on the `did` column. This requires changing the `didFilter string` parameter to something richer (e.g., `[]FieldFilter` with a special marker for column filters vs JSON filters).\n\n**Go with Option A.** Create a restricted input type:\n\n1. In `internal/graphql/types/filters.go`, add:\n```go\n// DIDFilterInput is a restricted filter for DID fields (column-level, not JSON).\n// Only supports eq and in operators.\nvar DIDFilterInput = graphql.NewInputObject(graphql.InputObjectConfig{\n Name: \"DIDFilterInput\",\n Fields: graphql.InputObjectConfigFieldMap{\n \"eq\": \u0026graphql.InputObjectFieldConfig{Type: graphql.String, Description: \"Equals\"},\n \"in\": \u0026graphql.InputObjectFieldConfig{Type: graphql.NewList(graphql.String), Description: \"In list\"},\n },\n})\n```\n\n2. In `builder.go` line 166-168, change `types.StringFilterInput` to `types.DIDFilterInput` for the `did` field in WhereInput generation.\n\n3. 
In `extractFilters` (line 550-557), expand to handle `in`:\n```go\nif fieldName == \"did\" {\n if eqVal, ok := filterMap[\"eq\"].(string); ok \u0026\u0026 eqVal != \"\" {\n didFilter = eqVal\n }\n // Handle \"in\" for DID — convert to comma-separated or a slice\n // Actually, the simplest approach: convert DID \"in\" to multiple FieldFilters on the did column\n // But didFilter is a string... so for now, just support eq.\n continue\n}\n```\n\nActually, the cleanest approach: change `didFilter` from `string` to `[]FieldFilter` where the Field is \"did\" (a column name). Then in the repository, handle \"did\" filters as column conditions instead of JSON extract conditions. But this is a bigger refactor.\n\n**Simplest safe fix:** Keep `didFilter string` for `eq` only, but add `in` support by changing `didFilter` to a new type:\n```go\ntype DIDFilter struct {\n EQ string\n IN []string\n}\n```\n\nThen update `extractFilters` to populate both fields, and update repository methods to build the appropriate WHERE clause.\n\n## Dont\n- Do NOT add contains/startsWith/gt/lt operators for DID — they are not meaningful\n- Do NOT break existing DID eq filtering\n- Do NOT change the FieldFilter struct — DID filtering is column-level, not JSON-level","acceptance_criteria":"1. WhereInput for any collection shows `did: DIDFilterInput` (not StringFilterInput) in the schema\n2. `DIDFilterInput` only has `eq` and `in` fields\n3. `{where: {did: {eq: \"did:plc:abc\"}}}` still works as before\n4. `{where: {did: {in: [\"did:plc:abc\", \"did:plc:def\"]}}}` returns records from both DIDs\n5. Passing unsupported operators like `{where: {did: {contains: \"abc\"}}}` is rejected by GraphQL schema validation (not silently ignored)\n6. 
All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":45,"created_at":"2026-02-18T15:34:32.797385+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:53:38.437521+08:00","closed_at":"2026-02-18T15:53:38.437521+08:00","close_reason":"b149838 fix: expand DID filter to support eq and in operators","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.6","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:34:32.798459+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.7","title":"Skip lexicon properties that collide with reserved field names","description":"## Files\n- internal/graphql/types/object.go (modify) — buildRecordFields, lines 121-126\n- internal/graphql/schema/builder.go (modify) — WhereInput generation, lines 171-181\n\n## What to do\nLexicon properties named `did`, `rkey`, `uri`, or `cid` silently overwrite the standard metadata fields that are injected into typed record types and WhereInputs. This causes data corruption — the metadata field disappears and is replaced by the JSON property.\n\n1. In `internal/graphql/types/object.go`, in `buildRecordFields` (line 121), add a skip for reserved names:\n```go\n// reservedRecordFields are field names injected as metadata and must not be overwritten by lexicon properties.\nvar reservedRecordFields = map[string]bool{\n \"uri\": true,\n \"cid\": true,\n \"did\": true,\n \"rkey\": true,\n}\n```\n\nThen in the loop at line 121:\n```go\nfor _, entry := range def.Properties {\n if reservedRecordFields[entry.Name] {\n slog.Warn(\"Skipping lexicon property that collides with reserved field name\",\n \"lexicon\", lexiconID, \"property\", entry.Name)\n continue\n }\n field := b.buildField(lexiconID, entry.Name, \u0026entry.Property, requiredSet[entry.Name])\n if field != nil {\n fields[entry.Name] = field\n }\n}\n```\n\n2. In `builder.go`, in the WhereInput generation loop (line 172), add the same skip:\n```go\nfor _, entry := range lex.Defs.Main.Properties {\n if entry.Name == \"did\" {\n continue // did is handled separately as a metadata filter\n }\n if reservedRecordFields[entry.Name] {\n continue // Skip properties that collide with reserved metadata fields\n }\n // ... rest unchanged\n}\n```\n\nNote: `did` is already handled separately in WhereInput (line 166), so the skip there is just for `uri`, `cid`, `rkey`.\n\n3. 
Add `\"log/slog\"` import to object.go if not already present.\n\n## Dont\n- Do NOT rename the conflicting properties (e.g., prefixing with underscore) — just skip them\n- Do NOT change the reserved field definitions themselves (uri, cid, did, rkey)\n- Do NOT skip properties in the generic record type (only typed record types)","acceptance_criteria":"1. A lexicon with a property named \"uri\" does not overwrite the metadata uri field on the typed GraphQL type\n2. A lexicon with a property named \"did\" does not overwrite the metadata did field\n3. A warning is logged (slog.Warn) when a property is skipped due to collision\n4. The WhereInput for a lexicon with a \"uri\" property does not include a duplicate \"uri\" filter field\n5. Normal lexicon properties (non-colliding) still appear on the typed record type\n6. All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T15:34:49.855258+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:46:26.196831+08:00","closed_at":"2026-02-18T15:46:26.196831+08:00","close_reason":"d60d85d Skip lexicon properties that collide with reserved field names","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-q00.7","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:34:49.857079+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.8","title":"Log json.Unmarshal errors instead of silently skipping records","description":"## Files\n- internal/graphql/schema/builder.go (modify) — lines 722, 798, 883\n\n## What to do\nThree places in `resolveRecordConnection` and `createSearchResolver` silently skip records when `json.Unmarshal` fails on the record JSON. This hides data corruption. Add `slog.Warn` logging.\n\nFind all three locations where this pattern appears:\n```go\nif err := json.Unmarshal([]byte(rec.JSON), \u0026value); err != nil {\n continue // Skip records with invalid JSON\n}\n```\n\nReplace each with:\n```go\nif err := json.Unmarshal([]byte(rec.JSON), \u0026value); err != nil {\n slog.Warn(\"Skipping record with invalid JSON\", \"uri\", rec.URI, \"error\", err)\n continue\n}\n```\n\nThe three locations are:\n1. Line ~722 — backward pagination path in `resolveRecordConnection`\n2. Line ~798 — forward pagination path in `resolveRecordConnection`\n3. Line ~883 — search resolver in `createSearchResolver`\n\nMake sure `\"log/slog\"` is in the imports (it likely already is — check).\n\n## Dont\n- Do NOT return an error — just log and continue (dropping one bad record should not fail the whole query)\n- Do NOT change the control flow (still continue after logging)\n- Do NOT add logging to any other error paths in this task","acceptance_criteria":"1. All three json.Unmarshal error paths in builder.go now log with slog.Warn including the record URI and error\n2. The log message includes \"uri\" and \"error\" structured fields\n3. Records with valid JSON are still returned normally\n4. 
All existing tests pass: `go test ./...`","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T15:35:01.67118+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:39:06.204066+08:00","closed_at":"2026-02-18T15:39:06.204066+08:00","close_reason":"09fd2a3 Log json.Unmarshal errors with slog.Warn in all three locations","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-q00.8","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:35:01.672253+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-q00.9","title":"Fix search minimum length to use rune count and increase to 3","description":"## Files\n- internal/graphql/schema/builder.go (modify) — createSearchResolver, line 844\n\n## What to do\nThe search minimum length check uses `len()` which counts bytes, not characters. A 2-byte UTF-8 character (e.g., \"é\") would pass the check with just 1 character. Also, 2 characters is too short for a meaningful LIKE search — increase to 3.\n\n**Current code:**\n```go\nif len(searchQuery) \u003c 2 {\n return nil, fmt.Errorf(\"search query must be at least 2 characters\")\n}\n```\n\n**Fix:**\n```go\nif utf8.RuneCountInString(searchQuery) \u003c 3 {\n return nil, fmt.Errorf(\"search query must be at least 3 characters\")\n}\n```\n\nAdd `\"unicode/utf8\"` to the imports.\n\n## Dont\n- Do NOT change the minimum for any other validation (only the search query)\n- Do NOT add maximum length validation in this task (that is a separate concern)","acceptance_criteria":"1. Search query \"ab\" returns error \"search query must be at least 3 characters\"\n2. Search query \"abc\" succeeds (3 chars)\n3. Search query \"éé\" (2 multi-byte chars, 4 bytes) returns error (only 2 runes)\n4. Search query \"éée\" (3 multi-byte chars) succeeds\n5. `unicode/utf8` is imported\n6. 
All existing tests pass: `go test ./...` — update any existing tests that use 2-char search queries to use 3+ chars","status":"closed","priority":2,"issue_type":"task","owner":"einstein.climateai.org","estimated_minutes":15,"created_at":"2026-02-18T15:35:12.361536+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T15:44:47.927275+08:00","closed_at":"2026-02-18T15:44:47.927275+08:00","close_reason":"99b68c3 Fix search minimum length to use rune count and increase to 3","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-q00.9","depends_on_id":"hyperindex-q00","type":"parent-child","created_at":"2026-02-18T15:35:12.362657+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-uau","title":"Fix: backoff resets unconditionally on failed dials (from hyperindex-md3.2)","description":"Review of hyperindex-md3.2 (commit 692e95b) found: backoff = minBackoff is placed unconditionally after every runOnce() call, including immediate dial failures. The comment says 'after a successful connection' but the code does not check for success. If the server is persistently unreachable, the effective maximum backoff is 2s (reset to 1s → wait 1s → double to 2s → reset to 1s...) instead of the intended 2 minutes. This defeats the purpose of exponential backoff for persistent outages. Fix: move the reset inside a success condition: if err == nil { backoff = minBackoff }. Evidence: consumer.go line 113 — backoff = minBackoff is outside any err check, runs unconditionally after runOnce() returns regardless of whether the dial succeeded.","status":"open","priority":1,"issue_type":"bug","owner":"einstein.climateai.org","created_at":"2026-02-18T16:49:37.491393+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T16:49:37.491393+08:00","dependencies":[{"issue_id":"hyperindex-uau","depends_on_id":"hyperindex-md3.2","type":"discovered-from","created_at":"2026-02-18T16:49:41.017962+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-vz7","title":"Epic: Fix NonNull GraphQL violations for required fields with missing data","description":"## Problem\nGraphQL warnings: 'Cannot return null for non-nullable field OrgHypercertsClaimActivity.title' (and shortDescription). The lexicon marks these fields as required, causing the schema to declare them as NonNull (String!). However, old records in the DB were written when these fields were optional, so their JSON lacks these keys. The resolver returns raw maps without coercion, causing NonNull violations.\n\n## Goal\nAdd a null-coercion layer in the resolver that, for each required field in the lexicon, ensures a type-appropriate zero value is present in the data map before returning it to GraphQL. This preserves schema strictness while handling historical data gracefully.\n\n## Scope\n- internal/graphql/schema/builder.go (resolver layer)\n- internal/graphql/types/object.go (optional: export zero-value helper)\n- internal/lexicon/types.go (optional: add helper to get zero value per type)\n- Tests for the coercion logic\n\n## Key Constraints\n- Do NOT make required fields nullable in the schema — that would be a schema regression\n- Do NOT validate/reject records at ingestion — we must tolerate historical data\n- Zero values: string → \"\", integer → 0, boolean → false, array → [], ref/union/blob → nil (keep nullable)\n- The coercion must happen in both createCollectionResolver and createSingleRecordResolver paths","status":"closed","priority":1,"issue_type":"epic","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","created_at":"2026-02-19T20:09:45.606789+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:17:45.88078+08:00","closed_at":"2026-02-19T20:17:45.880785+08:00","labels":["needs-integration-review","scope:medium"]}
+{"id":"hyperindex-vz7.1","title":"Add ZeroValue helper to lexicon package for type-appropriate defaults","description":"## Files\n- internal/lexicon/types.go (modify)\n- internal/lexicon/types_test.go (create)\n\n## What to do\nAdd a function `ZeroValueForType(propType string, format string) interface{}` to `internal/lexicon/types.go` that returns the appropriate Go zero value for a given lexicon property type:\n\n```go\nfunc ZeroValueForType(propType, format string) interface{} {\n switch propType {\n case TypeString:\n return \"\"\n case TypeInteger:\n return 0\n case TypeBoolean:\n return false\n case TypeArray:\n return []interface{}{}\n default:\n // ref, union, blob, bytes, cid-link, unknown, object — return nil\n // These are complex types that cannot have a meaningful zero value\n return nil\n }\n}\n```\n\nAlso add a method on `RecordDef` to get all required properties with their types:\n\n```go\n// RequiredProperties returns all properties that are marked as required.\nfunc (r *RecordDef) RequiredProperties() []PropertyEntry {\n var result []PropertyEntry\n for _, entry := range r.Properties {\n if entry.Property.Required {\n result = append(result, entry)\n }\n }\n return result\n}\n```\n\n## Don't\n- Do not modify any existing methods or types\n- Do not change the Property struct\n- Do not add dependencies on graphql packages","acceptance_criteria":"1. `ZeroValueForType(\"string\", \"\")` returns `\"\"` (empty string)\n2. `ZeroValueForType(\"string\", \"datetime\")` returns `\"\"` (empty string)\n3. `ZeroValueForType(\"integer\", \"\")` returns `0` (int)\n4. `ZeroValueForType(\"boolean\", \"\")` returns `false`\n5. `ZeroValueForType(\"array\", \"\")` returns `[]interface{}{}`\n6. `ZeroValueForType(\"ref\", \"\")` returns `nil`\n7. `ZeroValueForType(\"union\", \"\")` returns `nil`\n8. `ZeroValueForType(\"blob\", \"\")` returns `nil`\n9. `RecordDef.RequiredProperties()` returns only entries where `Property.Required == true`\n10. 
`go test ./internal/lexicon/...` passes with new tests","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-19T20:10:02.055823+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:12:45.490745+08:00","closed_at":"2026-02-19T20:12:45.490745+08:00","close_reason":"7fc19ad Add ZeroValueForType and RequiredProperties to lexicon package","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-vz7.1","depends_on_id":"hyperindex-vz7","type":"parent-child","created_at":"2026-02-19T20:10:02.056679+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-vz7.2","title":"Add coerceRequiredFields to schema builder and wire into resolvers","description":"## Files\n- internal/graphql/schema/builder.go (modify)\n\n## What to do\nAdd a method `coerceRequiredFields(data map[string]interface{}, collection string)` to the `Builder` struct that:\n\n1. Looks up the `RecordDef` from `b.registry.GetRecordDef(collection)`\n2. Iterates over `RecordDef.RequiredProperties()` (from task vz7.1)\n3. For each required property, checks if `data[prop.Name]` is nil or absent\n4. If so, sets `data[prop.Name] = lexicon.ZeroValueForType(prop.Type, prop.Format)`\n5. Skips properties where `ZeroValueForType` returns nil (complex types like ref/union)\n6. Logs a `slog.Debug` message when coercing a field (for observability)\n\nThen wire this into the two resolver paths:\n\n### Path 1: `createCollectionResolver` (line ~974)\nIn the `buildNode` callback, after `data[\"rkey\"] = rec.RKey`, add:\n```go\nb.coerceRequiredFields(data, lexiconID)\n```\n\n### Path 2: `createSingleRecordResolver` (line ~1018)\nAfter `data[\"rkey\"] = rec.RKey`, add:\n```go\nb.coerceRequiredFields(data, lexiconID)\n```\n\n### Path 3: `resolveRecordConnection` backward pagination path (line ~753)\nThe `buildNode` callback is already called, so this is covered by Path 1.\n\n### Path 4: Generic `createRecordsResolver` (line ~937 area)\nCheck if there is a generic records resolver that also returns record data maps. If it uses a `buildNode` callback, it's already covered. 
If not, add coercion there too.\n\nThe signature:\n```go\n// coerceRequiredFields fills in zero values for required fields that are missing or null.\n// This prevents NonNull violations when historical records lack fields that became required.\nfunc (b *Builder) coerceRequiredFields(data map[string]interface{}, collection string) {\n```\n\n## Don't\n- Do not change the GraphQL schema (no nullability changes)\n- Do not modify how records are stored or ingested\n- Do not add any new dependencies\n- Do not change the function signatures of existing resolvers\n- Do not coerce fields that already have a non-nil value","acceptance_criteria":"1. `go build ./...` succeeds\n2. `go test ./...` succeeds (no regressions)\n3. A record with JSON `{\"createdAt\":\"2025-01-01T00:00:00Z\"}` (missing title and shortDescription) for collection `org.hypercerts.claim.activity` gets coerced to have `title: \"\"` and `shortDescription: \"\"` when resolved via the collection query\n4. A record with JSON `{\"title\":\"Hello\",\"shortDescription\":\"World\",\"createdAt\":\"2025-01-01T00:00:00Z\"}` is NOT modified (all required fields present)\n5. Coercion happens in both createCollectionResolver and createSingleRecordResolver paths\n6. No NonNull GraphQL warnings for missing required string/integer/boolean fields\n7. 
The coercion does not apply to complex types (ref, union, blob) — those remain nil if missing","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":30,"created_at":"2026-02-19T20:10:20.768829+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:14:20.063401+08:00","closed_at":"2026-02-19T20:14:20.063401+08:00","close_reason":"fb5c723 Add coerceRequiredFields to schema builder and wire into resolvers","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-vz7.2","depends_on_id":"hyperindex-vz7","type":"parent-child","created_at":"2026-02-19T20:10:20.769797+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-vz7.2","depends_on_id":"hyperindex-vz7.1","type":"blocks","created_at":"2026-02-19T20:10:20.771107+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-vz7.3","title":"Add integration test for null coercion of required fields","description":"## Files\n- internal/graphql/schema/builder_test.go (modify or create)\n\n## What to do\nAdd a test (or tests) that verifies the null coercion behavior end-to-end through the schema builder.\n\n### Test: TestCoerceRequiredFields_MissingFields\n1. Create a `lexicon.Registry` and register the `org.hypercerts.claim.activity` lexicon (load from `testdata/lexicons/org/hypercerts/claim/activity.json`)\n2. Build a schema using `schema.NewBuilder(registry).Build()`\n3. Create a mock record in the DB with JSON that is MISSING the `title` and `shortDescription` fields: `{\"createdAt\":\"2025-01-01T00:00:00Z\"}`\n4. Execute a GraphQL query for that record\n5. Assert: the result contains `title: \"\"` and `shortDescription: \"\"` (coerced zero values) with NO errors/warnings\n\n### Test: TestCoerceRequiredFields_PresentFields\n1. Same setup but with a complete record: `{\"title\":\"My Title\",\"shortDescription\":\"My Desc\",\"createdAt\":\"2025-01-01T00:00:00Z\"}`\n2. Execute a GraphQL query\n3. Assert: the result contains `title: \"My Title\"` and `shortDescription: \"My Desc\"` (original values preserved)\n\n### Test: TestCoerceRequiredFields_NullFields\n1. Same setup but with explicit null values: `{\"title\":null,\"shortDescription\":null,\"createdAt\":\"2025-01-01T00:00:00Z\"}`\n2. Execute a GraphQL query\n3. Assert: the result contains `title: \"\"` and `shortDescription: \"\"` (coerced from null)\n\nLook at existing tests in `internal/graphql/schema/` and `internal/integration/` for patterns on how to set up the schema builder with a registry and execute queries. 
Follow the same patterns.\n\nIf there is no existing test infrastructure for executing GraphQL queries against the schema builder, create a minimal helper:\n- Build schema from registry\n- Use `graphql.Do()` to execute a query\n- Assert on the result\n\n## Don't\n- Do not modify production code\n- Do not use external test frameworks (use standard `testing` package)\n- Do not duplicate tests that already exist","acceptance_criteria":"1. `go test -v -run TestCoerceRequiredFields ./internal/graphql/schema/...` passes (or wherever tests are placed)\n2. TestCoerceRequiredFields_MissingFields proves that missing required string fields get coerced to \"\"\n3. TestCoerceRequiredFields_PresentFields proves that present fields are not modified\n4. TestCoerceRequiredFields_NullFields proves that explicit null required string fields get coerced to \"\"\n5. `go test ./...` passes (no regressions)","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":40,"created_at":"2026-02-19T20:10:37.526215+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-19T20:17:11.982287+08:00","closed_at":"2026-02-19T20:17:11.982287+08:00","close_reason":"ae64fae test: add integration tests for null coercion of required fields","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-vz7.3","depends_on_id":"hyperindex-vz7","type":"parent-child","created_at":"2026-02-19T20:10:37.527301+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-vz7.3","depends_on_id":"hyperindex-vz7.2","type":"blocks","created_at":"2026-02-19T20:10:37.528557+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq","title":"Epic: Fix bugs found by reviewers of hyperindex-md3 bug fixes","description":"7 issues found by 4 specialized reviewers of the hyperindex-md3 bug-fix epic. Includes 2 P1 bugs (backoff logic broken, README auth wrong), 2 P2 concerns (shutdown log else branch, compose files insecure), and 3 P3 quality items (stale docs, test coverage, event validation gap). All existing tests must continue to pass.","status":"closed","priority":1,"issue_type":"epic","owner":"einstein.climateai.org","created_at":"2026-02-18T17:10:15.981231+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:19:15.515469+08:00","closed_at":"2026-02-18T17:19:15.515469+08:00","close_reason":"5d24874 all 6 reviewer follow-up fixes complete, all tests green","labels":["scope:small"]}
+{"id":"hyperindex-xuq.1","title":"Fix backoff reset to only fire after successful connection, not failed dials","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nThe B2 fix (commit 692e95b) added `backoff = minBackoff` unconditionally at line 113 after every `runOnce()` return. This is wrong — it also resets after FAILED dials (e.g., server unreachable), which caps effective backoff at 2s for persistent outages instead of the intended 2 minutes.\n\nCurrent code (lines 110-113):\n```go\nerr := c.runOnce(ctx)\n\n// Reset backoff after a successful connection that processed events.\nbackoff = minBackoff\n```\n\nFix: Make the reset conditional on `err == nil`:\n```go\nerr := c.runOnce(ctx)\n\n// Reset backoff only after a successful connection (not failed dials).\nif err == nil {\n backoff = minBackoff\n}\n```\n\nThis means:\n- Connection succeeds, processes events, server closes cleanly → backoff resets ✓\n- Dial fails immediately → backoff escalates ✓\n- Connection succeeds but drops with read error → backoff resets (debatable, but better than never resetting) ✓\n\nUpdate the existing `TestConsumer_BackoffResetsAfterSuccess` to ensure it still passes (the second connection closes with CloseNormalClosure which makes runOnce return nil, so the test should still work).\n\nAdd a new test `TestConsumer_BackoffEscalatesOnPersistentFailure` that:\n1. Uses an unreachable URL (e.g., ws://127.0.0.1:1 — port 1 will refuse connections)\n2. Starts the consumer with a context timeout of ~4s\n3. Counts how many connection attempts happen in that window\n4. Verifies that fewer than 4 attempts occur (proving backoff is escalating, not resetting to 1s every time)\n\n## Dont\n- Do NOT change the backoff doubling logic\n- Do NOT change minBackoff or maxBackoff constants\n- Do NOT make backoff configurable","acceptance_criteria":"1. `backoff = minBackoff` is inside `if err == nil { ... }` (conditional on success)\n2. 
`TestConsumer_BackoffResetsAfterSuccess` still passes\n3. `TestConsumer_BackoffEscalatesOnPersistentFailure` exists and passes\n4. All existing consumer tests still pass\n5. `go test -race ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T17:10:35.042751+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:17:31.998125+08:00","closed_at":"2026-02-18T17:17:31.998125+08:00","close_reason":"a8ea6bb fix: reset backoff only on successful connection","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-xuq.1","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:10:35.043622+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq.2","title":"Fix README curl example: use admin username for Basic auth","description":"## Files\n- README.md (modify)\n\n## What to do\nThe B9 fix (commit db029dc) changed the README curl from Bearer to Basic auth, but used `-u \":${TAP_ADMIN_PASSWORD}\"` (empty username). The actual Go code in `internal/tap/admin.go` line 157 uses `\"admin:\" + c.password`:\n\n```go\ncreds := base64.StdEncoding.EncodeToString([]byte(\"admin:\" + c.password))\nreq.Header.Set(\"Authorization\", \"Basic \"+creds)\n```\n\nSo the username is `\"admin\"`, not empty.\n\nFix line 72 of README.md:\n```bash\n# Before (wrong):\ncurl -X POST http://localhost:2480/repos/add \\\n -u \":${TAP_ADMIN_PASSWORD}\" \\\n\n# After (correct):\ncurl -X POST http://localhost:2480/repos/add \\\n -u \"admin:${TAP_ADMIN_PASSWORD}\" \\\n```\n\nAlso fix the env var table at line 91. Change:\n```\n| `TAP_ADMIN_PASSWORD` | Password for Tap's admin HTTP API | *(empty)* |\n```\nto:\n```\n| `TAP_ADMIN_PASSWORD` | Password for Tap's admin HTTP API | *(required for docker-compose.tap.yml)* |\n```\n\n## Dont\n- Do NOT change admin.go\n- Do NOT change any other README sections beyond lines 72 and 91","acceptance_criteria":"1. README curl example uses `-u \"admin:${TAP_ADMIN_PASSWORD}\"` (username is \"admin\")\n2. README env var table shows TAP_ADMIN_PASSWORD as required for docker-compose.tap.yml\n3. No other README sections changed\n4. 
The curl command matches what admin.go actually sends","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":10,"created_at":"2026-02-18T17:10:46.158443+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:17:32.090462+08:00","closed_at":"2026-02-18T17:17:32.090462+08:00","close_reason":"c72322b fix: use admin username in README curl Basic auth example","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-xuq.2","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:10:46.159255+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq.3","title":"Reject record/identity events with nil payload in ParseEvent","description":"## Files\n- internal/tap/event.go (modify)\n- internal/tap/event_test.go (modify)\n\n## What to do\nIn ParseEvent(), the validation guards are:\n```go\nif event.Type == EventTypeRecord \u0026\u0026 event.Record != nil { ... }\nif event.Type == EventTypeIdentity \u0026\u0026 event.Identity != nil { ... }\n```\n\nThis means `type=record` with `Record: nil` (e.g., JSON `{\"id\":1,\"type\":\"record\"}`) silently passes validation and returns a valid `*Event`. In the consumer, `IsRecord()` returns false (because Record is nil), so the event falls through to the \"Unknown event type\" default case — logged as a warning and silently dropped without incrementing the errors counter properly.\n\nFix: Change the guards to reject nil payloads:\n\n```go\n// Validate record events.\nif event.Type == EventTypeRecord {\n if event.Record == nil {\n return nil, fmt.Errorf(\"tap record event missing record payload\")\n }\n if event.Record.DID == \"\" {\n return nil, fmt.Errorf(\"tap record event missing did field\")\n }\n // ... 
rest of field checks unchanged ...\n}\n\n// Validate identity events.\nif event.Type == EventTypeIdentity {\n if event.Identity == nil {\n return nil, fmt.Errorf(\"tap identity event missing identity payload\")\n }\n if event.Identity.DID == \"\" {\n return nil, fmt.Errorf(\"tap identity event missing did field\")\n }\n}\n```\n\nAlso add validation that create/update actions have a non-empty record body:\n```go\nif event.Record.Action == ActionCreate || event.Record.Action == ActionUpdate {\n if len(event.Record.Record) == 0 {\n return nil, fmt.Errorf(\"tap record event action %q missing record body\", event.Record.Action)\n }\n}\n```\n\nAdd table-driven tests:\n- `type=record` with nil Record payload → error\n- `type=identity` with nil Identity payload → error\n- `type=record`, action=create, with empty Record body → error\n- `type=record`, action=update, with empty Record body → error\n- `type=record`, action=delete, with empty Record body → NO error (deletes have no body)\n\n## Dont\n- Do NOT validate unknown action values (out of scope — would need Tap protocol spec confirmation)\n- Do NOT change IsRecord() or IsIdentity() methods\n- Do NOT change struct definitions","acceptance_criteria":"1. ParseEvent rejects `type=record` with nil Record payload (returns error)\n2. ParseEvent rejects `type=identity` with nil Identity payload (returns error)\n3. ParseEvent rejects create/update with empty record body (returns error)\n4. ParseEvent allows delete with empty record body (no error)\n5. At least 5 new test cases covering nil-payload and empty-body scenarios\n6. All existing event tests still pass\n7. 
`go test ./internal/tap/...` passes","status":"closed","priority":1,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T17:11:04.450231+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:17:32.180675+08:00","closed_at":"2026-02-18T17:17:32.180675+08:00","close_reason":"6c73f1a fix: reject nil-payload and empty-body events in ParseEvent","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-xuq.3","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:11:04.45108+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq.4","title":"Add ctx.Err() guard to else branch in Start() reconnect logging","description":"## Files\n- internal/tap/consumer.go (modify)\n- internal/tap/consumer_test.go (modify)\n\n## What to do\nThe B10 fix (commit b28087c) added a `ctx.Err()` check in the `err != nil` branch of Start() (line 126), but missed the `else` branch (line 133-136). When the server closes the connection cleanly (NormalClosure) simultaneously with context cancellation, `runOnce()` returns nil, the ctx/done select at lines 116-122 might not catch it (race between channels), and the else branch logs:\n\n```go\nslog.Warn(\"Tap connection closed unexpectedly, will reconnect\", \"backoff\", backoff)\n```\n\nThis is misleading during shutdown — the connection closed because we are shutting down, not \"unexpectedly\".\n\nFix: Add the same `ctx.Err()` check to the else branch:\n\n```go\nif err != nil {\n if ctx.Err() != nil {\n return ctx.Err()\n }\n slog.Warn(\"Tap connection lost, will reconnect\",\n \"error\", err,\n \"backoff\", backoff,\n )\n} else {\n if ctx.Err() != nil {\n return ctx.Err()\n }\n slog.Warn(\"Tap connection closed unexpectedly, will reconnect\",\n \"backoff\", backoff,\n )\n}\n```\n\nOr refactor to check ctx.Err() once before the if/else:\n\n```go\n// If context was cancelled, this is a graceful shutdown — do not log.\nif ctx.Err() != nil {\n return ctx.Err()\n}\n\nif err != nil {\n slog.Warn(\"Tap connection lost, will reconnect\",\n \"error\", err,\n \"backoff\", backoff,\n )\n} else {\n slog.Warn(\"Tap connection closed unexpectedly, will reconnect\",\n \"backoff\", backoff,\n )\n}\n```\n\nThe second form is cleaner — prefer it.\n\nUpdate `TestConsumer_ShutdownNoSpuriousLog` to also check for Warn-level messages containing \"unexpectedly\" to verify neither branch logs spuriously during shutdown.\n\n## Dont\n- Do NOT change the reconnection logic\n- Do NOT remove the existing err != nil / else structure\n- Do NOT change 
Stop()","acceptance_criteria":"1. `ctx.Err()` is checked before BOTH the error and no-error log branches\n2. No Warn-level \"closed unexpectedly\" message during graceful shutdown\n3. `TestConsumer_ShutdownNoSpuriousLog` updated to verify no spurious Warn messages\n4. All existing consumer tests still pass\n5. `go test -race ./internal/tap/...` passes","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":20,"created_at":"2026-02-18T17:11:20.027274+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:19:15.425593+08:00","closed_at":"2026-02-18T17:19:15.425593+08:00","close_reason":"5d24874 fix: guard else branch with ctx.Err() check before reconnect log","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-xuq.4","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:11:20.028081+08:00","created_by":"einstein.climateai.org"},{"issue_id":"hyperindex-xuq.4","depends_on_id":"hyperindex-xuq.1","type":"blocks","created_at":"2026-02-18T17:11:48.533378+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq.5","title":"Remove static SECRET_KEY_BASE default from docker-compose.yml and docker-compose.postgres.yml","description":"## Files\n- docker-compose.yml (modify)\n- docker-compose.postgres.yml (modify)\n\n## What to do\nThe B8 fix only hardened `docker-compose.tap.yml`. The same insecure static default `development-secret-key-change-in-production-64chars` exists in the other two compose files:\n\n`docker-compose.yml` line 12:\n```yaml\n- SECRET_KEY_BASE=${SECRET_KEY_BASE:-development-secret-key-change-in-production-64chars}\n```\n\n`docker-compose.postgres.yml` line 12:\n```yaml\n- SECRET_KEY_BASE=${SECRET_KEY_BASE:-development-secret-key-change-in-production-64chars}\n```\n\nFix both to use the `:?` syntax like docker-compose.tap.yml:\n```yaml\n- SECRET_KEY_BASE=${SECRET_KEY_BASE:?SECRET_KEY_BASE must be set - generate with: openssl rand -hex 32}\n```\n\n## Dont\n- Do NOT change any other environment variables in these files\n- Do NOT change docker-compose.tap.yml (already fixed in B8)\n- Do NOT change the Dockerfile","acceptance_criteria":"1. docker-compose.yml uses `${SECRET_KEY_BASE:?...}` (no static default)\n2. docker-compose.postgres.yml uses `${SECRET_KEY_BASE:?...}` (no static default)\n3. Both files are valid YAML\n4. docker-compose.tap.yml is unchanged","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":10,"created_at":"2026-02-18T17:11:29.666594+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:17:32.270604+08:00","closed_at":"2026-02-18T17:17:32.270604+08:00","close_reason":"75b4d24 fix: require SECRET_KEY_BASE via :? syntax in compose files","labels":["scope:trivial"],"dependencies":[{"issue_id":"hyperindex-xuq.5","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:11:29.667509+08:00","created_by":"einstein.climateai.org"}]}
+{"id":"hyperindex-xuq.6","title":"Improve handler_test.go: fix misleading test name and add delete-path activity status test","description":"## Files\n- internal/tap/handler_test.go (modify)\n\n## What to do\nTwo test quality issues found by reviewers:\n\n### 1. Rename misleading test\n`TestIndexHandler_HandleRecord_DeleteError` (line 332) tests the HAPPY path (delete of non-existent record, which is NOT an error). The name implies it tests error handling. \n\nRename to `TestIndexHandler_HandleRecord_DeleteNonExistent` and update the doc comment to:\n```go\n// TestIndexHandler_HandleRecord_DeleteNonExistent verifies that deleting a record\n// that does not exist is not an error (SQL DELETE of 0 rows is fine) and that\n// pubsub events are still published.\n```\n\n### 2. Add delete-path activity status test\nB7 added `UpdateStatus(\"completed\")` calls for both create and delete paths, but the test only verifies the create path. Add a test `TestIndexHandler_HandleRecord_DeleteActivityCompleted` that:\n1. Creates a record (so there is something to delete)\n2. Deletes the record\n3. Queries `GetRecentActivity(ctx, 1)` \n4. Finds the activity entry with operation=\"delete\" and verifies its Status is \"completed\"\n\nNote: There will be multiple activity entries (create + delete). Filter for the delete one. `GetRecentActivity` returns entries ordered by timestamp DESC, so the delete entry should be first.\n\n## Dont\n- Do NOT change handler.go\n- Do NOT change any existing test logic (only rename + add new test)\n- Do NOT change consumer_test.go or event_test.go","acceptance_criteria":"1. `TestIndexHandler_HandleRecord_DeleteError` is renamed to `TestIndexHandler_HandleRecord_DeleteNonExistent`\n2. `TestIndexHandler_HandleRecord_DeleteActivityCompleted` exists and passes\n3. The new test verifies delete activity status is \"completed\" (not \"pending\")\n4. All existing handler tests still pass\n5. 
`go test ./internal/tap/...` passes","status":"closed","priority":2,"issue_type":"task","assignee":"einstein.climateai.org","owner":"einstein.climateai.org","estimated_minutes":25,"created_at":"2026-02-18T17:11:43.521265+08:00","created_by":"einstein.climateai.org","updated_at":"2026-02-18T17:17:32.359884+08:00","closed_at":"2026-02-18T17:17:32.359884+08:00","close_reason":"8f394aa test: rename DeleteError test and add delete activity status test","labels":["scope:small"],"dependencies":[{"issue_id":"hyperindex-xuq.6","depends_on_id":"hyperindex-xuq","type":"parent-child","created_at":"2026-02-18T17:11:43.522043+08:00","created_by":"einstein.climateai.org"}]}
diff --git a/.beads/last-touched b/.beads/last-touched
new file mode 100644
index 0000000..90feae8
--- /dev/null
+++ b/.beads/last-touched
@@ -0,0 +1 @@
+hyperindex-3gm
diff --git a/.beads/metadata.json b/.beads/metadata.json
new file mode 100644
index 0000000..c787975
--- /dev/null
+++ b/.beads/metadata.json
@@ -0,0 +1,4 @@
+{
+ "database": "beads.db",
+ "jsonl_export": "issues.jsonl"
+}
\ No newline at end of file
diff --git a/.env.example b/.env.example
index 865de2a..48ea453 100644
--- a/.env.example
+++ b/.env.example
@@ -72,7 +72,27 @@ ADMIN_DIDS=did:plc:qc42fmqqlsmdq7jiypiiigww
# LEXICON_DIR=
# =============================================================================
-# Jetstream Configuration
+# Tap Configuration (Recommended — replaces Jetstream+Backfill)
+# =============================================================================
+
+# Enable Tap consumer. When true, Jetstream and Backfill are disabled.
+# TAP_ENABLED=false
+
+# WebSocket URL of the Tap sidecar (default: ws://localhost:2480)
+# TAP_URL=ws://localhost:2480
+
+# Password for Tap's admin HTTP API (used for /repos/add backfill calls)
+# TAP_ADMIN_PASSWORD=
+
+# Disable ack-based delivery (useful for debugging; not recommended in production)
+# TAP_DISABLE_ACKS=false
+
+# Collection NSID for Tap auto-discovery of repos (e.g. app.bsky.feed.post)
+# When set, Tap will automatically discover and index all repos publishing this collection.
+# TAP_SIGNAL_COLLECTION=
+
+# =============================================================================
+# Jetstream Configuration (Legacy Mode)
# =============================================================================
# Jetstream WebSocket URL (default: wss://jetstream2.us-west.bsky.network/subscribe)
diff --git a/README.md b/README.md
index 3440244..4a485bf 100644
--- a/README.md
+++ b/README.md
@@ -43,6 +43,59 @@ Or place lexicon JSON files in a directory and set `LEXICON_DIR` environment var
### 2. Start Indexing
+#### Using Tap (Recommended)
+
+[Tap](https://github.com/bluesky-social/indigo/tree/main/cmd/tap) is Bluesky's official sidecar utility for consuming AT Protocol events. It is the recommended way to run Hyperindex because it provides:
+
+- **Cryptographic verification** — verifies repo structure, MST integrity, and identity signatures
+- **Ordering guarantees** — strict per-repo event ordering, no backfill/live race conditions
+- **At-least-once delivery** — ack-based protocol ensures no events are lost on crash
+- **Identity tracking** — handle changes and account status updates are handled automatically
+- **Simplified architecture** — Tap manages backfill automatically; no separate backfill worker needed
+
+**Run with Tap sidecar:**
+
+```bash
+# Copy and configure environment
+cp .env.example .env
+# Set TAP_ADMIN_PASSWORD and other vars in .env
+
+# Start Tap + Hyperindex together
+docker compose -f docker-compose.tap.yml up --build
+```
+
+**Add repos to track via Tap admin API:**
+
+```bash
+# Add a specific repo (DID) for Tap to index
+curl -X POST http://localhost:2480/repos/add \
+ -u "admin:${TAP_ADMIN_PASSWORD}" \
+ -H "Content-Type: application/json" \
+ -d '{"dids": ["did:plc:your-did-here"]}'
+```
+
+**Auto-discovery with `TAP_SIGNAL_COLLECTION`:**
+
+Set `TAP_SIGNAL_COLLECTION` to a collection NSID (e.g. `app.bsky.feed.post`) and Tap will automatically discover and index all repos that publish records in that collection. This replaces the need for a manual full-network backfill.
+
+```bash
+TAP_SIGNAL_COLLECTION=app.bsky.feed.post docker compose -f docker-compose.tap.yml up
+```
+
+**Tap environment variables:**
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `TAP_ENABLED` | Enable Tap consumer (disables Jetstream+Backfill) | `false` |
+| `TAP_URL` | WebSocket URL of the Tap sidecar | `ws://localhost:2480` |
+| `TAP_ADMIN_PASSWORD` | Password for Tap's admin HTTP API | *(required for docker-compose.tap.yml)* |
+| `TAP_DISABLE_ACKS` | Disable ack-based delivery (useful for debugging) | `false` |
+| `TAP_SIGNAL_COLLECTION` | Collection NSID for auto-discovery of repos | *(empty)* |
+
+#### Legacy Mode: Jetstream + Backfill
+
+> **Note:** Jetstream+Backfill mode is the legacy ingestion path. It lacks cryptographic verification and ordering guarantees. Use Tap (above) for new deployments.
+
Once lexicons are registered, Hyperindex automatically:
- **Connects to Jetstream** for real-time events
- **Indexes matching records** to your database
@@ -65,33 +118,95 @@ mutation {
Access your indexed data at `/graphql`:
```graphql
-# Query records by collection
+# Generic query — all records by collection
query {
- records(collection: "app.bsky.feed.post") {
+ records(collection: "app.bsky.feed.post", first: 20) {
edges {
- node {
- uri
- did
- value # JSON record data
- }
+ node { uri did collection value }
+ cursor
}
+ pageInfo { hasNextPage endCursor }
+ totalCount
}
}
-# With typed queries (when lexicon schemas are loaded)
+# Typed queries — with filtering, sorting, and field-level access
query {
- appBskyFeedPost(first: 10, where: { did: { eq: "did:plc:..." } }) {
+ appBskyFeedPost(
+ where: { text: { contains: "hello" }, did: { eq: "did:plc:..." } }
+ sortBy: "createdAt"
+ sortDirection: DESC
+ first: 10
+ ) {
edges {
node {
uri
+ did
+ rkey
text
createdAt
}
}
+ totalCount
+ pageInfo { hasNextPage hasPreviousPage endCursor }
+ }
+}
+
+# Backward pagination
+query {
+ appBskyFeedPost(last: 10, before: "cursor_value") {
+ edges { node { uri text } }
+ pageInfo { hasPreviousPage startCursor }
+ }
+}
+
+# Cross-collection text search
+query {
+ search(query: "climate", collection: "app.bsky.feed.post", first: 20) {
+ edges {
+ node { uri did collection value }
+ }
+ }
+}
+```
+
+#### Filtering (`where`)
+
+Typed collection queries accept a `where` argument with per-field filters:
+
+| Operator | Types | Example |
+|----------|-------|---------|
+| `eq` | All | `{ title: { eq: "Hello" } }` |
+| `neq` | All | `{ status: { neq: "draft" } }` |
+| `gt`, `lt`, `gte`, `lte` | Int, Float, DateTime | `{ score: { gt: 5, lte: 100 } }` |
+| `in` | String, Int, Float | `{ type: { in: ["post", "reply"] } }` |
+| `contains` | String | `{ text: { contains: "forest" } }` |
+| `startsWith` | String | `{ name: { startsWith: "Gain" } }` |
+| `isNull` | All | `{ optionalField: { isNull: true } }` |
+
+Every `where` input also includes a `did` field for filtering by author DID.
+
+#### Sorting (`sortBy`, `sortDirection`)
+
+Typed queries support sorting by any scalar field:
+
+```graphql
+query {
+ appBskyFeedPost(sortBy: "createdAt", sortDirection: ASC, first: 10) {
+ edges { node { uri createdAt } }
}
}
```
+Default sort is `indexed_at DESC` (newest first). Available sort fields are generated per-collection from the lexicon schema.
+
+#### Pagination
+
+- **Forward**: `first` + `after` (default: 20, max: 100)
+- **Backward**: `last` + `before`
+- **`totalCount`**: Returned when requested (opt-in, computed only when selected)
+- Cannot use `first`/`after` and `last`/`before` simultaneously
+
## Endpoints
| Endpoint | Description |
diff --git a/client/.env.example b/client/.env.example
index a7468af..93e724d 100644
--- a/client/.env.example
+++ b/client/.env.example
@@ -1,5 +1,5 @@
# =============================================================================
-# Hypergoat Client Configuration
+# Hyperindex Client Configuration
# =============================================================================
# Backend API URL (the Go server)
@@ -8,7 +8,7 @@
NEXT_PUBLIC_API_URL=http://localhost:8080
# Same as above, used for server-side API calls
-HYPERGOAT_URL=http://127.0.0.1:8080
+HYPERINDEX_URL=http://127.0.0.1:8080
# =============================================================================
# OAuth Configuration
diff --git a/client/Dockerfile b/client/Dockerfile
new file mode 100644
index 0000000..acf5e75
--- /dev/null
+++ b/client/Dockerfile
@@ -0,0 +1,47 @@
+# Build stage
+FROM node:22-alpine AS builder
+
+WORKDIR /app
+
+# Accept build-time env vars for Next.js (NEXT_PUBLIC_* are inlined at build time)
+ARG NEXT_PUBLIC_API_URL
+ENV NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL
+
+# Copy package files
+COPY package.json package-lock.json* ./
+
+# Install dependencies
+RUN npm ci
+
+# Copy source
+COPY . .
+
+# Build Next.js
+ENV NEXT_TELEMETRY_DISABLED=1
+RUN npm run build
+
+# Runtime stage
+FROM node:22-alpine AS runner
+
+WORKDIR /app
+
+ENV NODE_ENV=production
+ENV NEXT_TELEMETRY_DISABLED=1
+
+# Create non-root user
+RUN addgroup --system --gid 1001 nodejs && \
+ adduser --system --uid 1001 nextjs
+
+# Copy built files
+COPY --from=builder /app/public ./public
+COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
+COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
+
+USER nextjs
+
+EXPOSE 3000
+
+ENV PORT=3000
+ENV HOSTNAME="0.0.0.0"
+
+CMD ["node", "server.js"]
diff --git a/client/next.config.ts b/client/next.config.ts
index 17a0c6e..7458866 100644
--- a/client/next.config.ts
+++ b/client/next.config.ts
@@ -1,6 +1,8 @@
import type { NextConfig } from "next";
const nextConfig: NextConfig = {
+ // Enable standalone output for Docker deployment
+ output: "standalone",
// Allow external images from Bluesky CDN
images: {
remotePatterns: [
@@ -11,7 +13,7 @@ const nextConfig: NextConfig = {
},
],
},
- // Proxy API requests to Hypergoat backend during development
+ // Proxy API requests to Hyperindex backend during development
async rewrites() {
const apiUrl = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8080";
return [
@@ -23,10 +25,6 @@ const nextConfig: NextConfig = {
source: "/graphql",
destination: `${apiUrl}/graphql`,
},
- {
- source: "/graphiql",
- destination: `${apiUrl}/graphiql`,
- },
{
source: "/oauth/:path*",
destination: `${apiUrl}/oauth/:path*`,
diff --git a/client/package.json b/client/package.json
index ce11b6c..8218662 100644
--- a/client/package.json
+++ b/client/package.json
@@ -23,6 +23,7 @@
"react": "19.2.3",
"react-dom": "19.2.3",
"recharts": "^3.7.0",
+ "next-themes": "^0.4.6",
"tailwind-merge": "^3.4.0"
},
"devDependencies": {
diff --git a/client/src/app/api/admin/graphql/route.ts b/client/src/app/api/admin/graphql/route.ts
index 9c04eaf..a8009e5 100644
--- a/client/src/app/api/admin/graphql/route.ts
+++ b/client/src/app/api/admin/graphql/route.ts
@@ -6,14 +6,14 @@ export const dynamic = "force-dynamic";
/**
* Proxy for admin GraphQL requests.
- * Checks session authentication and passes user DID to Hypergoat.
+ * Checks session authentication and passes user DID to Hyperindex.
*/
export async function POST(request: NextRequest) {
try {
const session = await getSession();
const body = await request.json();
- // Build headers for Hypergoat
+ // Build headers for Hyperindex
const headers: HeadersInit = {
"Content-Type": "application/json",
};
@@ -26,8 +26,8 @@ export async function POST(request: NextRequest) {
console.log("[admin-graphql] Unauthenticated request - no session DID");
}
- // Proxy to Hypergoat
- const response = await fetch(`${env.HYPERGOAT_URL}/admin/graphql`, {
+ // Proxy to Hyperindex
+ const response = await fetch(`${env.HYPERINDEX_URL}/admin/graphql`, {
method: "POST",
headers,
body: JSON.stringify(body),
@@ -35,7 +35,7 @@ export async function POST(request: NextRequest) {
const data = await response.json();
- // Log errors from Hypergoat
+ // Log errors from Hyperindex
if (data.errors) {
console.log("[admin-graphql] GraphQL errors:", JSON.stringify(data.errors));
}
diff --git a/client/src/app/api/graphql/route.ts b/client/src/app/api/graphql/route.ts
index 0649010..05c91b0 100644
--- a/client/src/app/api/graphql/route.ts
+++ b/client/src/app/api/graphql/route.ts
@@ -4,13 +4,13 @@ import { env } from "@/lib/env";
export const dynamic = "force-dynamic";
/**
- * Proxy for public GraphQL requests to Hypergoat.
+ * Proxy for public GraphQL requests to Hyperindex.
*/
export async function POST(request: NextRequest) {
try {
const body = await request.json();
- const response = await fetch(`${env.HYPERGOAT_URL}/graphql`, {
+ const response = await fetch(`${env.HYPERINDEX_URL}/graphql`, {
method: "POST",
headers: {
"Content-Type": "application/json",
diff --git a/client/src/app/backfill/page.tsx b/client/src/app/backfill/page.tsx
index 03aa2eb..98ac57e 100644
--- a/client/src/app/backfill/page.tsx
+++ b/client/src/app/backfill/page.tsx
@@ -73,10 +73,10 @@ export default function BackfillPage() {
{/* Hero Section */}
-
+
Backfill
-
+
Sync historical data from the AT Protocol relay
@@ -95,13 +95,13 @@ export default function BackfillPage() {
{/* Status */}