From 0598b1cd6461c93faaac0064c9ad9ab166d2edce Mon Sep 17 00:00:00 2001 From: Edgecaser Date: Sat, 21 Mar 2026 13:05:05 -0700 Subject: [PATCH 1/3] feat: add daemon module with scheduler, web UI, capture endpoint, digest, and service install MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Implements the full optional daemon module (pip install memsync[daemon]): - DaemonConfig dataclass in config.py with 20 configurable fields - scheduler.py: APScheduler wrapper with 4 jobs (nightly refresh, backup mirror, drift check, weekly digest) — all jobs return early gracefully - web.py: Flask UI for browser-based GLOBAL_MEMORY.md view/edit - capture.py: REST endpoint for iPhone Shortcuts mobile note capture - digest.py: weekly email digest via Claude API - service.py: systemd (Linux) and launchd (Mac) service installation - watchdog.py: thin wrapper for standalone drift check invocation - notify.py: env-var-first SMTP password, file-flag, log channels - 8 daemon CLI subcommands: start/stop/status/schedule/install/uninstall/web - 67 new tests (188 total, 25 smoke), 80% coverage, 0 lint errors - pyproject.toml: [daemon] optional dependency group (apscheduler, flask) Co-Authored-By: Claude Sonnet 4.6 --- .github/ISSUE_TEMPLATE/bug_report.md | 30 + .github/ISSUE_TEMPLATE/provider_request.md | 19 + .github/workflows/ci.yml | 54 ++ .github/workflows/release.yml | 28 + ARCHITECTURE.md | 183 ++++ CLAUDE.md | 92 ++ COMMANDS.md | 220 +++++ CONFIG.md | 247 ++++++ CONTRIBUTING.md | 72 ++ DAEMON.md | 801 +++++++++++++++++ DAEMON_CONFIG.md | 176 ++++ DAEMON_PITFALLS.md | 138 +++ EXISTING_CODE.md | 526 ++++++++++++ PITFALLS.md | 195 +++++ PROVIDERS.md | 367 ++++++++ README.md | 184 ++++ REPO.md | 292 +++++++ STYLE.md | 133 +++ docs/DAEMON_SETUP.md | 226 +++++ docs/adding-a-provider.md | 192 +++++ docs/global-memory-guide.md | 116 +++ memsync/__init__.py | 1 + memsync/backups.py | 48 ++ memsync/claude_md.py | 55 ++ memsync/cli.py | 953 
+++++++++++++++++++++ memsync/config.py | 205 +++++ memsync/daemon/__init__.py | 11 + memsync/daemon/capture.py | 83 ++ memsync/daemon/digest.py | 76 ++ memsync/daemon/notify.py | 68 ++ memsync/daemon/scheduler.py | 222 +++++ memsync/daemon/service.py | 141 +++ memsync/daemon/watchdog.py | 17 + memsync/daemon/web.py | 109 +++ memsync/providers/__init__.py | 75 ++ memsync/providers/custom.py | 27 + memsync/providers/gdrive.py | 73 ++ memsync/providers/icloud.py | 48 ++ memsync/providers/onedrive.py | 71 ++ memsync/sync.py | 179 ++++ pyproject.toml | 94 ++ tests/conftest.py | 58 ++ tests/test_backups.py | 102 +++ tests/test_claude_md.py | 108 +++ tests/test_cli.py | 655 ++++++++++++++ tests/test_config.py | 102 +++ tests/test_daemon_capture.py | 190 ++++ tests/test_daemon_digest.py | 123 +++ tests/test_daemon_notify.py | 94 ++ tests/test_daemon_scheduler.py | 294 +++++++ tests/test_daemon_watchdog.py | 19 + tests/test_daemon_web.py | 99 +++ tests/test_providers.py | 178 ++++ tests/test_sync.py | 208 +++++ 54 files changed, 9077 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/provider_request.md create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml create mode 100644 ARCHITECTURE.md create mode 100644 CLAUDE.md create mode 100644 COMMANDS.md create mode 100644 CONFIG.md create mode 100644 CONTRIBUTING.md create mode 100644 DAEMON.md create mode 100644 DAEMON_CONFIG.md create mode 100644 DAEMON_PITFALLS.md create mode 100644 EXISTING_CODE.md create mode 100644 PITFALLS.md create mode 100644 PROVIDERS.md create mode 100644 README.md create mode 100644 REPO.md create mode 100644 STYLE.md create mode 100644 docs/DAEMON_SETUP.md create mode 100644 docs/adding-a-provider.md create mode 100644 docs/global-memory-guide.md create mode 100644 memsync/__init__.py create mode 100644 memsync/backups.py create mode 100644 memsync/claude_md.py create mode 100644 memsync/cli.py 
create mode 100644 memsync/config.py create mode 100644 memsync/daemon/__init__.py create mode 100644 memsync/daemon/capture.py create mode 100644 memsync/daemon/digest.py create mode 100644 memsync/daemon/notify.py create mode 100644 memsync/daemon/scheduler.py create mode 100644 memsync/daemon/service.py create mode 100644 memsync/daemon/watchdog.py create mode 100644 memsync/daemon/web.py create mode 100644 memsync/providers/__init__.py create mode 100644 memsync/providers/custom.py create mode 100644 memsync/providers/gdrive.py create mode 100644 memsync/providers/icloud.py create mode 100644 memsync/providers/onedrive.py create mode 100644 memsync/sync.py create mode 100644 pyproject.toml create mode 100644 tests/conftest.py create mode 100644 tests/test_backups.py create mode 100644 tests/test_claude_md.py create mode 100644 tests/test_cli.py create mode 100644 tests/test_config.py create mode 100644 tests/test_daemon_capture.py create mode 100644 tests/test_daemon_digest.py create mode 100644 tests/test_daemon_notify.py create mode 100644 tests/test_daemon_scheduler.py create mode 100644 tests/test_daemon_watchdog.py create mode 100644 tests/test_daemon_web.py create mode 100644 tests/test_providers.py create mode 100644 tests/test_sync.py diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..5584187 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,30 @@ +--- +name: Bug report +about: Something isn't working +labels: bug +--- + +**OS:** (macOS / Windows / Linux + version) + +**Python version:** (e.g. 3.12.1) + +**Provider:** (onedrive / icloud / gdrive / custom) + +**`memsync status` output:** +``` +(paste here) +``` + +**Error message:** +``` +(paste full error here) +``` + +**Steps to reproduce:** +1. +2. +3. 
+ +**Expected behavior:** + +**Actual behavior:** diff --git a/.github/ISSUE_TEMPLATE/provider_request.md b/.github/ISSUE_TEMPLATE/provider_request.md new file mode 100644 index 0000000..ec6ac05 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/provider_request.md @@ -0,0 +1,19 @@ +--- +name: Provider request +about: Request support for a new cloud storage provider +labels: provider +--- + +**Provider name:** (e.g. Dropbox, Box, Synology Drive) + +**OS(es) where you use it:** (macOS / Windows / Linux) + +**Default install path(s):** +- macOS: `~/...` +- Windows: `C:/Users/.../...` +- Linux: `~/...` + +**Are you willing to implement it?** +Yes / No / Maybe — if yes, see [docs/adding-a-provider.md](../../docs/adding-a-provider.md) for a guide. + +**Any other notes:** diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..970725e --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,54 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + lint: + name: Lint & security + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install dev dependencies + run: pip install -e ".[dev]" + + - name: ruff (lint + style) + run: ruff check memsync/ + + - name: bandit (security scan) + run: bandit -r memsync/ -c pyproject.toml + + test: + name: Test (${{ matrix.os }}, Python ${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: ["3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: pip install -e ".[dev]" + + - name: Run tests (with coverage) + run: pytest tests/ -v + + - name: Smoke test + run: 
pytest -m smoke -v diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..54d84a7 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,28 @@ +name: Release + +on: + push: + tags: + - "v*" + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + id-token: write # for PyPI Trusted Publishing (OIDC) + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Build + run: | + pip install build + python -m build + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 0000000..de6d7f8 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,183 @@ +# ARCHITECTURE.md + +## System overview + +``` +User runs: memsync refresh --notes "..." + │ + ▼ + memsync/cli.py ← argument parsing, routes to commands + │ + ▼ + memsync/config.py ← loads ~/.config/memsync/config.toml + │ + ▼ + memsync/providers/*.py ← resolves sync root path for this machine + │ + ▼ + memsync/sync.py ← calls Claude API, merges notes into memory + │ + ▼ + memsync/backups.py ← backs up before writing + │ + ▼ + memsync/claude_md.py ← syncs GLOBAL_MEMORY.md → ~/.claude/CLAUDE.md +``` + +--- + +## Module responsibilities + +### `memsync/cli.py` +- Entry point. Parses args, loads config, routes to command functions. +- Does NOT contain business logic — only wiring. +- Every command function signature: `def cmd_<name>(args, config) -> int` +- Returns exit code. Print errors to stderr, output to stdout. + +### `memsync/config.py` +- Loads and saves `~/.config/memsync/config.toml` (Mac/Linux) + or `%APPDATA%\memsync\config.toml` (Windows). +- Exposes a `Config` dataclass — no raw dicts passed around the codebase. +- Handles missing keys with sensible defaults. +- See `CONFIG.md` for full schema. 
+ +### `memsync/providers/` +- `__init__.py` — defines `BaseProvider` ABC and `get_provider(name)` registry function. +- One file per provider: `onedrive.py`, `icloud.py`, `gdrive.py`, `custom.py`. +- Each provider implements `detect() -> Path | None` and `is_available() -> bool`. +- See `PROVIDERS.md` for full spec and all three implementations. + +### `memsync/sync.py` +- The only module that calls the Anthropic API. +- Takes: notes (str), current memory (str), config (Config). +- Returns: updated memory (str), changed (bool). +- Does NOT write files. Caller handles I/O. +- See `PITFALLS.md` — this module has the most trust/safety concerns. + +### `memsync/backups.py` +- `backup(source: Path, backup_dir: Path) -> Path` — copies with timestamp. +- `prune(backup_dir: Path, keep_days: int) -> list[Path]` — removes old backups. +- `list_backups(backup_dir: Path) -> list[Path]` — sorted newest-first. +- `latest_backup(backup_dir: Path) -> Path | None` + +### `memsync/claude_md.py` +- `sync(memory_path: Path, target_path: Path) -> None` + - `target_path` comes from `config.claude_md_target` — never hardcoded. + - Mac/Linux: create symlink if not already correct, backup any existing file first. + - Windows: copy (symlinks require admin rights on Windows). +- `is_synced(memory_path: Path, target_path: Path) -> bool` + +--- + +## Data flow: `memsync init` + +``` +1. cli.py — parse args +2. config.py — check if config already exists (warn if --force not set) +3. providers/ — run detect() on each registered provider in priority order +4. cli.py — if multiple detected, prompt user to choose +5. config.py — write config with chosen provider + detected path +6. providers/ — call get_memory_root() to get the .claude-memory path +7. (filesystem) — create .claude-memory/, backups/, sessions/ dirs +8. (filesystem) — write starter GLOBAL_MEMORY.md if not exists +9. claude_md.py — sync to ~/.claude/CLAUDE.md +10. 
cli.py — print summary of what was created +``` + +## Data flow: `memsync refresh` + +``` +1. cli.py — parse args, read notes from --notes / --file / stdin +2. config.py — load config +3. providers/ — resolve memory root path +4. (filesystem) — read current GLOBAL_MEMORY.md +5. sync.py — call Claude API with current memory + notes +6. sync.py — enforce hard constraints (append-only diff) +7. backups.py — backup current file before overwriting +8. (filesystem) — write updated GLOBAL_MEMORY.md +9. claude_md.py — sync to ~/.claude/CLAUDE.md +10. sessions/ — append notes to dated session log +11. cli.py — print summary (changed/unchanged, backup path) +``` + +--- + +## File layout on disk + +``` +# In cloud sync folder (synced across machines): +OneDrive/.claude-memory/ ← or iCloud/.claude-memory/, etc. + GLOBAL_MEMORY.md ← source of truth + backups/ + GLOBAL_MEMORY_20260321_143022.md + GLOBAL_MEMORY_20260320_091145.md + ... + sessions/ + 2026-03-21.md ← raw notes, append-only, never deleted + 2026-03-20.md + ... + +# On each machine (not synced): +~/.config/memsync/config.toml ← machine-specific config +~/.claude/CLAUDE.md ← symlink → OneDrive/.claude-memory/GLOBAL_MEMORY.md + (or copy on Windows) +``` + +--- + +## What does NOT belong in this tool + +- Project-specific memory (that belongs in each project's CLAUDE.md) +- Cold storage / knowledge bases (use Hipocampus or RAG for that) +- Multi-user or team memory (out of scope for v1) +- Anything that requires a server, database, or API key beyond Anthropic's + +--- + +## Futureproofing decisions + +These are low-effort now and expensive to retrofit later. All three are +already reflected in the code specs above — this section explains the *why*. + +### 1. Version the memory file format + +Write a version comment at the top of every `GLOBAL_MEMORY.md` when it's +first created: + +```markdown + +# Global Memory +... 
+``` + +If the schema ever needs to change (section names, structure, anything), +the version comment lets migration code know what it's dealing with. +Without it, you can't distinguish an old file from a new one. + +Implementation: write this comment in `load_or_init_memory()` when creating +the starter template. Check for it in `refresh_memory_content()` and warn +(don't fail) if it's missing. + +### 2. Don't hardcode the CLAUDE.md target path + +`~/.claude/CLAUDE.md` is where Claude Code reads its global config today. +That could change. The target path lives in `config.claude_md_target` and +is never hardcoded anywhere in the logic modules. `cli.py` reads it from +config and passes it to `claude_md.sync()`. This is already reflected in +the `claude_md.py` module spec above. + +### 3. Keep the Anthropic SDK version loose + +`pyproject.toml` already has `anthropic>=0.40.0` — keep it that way. +Never pin to an exact version. Users should get SDK updates automatically +when they upgrade their environment. + +--- + +## Key constraints + +- Python 3.11+ only. Use match statements, `Path` everywhere, `tomllib` (stdlib). +- No dependencies beyond `anthropic`. Everything else stdlib. +- `tomllib` is read-only (stdlib in 3.11+). Use `tomli_w` for writing, or write TOML + manually for the simple schema we have. See `CONFIG.md`. +- Must work offline except for `memsync refresh` (the only command needing the API). diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..3478f3c --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,92 @@ +# memsync — Project Context for Claude Code + +You are building **memsync**, a cross-platform CLI tool that maintains a global +identity-layer memory file for Claude Code users, synced across machines via +cloud storage they already have (OneDrive, iCloud Drive, Google Drive). + +This document is your entry point. Read all linked documents before writing any code. 
+ +--- + +## Document map + +| File | What it contains | +|---|---| +| `CLAUDE.md` | This file — start here | +| `ARCHITECTURE.md` | Full system design, module map, data flow | +| `PROVIDERS.md` | Provider plugin system — BaseProvider ABC, all three implementations | +| `CONFIG.md` | Config file design, schema, platform paths | +| `COMMANDS.md` | Every CLI command — args, behavior, edge cases | +| `EXISTING_CODE.md` | Working prototype code — use this as the base, not a reference | +| `PITFALLS.md` | Known issues, trust boundaries, things that have already gone wrong | +| `REPO.md` | Repository structure, CI, PyPI, GitHub conventions | +| `STYLE.md` | Code style, naming conventions, what good looks like here | + +Read them in this order: +1. ARCHITECTURE.md — understand the shape of the system +2. EXISTING_CODE.md — understand what already works +3. PROVIDERS.md — the most important new piece +4. CONFIG.md — feeds into everything +5. PITFALLS.md — read before touching sync.py or providers +6. COMMANDS.md, REPO.md, STYLE.md — as needed + +--- + +## What this project is + +memsync solves a specific problem: Claude Code has no memory between sessions. +The standard fix is `~/.claude/CLAUDE.md`, but it drifts, bloats, and doesn't +sync across machines. + +memsync maintains one canonical `GLOBAL_MEMORY.md` in your cloud sync folder. +At session start, Claude Code reads it via a symlink (Mac/Linux) or copy (Windows) +at `~/.claude/CLAUDE.md`. After meaningful sessions, the user runs +`memsync refresh --notes "..."` and the Claude API merges the notes in. + +This is the **identity layer** — who the user is, what they're working on, standing +preferences. Not project docs. Not cold storage. Not a knowledge base. + +--- + +## What already exists + +A working prototype was built in a Claude.ai chat session. 
It covers: +- OneDrive path detection (Mac + Windows) +- Core refresh logic (Claude API call, backup, sync to CLAUDE.md) +- CLI with: init, refresh, status, show, diff, prune + +All prototype code is in `EXISTING_CODE.md`. Use it as the foundation. +Do not rewrite from scratch — refactor to fit the target architecture. + +--- + +## What needs to be built + +1. Provider abstraction layer (`memsync/providers/`) — BaseProvider ABC + 3 implementations +2. Config system (`memsync/config.py`) — TOML, platform-aware paths +3. Refactor existing code to use config + providers +4. Tests (`tests/`) — mocked filesystem + mocked API +5. CI (`.github/workflows/`) — test matrix Mac/Windows/Linux × Python 3.11/3.12 +6. Docs — README, CONTRIBUTING, adding-a-provider guide +7. GitHub issue templates + +See `REPO.md` for full repository layout and build order. + +--- + +## Hard rules + +- Never hardcode the model string. Always read from config. +- Never hardcode any path. Always go through the provider or config system. +- Hard constraints in GLOBAL_MEMORY.md are append-only. Enforce this in code, not prompts. +- Backups before every write. No exceptions. +- See `PITFALLS.md` before touching anything related to the Claude API call or path resolution. + +--- + +## Owner context + +Built by Ian (product leader, writer, not a full-time engineer). +Maintenance appetite: active at launch, wants to go passive or hand off over time. +That means: clear contributor docs, plugin architecture that doesn't require +touching core to add a provider, and CI that catches regressions without manual effort. 
diff --git a/COMMANDS.md b/COMMANDS.md new file mode 100644 index 0000000..c11dc7a --- /dev/null +++ b/COMMANDS.md @@ -0,0 +1,220 @@ +# COMMANDS.md + +## Command map + +``` +memsync +├── init Set up memory structure for the first time +├── refresh Merge session notes into global memory +├── show Print current GLOBAL_MEMORY.md +├── diff Diff current memory vs last backup +├── status Show paths, provider, sync state +├── config +│ ├── show Print current config.toml +│ └── set Update a config value +├── providers List all providers and their detection status +└── prune Remove old backups +``` + +--- + +## `memsync init` + +**Purpose:** First-time setup. Creates directory structure, writes starter memory, +syncs to CLAUDE.md. + +**Args:** +- `--force` — reinitialize even if memory already exists (prompts confirmation) +- `--provider ` — skip auto-detection, use this provider +- `--sync-root ` — skip auto-detection, use this path directly + +**Behavior:** +1. Check if config already exists → warn and exit unless `--force` +2. If no `--provider` given, run auto-detection across all providers + - If 0 detected: print friendly error explaining how to set manually + - If 1 detected: use it, confirm with user + - If 2+ detected: prompt user to choose +3. Resolve memory root from provider +4. Create: `memory_root/`, `memory_root/backups/`, `memory_root/sessions/` +5. If `GLOBAL_MEMORY.md` doesn't exist, write starter template +6. Write config +7. Run `claude_md.sync()` to create the CLAUDE.md link +8. Print summary + +**Output (success):** +``` +memsync initialized. + + Provider: OneDrive + Sync root: /Users/ian/OneDrive + Memory: /Users/ian/OneDrive/.claude-memory/GLOBAL_MEMORY.md + CLAUDE.md: /Users/ian/.claude/CLAUDE.md → (symlink) + +Next: edit your memory file, then run: + memsync refresh --notes "initial setup complete" +``` + +--- + +## `memsync refresh` + +**Purpose:** Core command. Merge session notes into GLOBAL_MEMORY.md via Claude API. 
+ +**Args:** +- `--notes <string>` / `-n <string>` — notes as inline string +- `--file <path>` / `-f <path>` — read notes from file +- `--dry-run` — print what would change, don't write anything +- (stdin) — if no --notes or --file and stdin is not a tty, read from stdin + +**Exactly one of --notes, --file, or stdin must be provided.** + +**Behavior:** +1. Load config +2. Resolve memory path via provider +3. Read current GLOBAL_MEMORY.md +4. Call Claude API (see sync.py spec below and PITFALLS.md) +5. Enforce hard constraints (append-only diff) +6. If changed AND not dry-run: + a. Backup current file + b. Write updated memory + c. Sync to CLAUDE.md + d. Append notes to sessions/<date>.md +7. Print summary + +**Output (changed):** +``` +Memory updated. + Backup: /Users/ian/OneDrive/.claude-memory/backups/GLOBAL_MEMORY_20260321_143022.md + Memory: /Users/ian/OneDrive/.claude-memory/GLOBAL_MEMORY.md + CLAUDE.md synced ✓ +``` + +**Output (no change):** +``` +No changes detected. +``` + +**Output (dry-run):** +``` +[DRY RUN] No files written. + +--- diff --- +- Old line ++ New line +... +``` + +--- + +## `memsync show` + +**Purpose:** Print current GLOBAL_MEMORY.md to stdout. + +**Args:** none + +**Use case:** Pipe to less, copy to clipboard, quick review. + +--- + +## `memsync diff` + +**Purpose:** Show unified diff between current memory and the most recent backup. + +**Args:** +- `--backup <filename>` — diff against a specific backup instead of latest + +**Output:** Standard unified diff format. If no backups exist, print a message. + +--- + +## `memsync status` + +**Purpose:** Sanity check — what is memsync pointing at on this machine? 
+ +**Output:** +``` +Platform: macOS (Darwin) +Config: /Users/ian/.config/memsync/config.toml ✓ +Provider: OneDrive +Sync root: /Users/ian/Library/CloudStorage/OneDrive-Personal ✓ +Memory: /Users/ian/Library/CloudStorage/OneDrive-Personal/.claude-memory/GLOBAL_MEMORY.md ✓ +CLAUDE.md: /Users/ian/.claude/CLAUDE.md → symlink ✓ +Backups: 14 file(s) +Session logs: 22 day(s) +Model: claude-sonnet-4-20250514 +``` + +--- + +## `memsync config show` + +Print the contents of config.toml. + +## `memsync config set <key> <value>` + +Update a single config value and save. + +```bash +memsync config set provider icloud +memsync config set model claude-opus-4-20250514 +memsync config set keep_days 60 +memsync config set sync_root "/Users/ian/Dropbox" +``` + +After `config set sync_root`, automatically set provider to "custom". +After any change, print the updated value to confirm. + +--- + +## `memsync providers` + +List all registered providers and their detection status on this machine. + +**Output:** +``` +Available providers: + + onedrive OneDrive ✓ detected at /Users/ian/Library/CloudStorage/OneDrive-Personal + icloud iCloud Drive ✓ detected at /Users/ian/Library/Mobile Documents/com~apple~CloudDocs + gdrive Google Drive ✗ not detected + custom Custom Path ✗ no path configured + +Active provider: onedrive +``` + +--- + +## `memsync prune` + +**Args:** +- `--keep-days <n>` — default from config (30) +- `--dry-run` — list what would be deleted without deleting + +**Output:** +``` +Pruned 3 backup(s) older than 30 days. 
+ removed: GLOBAL_MEMORY_20260101_120000.md + removed: GLOBAL_MEMORY_20260102_083000.md + removed: GLOBAL_MEMORY_20260115_201500.md +``` + +--- + +## Exit codes + +| Code | Meaning | +|---|---| +| 0 | Success | +| 1 | General error (printed to stderr) | +| 2 | Config not found — run `memsync init` | +| 3 | Memory file not found | +| 4 | Provider detection failed | +| 5 | API error | + +--- + +## Error message conventions + +- Always print errors to stderr +- Always suggest a fix, not just a description of the problem +- Example: `Error: no provider detected. Run 'memsync init' or set a custom path with 'memsync config set sync_root /path/to/folder'` diff --git a/CONFIG.md b/CONFIG.md new file mode 100644 index 0000000..865c5f0 --- /dev/null +++ b/CONFIG.md @@ -0,0 +1,247 @@ +# CONFIG.md + +## Config file location + +```python +import platform +from pathlib import Path + +def get_config_path() -> Path: + if platform.system() == "Windows": + import os + appdata = os.environ.get("APPDATA", str(Path.home() / "AppData" / "Roaming")) + return Path(appdata) / "memsync" / "config.toml" + else: + # Mac and Linux — XDG standard + import os + xdg_config = os.environ.get("XDG_CONFIG_HOME", str(Path.home() / ".config")) + return Path(xdg_config) / "memsync" / "config.toml" +``` + +--- + +## Config schema (TOML) + +```toml +[core] +provider = "onedrive" # which provider is active on this machine +model = "claude-sonnet-4-20250514" # Anthropic model for refresh +max_memory_lines = 400 # soft cap passed to the refresh prompt + +[paths] +# Optional overrides — set by memsync if auto-detect finds a non-default location +# sync_root = "/Users/ian/Library/CloudStorage/OneDrive-Personal" + +# Where to write the CLAUDE.md file that Claude Code reads at session start. +# Change this if Claude Code ever moves its config location, or if you use +# a non-standard Claude Code install. 
+claude_md_target = "~/.claude/CLAUDE.md" + +[backups] +keep_days = 30 + +[providers.onedrive] +# provider-specific config (currently unused, reserved for future) + +[providers.icloud] +# same + +[providers.gdrive] +# same +``` + +--- + +## Config dataclass + +```python +# memsync/config.py + +from __future__ import annotations +import tomllib +import platform +from dataclasses import dataclass, field +from pathlib import Path + + +@dataclass +class Config: + # [core] + provider: str = "onedrive" + model: str = "claude-sonnet-4-20250514" + max_memory_lines: int = 400 + + # [paths] + sync_root: Path | None = None # None = use provider auto-detect + claude_md_target: Path = Path("~/.claude/CLAUDE.md") + + # [backups] + keep_days: int = 30 + + @classmethod + def load(cls) -> "Config": + path = get_config_path() + if not path.exists(): + return cls() # all defaults + with open(path, "rb") as f: + raw = tomllib.load(f) + return cls._from_dict(raw) + + @classmethod + def _from_dict(cls, raw: dict) -> "Config": + core = raw.get("core", {}) + paths = raw.get("paths", {}) + backups = raw.get("backups", {}) + + sync_root = paths.get("sync_root") + claude_md_target = paths.get("claude_md_target", "~/.claude/CLAUDE.md") + return cls( + provider=core.get("provider", "onedrive"), + model=core.get("model", "claude-sonnet-4-20250514"), + max_memory_lines=core.get("max_memory_lines", 400), + sync_root=Path(sync_root) if sync_root else None, + claude_md_target=Path(claude_md_target).expanduser(), + keep_days=backups.get("keep_days", 30), + ) + + def save(self) -> None: + path = get_config_path() + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(self._to_toml(), encoding="utf-8") + + def _to_toml(self) -> str: + """ + tomllib is read-only (stdlib). We write TOML manually. + Schema is simple enough that this is fine. + If it grows, add tomli_w as a dependency. 
+ """ + lines = [ + "[core]", + f'provider = "{self.provider}"', + f'model = "{self.model}"', + f"max_memory_lines = {self.max_memory_lines}", + "", + "[paths]", + f'claude_md_target = "{self.claude_md_target.as_posix()}"', + ] + if self.sync_root: + # TOML strings need forward slashes or escaped backslashes + lines.append(f'sync_root = "{self.sync_root.as_posix()}"') + lines += [ + "", + "[backups]", + f"keep_days = {self.keep_days}", + "", + ] + return "\n".join(lines) + + +def get_config_path() -> Path: + if platform.system() == "Windows": + import os + appdata = os.environ.get("APPDATA", str(Path.home() / "AppData" / "Roaming")) + return Path(appdata) / "memsync" / "config.toml" + else: + import os + xdg_config = os.environ.get("XDG_CONFIG_HOME", str(Path.home() / ".config")) + return Path(xdg_config) / "memsync" / "config.toml" +``` + +--- + +## Model handling + +The model string is the only config value that will need regular user attention +as Anthropic releases new models. Design for this explicitly: + +**`memsync config set model `** — already in the plan, primary update path. + +**`memsync refresh --model `** — one-off override without touching config. +Useful when a user wants to test a new model before committing, or use a cheaper +model for a quick session without changing their default. + +```python +# In cmd_refresh — merge --model into config before passing to sync +if args.model: + config = dataclasses.replace(config, model=args.model) +result = refresh_memory_content(notes, current_memory, config) +``` + +**Friendly error on bad model string.** The Anthropic API returns a specific error +when a model ID is not found. 
Catch it and print a useful message: + +```python +except anthropic.BadRequestError as e: + if "model" in str(e).lower(): + print( + f"Error: model '{config.model}' may be unavailable or misspelled.\n" + f"Update with: memsync config set model \n" + f"Current models: https://docs.anthropic.com/en/docs/about-claude/models", + file=sys.stderr, + ) + return 5 + raise +``` + +**`memsync models` command** — v2, not v1. Would call the Anthropic API to list +available models and flag if the configured one is deprecated. Don't build it yet — +note it in CHANGELOG as a planned feature. + +**Valid model strings as of writing (2026-03):** +- `claude-sonnet-4-20250514` — default, best balance of quality and cost +- `claude-opus-4-20250514` — highest quality, higher cost +- `claude-haiku-4-5-20251001` — fastest, lowest cost, fine for simple memory updates + +Users on a budget can set Haiku as their default. The memory refresh prompt is +not complex enough to need Opus for most use cases. + +--- + +## `memsync config` commands + +``` +memsync config show + → prints current config.toml contents + +memsync config set provider icloud + → updates config.provider, saves + +memsync config set model claude-opus-4-20250514 + → updates config.model, saves + +memsync config set sync_root /path/to/custom/folder + → updates config.sync_root, saves + → also sets provider to "custom" automatically + +memsync config set keep_days 60 + → updates config.keep_days, saves +``` + +Valid keys for `memsync config set`: +- `provider` — must be a registered provider name +- `model` — any string (validated on first API call with friendly error) +- `sync_root` — path, must exist +- `claude_md_target` — path to write CLAUDE.md (default: `~/.claude/CLAUDE.md`) +- `max_memory_lines` — integer +- `keep_days` — integer + +--- + +## Notes + +- Config is machine-specific. It lives in `~/.config/` or `%APPDATA%`, NOT in the + sync folder. 
Two machines can use different providers pointing to the same cloud + storage location — that's fine and expected. + +- The model default (`claude-sonnet-4-20250514`) will rot as Anthropic releases new + models. The intent is for users to update it via `memsync config set model ...` + when they want to upgrade. Do not auto-update the model. Do not pin to a specific + version in code — always read from config. + +- `claude_md_target` defaults to `~/.claude/CLAUDE.md` but is configurable so users + aren't broken if Claude Code ever changes its config location, or if they have a + non-standard setup. Always expand `~` via `.expanduser()` before use. + +- `tomllib` (stdlib, Python 3.11+) is read-only. Writing is done manually via + `_to_toml()`. If the config schema grows significantly, add `tomli_w` as a + dependency. For now, keep the dep count at 1 (anthropic only). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..25c17cc --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,72 @@ +# Contributing to memsync + +Thanks for your interest. memsync is designed to be easy to extend — adding a new provider requires touching exactly one new file and one line in `__init__.py`. + +--- + +## Setup + +```bash +git clone https://github.com/YOUR_USERNAME/memsync +cd memsync +pip install -e ".[dev]" +``` + +Run tests: + +```bash +pytest tests/ -v +``` + +--- + +## Adding a provider + +The most common contribution is adding support for a new cloud storage provider (Dropbox, Box, Synology Drive, etc.). See [docs/adding-a-provider.md](docs/adding-a-provider.md) for a complete guide with a worked example. + +--- + +## Code style + +- Python 3.11+. Use `Path` everywhere, `from __future__ import annotations` at the top of every module. +- Type hints on all functions. +- No dependencies beyond `anthropic` (stdlib only, except dev deps). +- See [STYLE.md](STYLE.md) for the full style guide. 
+ +--- + +## Module boundaries + +Each module has one job: + +- `sync.py` — calls the API, returns text. Does not write files. +- `cli.py` — handles I/O. Does not contain business logic. +- `providers/` — detect paths. Do not create directories or write files. +- `config.py` — loads and saves config. Does not call the API. + +--- + +## Tests + +- All tests use `tmp_path` for filesystem isolation — never touch `~/.config`, `~/.claude`, or any cloud folder. +- All tests mock the Anthropic API — never make real API calls. +- Tests run on macOS, Windows, and Linux via CI. If your change is platform-specific, add a `pytest.mark.skipif` guard. + +--- + +## Pull requests + +- Open an issue first for anything beyond a small bug fix. +- PRs require CI green on all 6 matrix combinations (3 OS × 2 Python versions). +- Squash merge — keep the commit history clean. +- Commit style: `feat:`, `fix:`, `refactor:`, `test:`, `docs:`, `chore:` prefix, present tense, no period. + +--- + +## Hard rules + +- Never hardcode the model string. Always read from `config.model`. +- Never hardcode paths. Always go through the provider or config system. +- Hard constraints in GLOBAL_MEMORY.md are append-only. This is enforced in Python in `sync.py` — do not remove that check. +- Backups before every write. No exceptions. +- Read [PITFALLS.md](PITFALLS.md) before touching `sync.py` or any provider. diff --git a/DAEMON.md b/DAEMON.md new file mode 100644 index 0000000..8e389d6 --- /dev/null +++ b/DAEMON.md @@ -0,0 +1,801 @@ +# DAEMON.md + +## What this module is + +The daemon is an optional, always-on companion to memsync core. It runs on a +persistent machine (Raspberry Pi, home server, always-on desktop) and automates +the operations that core requires you to trigger manually. + +It is a separate optional install. Core memsync never imports from this module. 
+ +```bash +pip install memsync[daemon] # installs core + daemon extras +``` + +Read `CLAUDE.md` and `ARCHITECTURE.md` before this file — this module extends +that system, it does not replace any of it. + +--- + +## What it does + +| Feature | What it automates | +|---|---| +| Scheduled refresh | Runs `memsync refresh` nightly from session logs — no manual trigger needed | +| Backup mirror | rsync of `.claude-memory/` to a local path hourly — independent of OneDrive | +| Web UI | Browser-based view/edit of `GLOBAL_MEMORY.md` on the local network | +| Capture endpoint | REST endpoint for mobile notes (iPhone Shortcuts, etc.) | +| Drift detection | Alerts when `CLAUDE.md` on any machine is stale vs `GLOBAL_MEMORY.md` | +| Weekly digest | Email summary of the week's session logs and memory changes | + +All features are individually toggleable in config. None are on by default except +scheduled refresh and backup mirror. + +--- + +## Module structure + +``` +memsync/daemon/ +├── __init__.py # version, public API +├── scheduler.py # APScheduler wrapper, job definitions +├── web.py # Flask web UI (view + edit GLOBAL_MEMORY.md) +├── capture.py # REST endpoint for mobile note capture +├── watchdog.py # drift detection between CLAUDE.md and GLOBAL_MEMORY.md +├── digest.py # weekly email digest +├── service.py # systemd (Pi/Linux) and launchd (Mac) service install +└── notify.py # notification abstraction (email, file flag, log) +``` + +--- + +## New CLI commands + +``` +memsync daemon start start the daemon in the foreground (for testing) +memsync daemon start --detach start as background process +memsync daemon stop stop background process +memsync daemon status show running status, last job times, next scheduled runs +memsync daemon install register as system service (auto-starts on boot) +memsync daemon uninstall remove system service registration +memsync daemon schedule show all scheduled jobs and last/next run times +memsync daemon web open web UI in browser 
(shortcut) +``` + +--- + +## Config additions + +The daemon adds a `[daemon]` section to `config.toml`. Written by `memsync daemon install`, +not present in a core-only install. + +```toml +[daemon] +enabled = true + +# Scheduled refresh +# Reads today's sessions/.md and runs memsync refresh automatically. +# Cron syntax. Default: 11:55pm daily. +refresh_schedule = "55 23 * * *" +refresh_enabled = true + +# Backup mirror +# Independent local copy of .claude-memory/ — not subject to OneDrive sync. +# Empty string = disabled. +backup_mirror_path = "" +backup_mirror_schedule = "0 * * * *" # hourly + +# Web UI +web_ui_enabled = true +web_ui_port = 5000 +web_ui_host = "0.0.0.0" # 0.0.0.0 = accessible on local network + # 127.0.0.1 = localhost only + +# Mobile capture endpoint +capture_enabled = true +capture_port = 5001 +capture_token = "" # optional shared secret for the endpoint + +# Drift detection +drift_check_enabled = true +drift_check_interval_hours = 6 +drift_notify = "log" # "log", "email", or "file" + +# Weekly digest +digest_enabled = false +digest_schedule = "0 9 * * 1" # Monday 9am +digest_email_to = "" +digest_email_from = "" +digest_smtp_host = "" +digest_smtp_port = 587 +digest_smtp_user = "" +digest_smtp_password = "" # consider using keyring instead +``` + +--- + +## scheduler.py + +Uses APScheduler in blocking mode for foreground, background thread mode for detached. + +```python +# memsync/daemon/scheduler.py +from __future__ import annotations + +from pathlib import Path +from apscheduler.schedulers.background import BackgroundScheduler +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.triggers.cron import CronTrigger + +from memsync.config import Config +from memsync.sync import refresh_memory_content +from memsync.backups import backup + + +def build_scheduler(config: Config, blocking: bool = False): + """ + Build and configure the scheduler from config. 
+ blocking=True for foreground (testing), False for daemon mode. + """ + scheduler = BlockingScheduler() if blocking else BackgroundScheduler() + + if config.daemon.refresh_enabled: + scheduler.add_job( + func=job_nightly_refresh, + trigger=CronTrigger.from_crontab(config.daemon.refresh_schedule), + args=[config], + id="nightly_refresh", + name="Nightly memory refresh", + misfire_grace_time=3600, # run even if missed by up to 1 hour + ) + + if config.daemon.backup_mirror_path: + scheduler.add_job( + func=job_backup_mirror, + trigger=CronTrigger.from_crontab(config.daemon.backup_mirror_schedule), + args=[config], + id="backup_mirror", + name="Backup mirror sync", + misfire_grace_time=3600, + ) + + if config.daemon.drift_check_enabled: + scheduler.add_job( + func=job_drift_check, + trigger="interval", + hours=config.daemon.drift_check_interval_hours, + args=[config], + id="drift_check", + name="CLAUDE.md drift check", + ) + + if config.daemon.digest_enabled: + scheduler.add_job( + func=job_weekly_digest, + trigger=CronTrigger.from_crontab(config.daemon.digest_schedule), + args=[config], + id="weekly_digest", + name="Weekly digest email", + ) + + return scheduler + + +def job_nightly_refresh(config: Config) -> None: + """ + Read today's session log and run a refresh if there are notes. + Silently skips if no session log exists for today. 
+ """ + from datetime import date + from memsync.providers import get_provider + + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + if not sync_root: + return + + memory_root = provider.get_memory_root(sync_root) + today = date.today().strftime("%Y-%m-%d") + session_log = memory_root / "sessions" / f"{today}.md" + + if not session_log.exists(): + return + + notes = session_log.read_text(encoding="utf-8").strip() + if not notes: + return + + memory_path = memory_root / "GLOBAL_MEMORY.md" + current_memory = memory_path.read_text(encoding="utf-8") + + result = refresh_memory_content(notes, current_memory, config) + + if result["changed"]: + backup(memory_path, memory_root / "backups") + memory_path.write_text(result["updated_content"], encoding="utf-8") + from memsync.claude_md import sync as sync_claude_md + sync_claude_md(memory_path, config.claude_md_target) + + +def job_backup_mirror(config: Config) -> None: + """rsync .claude-memory/ to the local mirror path.""" + import shutil + from memsync.providers import get_provider + + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + if not sync_root: + return + + memory_root = provider.get_memory_root(sync_root) + mirror = Path(config.daemon.backup_mirror_path).expanduser() + mirror.mkdir(parents=True, exist_ok=True) + + # Copy all files, preserve timestamps + for src in memory_root.rglob("*"): + if src.is_file(): + rel = src.relative_to(memory_root) + dst = mirror / rel + dst.parent.mkdir(parents=True, exist_ok=True) + shutil.copy2(src, dst) + + +def job_drift_check(config: Config) -> None: + """Check if CLAUDE.md is stale relative to GLOBAL_MEMORY.md.""" + from memsync.claude_md import is_synced + from memsync.providers import get_provider + from memsync.daemon.notify import notify + + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + if not sync_root: + return + + memory_root = 
provider.get_memory_root(sync_root) + memory_path = memory_root / "GLOBAL_MEMORY.md" + + if not is_synced(memory_path, config.claude_md_target): + notify( + config, + subject="memsync: CLAUDE.md is out of sync", + body=( + f"CLAUDE.md at {config.claude_md_target} does not match " + f"GLOBAL_MEMORY.md at {memory_path}.\n" + f"Run: memsync refresh to resync." + ), + ) + + +def job_weekly_digest(config: Config) -> None: + """Generate and email a weekly digest of session logs.""" + from memsync.daemon.digest import generate_and_send + generate_and_send(config) +``` + +--- + +## web.py + +Simple Flask app. Read-only view by default, edit mode behind a confirmation. +Accessible on the local network at `http://pi.local:5000` (or whatever the +Pi's hostname is). + +```python +# memsync/daemon/web.py +from __future__ import annotations + +from pathlib import Path +from flask import Flask, render_template_string, request, redirect, url_for + +from memsync.config import Config +from memsync.backups import backup +from memsync.claude_md import sync as sync_claude_md + +# Inline template — no separate template files needed for this simple UI +TEMPLATE = """ + + + + memsync — Global Memory + + + + +

+<body>
+  <h1>Global Memory</h1>
+  <div class="meta">
+    <code>{{ memory_path }}</code><br>
+    Last modified: {{ last_modified }}
+    {% if message %} — <span class="{{ message_class }}">{{ message }}</span>{% endif %}
+  </div>
+  <form method="post" action="/save">
+    <textarea name="content">{{ content }}</textarea>
+    <div>
+      <button type="submit">Save</button>
+      <a href="/">Cancel</a>
+    </div>
+  </form>
+</body>
+</html>
+ + +""" + + +def create_app(config: Config) -> Flask: + app = Flask(__name__) + app.config["MEMSYNC_CONFIG"] = config + + def get_memory_path() -> Path: + from memsync.providers import get_provider + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + return provider.get_memory_root(sync_root) / "GLOBAL_MEMORY.md" + + @app.get("/") + def index(): + path = get_memory_path() + content = path.read_text(encoding="utf-8") if path.exists() else "" + import datetime + last_mod = ( + datetime.datetime.fromtimestamp(path.stat().st_mtime).strftime("%Y-%m-%d %H:%M") + if path.exists() else "never" + ) + return render_template_string( + TEMPLATE, + content=content, + memory_path=path, + last_modified=last_mod, + message=request.args.get("message", ""), + message_class=request.args.get("cls", "saved"), + ) + + @app.post("/save") + def save(): + path = get_memory_path() + new_content = request.form["content"] + try: + if path.exists(): + backup(path, path.parent / "backups") + path.write_text(new_content, encoding="utf-8") + sync_claude_md(path, config.claude_md_target) + return redirect("/?message=Saved+successfully&cls=saved") + except Exception as e: + return redirect(f"/?message=Error:+{e}&cls=error") + + return app + + +def run_web(config: Config) -> None: + app = create_app(config) + app.run( + host=config.daemon.web_ui_host, + port=config.daemon.web_ui_port, + debug=False, + ) +``` + +--- + +## capture.py + +Minimal REST endpoint. Accepts a POST with a note string, appends to today's +session log. Designed for iPhone Shortcuts or any HTTP client. 
+ +```python +# memsync/daemon/capture.py +from __future__ import annotations + +from datetime import datetime +from pathlib import Path + +from flask import Flask, request, jsonify + +from memsync.config import Config + + +def create_capture_app(config: Config) -> Flask: + app = Flask(__name__) + + def get_session_log() -> Path: + from memsync.providers import get_provider + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + memory_root = provider.get_memory_root(sync_root) + today = datetime.now().strftime("%Y-%m-%d") + return memory_root / "sessions" / f"{today}.md" + + def check_token() -> bool: + token = config.daemon.capture_token + if not token: + return True # no auth configured — accept all (local network only) + return request.headers.get("X-Memsync-Token") == token + + @app.post("/note") + def add_note(): + if not check_token(): + return jsonify({"error": "unauthorized"}), 401 + + body = request.get_json(silent=True) + if not body or "text" not in body: + return jsonify({"error": "missing 'text' field"}), 400 + + text = body["text"].strip() + if not text: + return jsonify({"error": "empty note"}), 400 + + log_path = get_session_log() + log_path.parent.mkdir(parents=True, exist_ok=True) + timestamp = datetime.now().strftime("%H:%M:%S") + + with open(log_path, "a", encoding="utf-8") as f: + f.write(f"\n---\n### {timestamp} (captured)\n{text}\n") + + return jsonify({"ok": True, "timestamp": timestamp}) + + @app.get("/health") + def health(): + return jsonify({"ok": True}) + + return app + + +def run_capture(config: Config) -> None: + app = create_capture_app(config) + app.run( + host="0.0.0.0", # always local-network accessible + port=config.daemon.capture_port, + debug=False, + ) +``` + +**iPhone Shortcut setup:** Create a "Get Contents of URL" action with: +- URL: `http://pi.local:5001/note` +- Method: POST +- Headers: `X-Memsync-Token: ` (if configured) +- Body JSON: `{"text": "Shortcut Input"}` + +--- + +## 
service.py + +Installs memsync daemon as a system service so it starts on boot. + +```python +# memsync/daemon/service.py +from __future__ import annotations + +import platform +import subprocess +from pathlib import Path + +from memsync.config import get_config_path + + +SYSTEMD_UNIT = """\ +[Unit] +Description=memsync daemon +After=network.target + +[Service] +Type=simple +ExecStart={memsync_bin} daemon start +Restart=on-failure +RestartSec=10 +Environment=ANTHROPIC_API_KEY={api_key_placeholder} + +[Install] +WantedBy=multi-user.target +""" + +LAUNCHD_PLIST = """\ + + + + + Label + com.memsync.daemon + ProgramArguments + + {memsync_bin} + daemon + start + + RunAtLoad + + KeepAlive + + StandardOutPath + {log_dir}/memsync-daemon.log + StandardErrorPath + {log_dir}/memsync-daemon.err + + +""" + + +def install_service() -> None: + system = platform.system() + memsync_bin = _find_memsync_bin() + + if system == "Linux": + _install_systemd(memsync_bin) + elif system == "Darwin": + _install_launchd(memsync_bin) + else: + raise NotImplementedError( + "Service install not supported on Windows. " + "Run 'memsync daemon start --detach' from Task Scheduler instead." 
+ ) + + +def uninstall_service() -> None: + system = platform.system() + if system == "Linux": + _uninstall_systemd() + elif system == "Darwin": + _uninstall_launchd() + + +def _install_systemd(memsync_bin: str) -> None: + unit_path = Path("/etc/systemd/system/memsync.service") + unit_content = SYSTEMD_UNIT.format( + memsync_bin=memsync_bin, + api_key_placeholder="", + ) + unit_path.write_text(unit_content) + subprocess.run(["systemctl", "daemon-reload"], check=True) + subprocess.run(["systemctl", "enable", "memsync"], check=True) + subprocess.run(["systemctl", "start", "memsync"], check=True) + print(f"Service installed: {unit_path}") + print("Set ANTHROPIC_API_KEY in the unit file, then: systemctl restart memsync") + + +def _install_launchd(memsync_bin: str) -> None: + log_dir = Path.home() / "Library" / "Logs" / "memsync" + log_dir.mkdir(parents=True, exist_ok=True) + plist_path = Path.home() / "Library" / "LaunchAgents" / "com.memsync.daemon.plist" + plist_content = LAUNCHD_PLIST.format(memsync_bin=memsync_bin, log_dir=log_dir) + plist_path.write_text(plist_content) + subprocess.run(["launchctl", "load", str(plist_path)], check=True) + print(f"Service installed: {plist_path}") + + +def _uninstall_systemd() -> None: + subprocess.run(["systemctl", "stop", "memsync"], check=False) + subprocess.run(["systemctl", "disable", "memsync"], check=False) + unit_path = Path("/etc/systemd/system/memsync.service") + if unit_path.exists(): + unit_path.unlink() + subprocess.run(["systemctl", "daemon-reload"], check=True) + print("Service removed.") + + +def _uninstall_launchd() -> None: + plist_path = Path.home() / "Library" / "LaunchAgents" / "com.memsync.daemon.plist" + if plist_path.exists(): + subprocess.run(["launchctl", "unload", str(plist_path)], check=False) + plist_path.unlink() + print("Service removed.") + + +def _find_memsync_bin() -> str: + import shutil + bin_path = shutil.which("memsync") + if not bin_path: + raise FileNotFoundError( + "memsync not found in 
PATH. Install with: pip install memsync[daemon]" + ) + return bin_path +``` + +--- + +## notify.py + +Abstraction so watchdog and digest can send alerts without caring about the channel. + +```python +# memsync/daemon/notify.py +from __future__ import annotations + +import logging +from memsync.config import Config + +logger = logging.getLogger("memsync.daemon") + + +def notify(config: Config, subject: str, body: str) -> None: + """ + Send a notification via the configured channel. + Channels: "log" (default), "email", "file" + Never raises — notification failure should not crash the daemon. + """ + try: + match config.daemon.drift_notify: + case "email": + _send_email(config, subject, body) + case "file": + _write_flag_file(config, subject, body) + case _: + logger.warning("%s: %s", subject, body) + except Exception as e: + logger.error("Notification failed: %s", e) + + +def _send_email(config: Config, subject: str, body: str) -> None: + import smtplib + from email.message import EmailMessage + + msg = EmailMessage() + msg["Subject"] = subject + msg["From"] = config.daemon.digest_email_from + msg["To"] = config.daemon.digest_email_to + msg.set_content(body) + + with smtplib.SMTP(config.daemon.digest_smtp_host, config.daemon.digest_smtp_port) as smtp: + smtp.starttls() + smtp.login(config.daemon.digest_smtp_user, config.daemon.digest_smtp_password) + smtp.send_message(msg) + + +def _write_flag_file(config: Config, subject: str, body: str) -> None: + from pathlib import Path + from datetime import datetime + + flag_dir = Path.home() / ".config" / "memsync" / "alerts" + flag_dir.mkdir(parents=True, exist_ok=True) + ts = datetime.now().strftime("%Y%m%d_%H%M%S") + flag_file = flag_dir / f"{ts}_alert.txt" + flag_file.write_text(f"{subject}\n\n{body}\n", encoding="utf-8") +``` + +--- + +## digest.py + +Weekly email summarizing what changed in the memory file and what was logged. 
+ +```python +# memsync/daemon/digest.py +from __future__ import annotations + +from datetime import date, timedelta +from pathlib import Path + +import anthropic + +from memsync.config import Config + + +def generate_and_send(config: Config) -> None: + """Generate a weekly digest and send via configured email.""" + from memsync.providers import get_provider + from memsync.daemon.notify import _send_email + + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + if not sync_root: + return + + memory_root = provider.get_memory_root(sync_root) + digest_text = generate_digest(memory_root, config) + + if digest_text: + _send_email( + config, + subject=f"memsync weekly digest — week of {date.today().strftime('%b %d')}", + body=digest_text, + ) + + +def generate_digest(memory_root: Path, config: Config) -> str: + """ + Collect this week's session logs and generate a plain-text summary + via the Claude API. + """ + today = date.today() + week_ago = today - timedelta(days=7) + + session_logs = [] + for i in range(7): + day = week_ago + timedelta(days=i + 1) + log_path = memory_root / "sessions" / f"{day.strftime('%Y-%m-%d')}.md" + if log_path.exists(): + session_logs.append(f"## {day.strftime('%A %b %d')}\n{log_path.read_text(encoding='utf-8')}") + + if not session_logs: + return "" + + all_notes = "\n\n".join(session_logs) + + client = anthropic.Anthropic() + response = client.messages.create( + model=config.model, + max_tokens=1000, + system=( + "You are summarizing a week of AI assistant session notes for the user. " + "Write a brief, plain-text weekly summary: what they worked on, " + "any notable decisions or completions, and anything that seems worth " + "following up on. 150-250 words. No headers. Direct and useful." 
+ ), + messages=[{"role": "user", "content": all_notes}], + ) + + return response.content[0].text.strip() +``` + +--- + +## Pitfalls specific to the daemon + +### API key in systemd unit file +The systemd unit template includes a placeholder for `ANTHROPIC_API_KEY`. +Storing secrets in unit files is not ideal — they're world-readable by default. +Document that users should use `systemctl edit memsync` to add the key in an +override file, or use a secrets manager. Do not store keys in the repo. + +### Flask in production +The Flask dev server (`app.run()`) is fine for local network use on a Pi. +Do not suggest or document using it as a public-facing server. If a user +asks about exposing it to the internet, redirect them to proper WSGI + auth. + +### Port conflicts +5000 and 5001 are common dev ports. Document that they're configurable and +how to change them if there's a conflict. + +### systemd on Pi requires sudo +`systemctl enable` and the unit file write require root. The install function +will fail without it. Print a clear error and suggest `sudo memsync daemon install`. + +### APScheduler job persistence +APScheduler by default runs jobs in memory — if the daemon restarts, job +history is lost. That's fine for memsync (jobs are time-based, not state-based). +Do not add a job store database — it's unnecessary complexity. + +### Nightly refresh with empty session log +If the user didn't run any sessions that day, `sessions/.md` won't exist. +`job_nightly_refresh` handles this with an early return. Make sure this stays +in place — an empty notes payload to the API wastes tokens and may produce +hallucinated changes. + +--- + +## Build order for daemon module + +Do this after core memsync is complete and tested. + +1. `DaemonConfig` dataclass additions to `config.py` +2. `scheduler.py` + `notify.py` — the backbone +3. `web.py` — Flask UI +4. `capture.py` — REST endpoint +5. `service.py` — system service install +6. 
`digest.py` — weekly email (depends on notify) +7. Tests for scheduler jobs (mock filesystem + mock API) +8. Tests for web UI (Flask test client) +9. Tests for capture endpoint (Flask test client) +10. Update `pyproject.toml` with `[daemon]` optional dependencies +11. Update `REPO.md` directory structure +12. Update README with daemon section + +--- + +## pyproject.toml additions + +```toml +[project.optional-dependencies] +daemon = [ + "apscheduler>=3.10", + "flask>=3.0", +] +dev = [ + "pytest>=8.0", + "pytest-mock>=3.12", + "ruff>=0.4", +] +``` diff --git a/DAEMON_CONFIG.md b/DAEMON_CONFIG.md new file mode 100644 index 0000000..7710cd3 --- /dev/null +++ b/DAEMON_CONFIG.md @@ -0,0 +1,176 @@ +# DAEMON_CONFIG.md + +## What this file is + +Additions to `memsync/config.py` needed to support the daemon module. +Do not apply these until core memsync is complete and tested. +These changes are additive — nothing in the existing Config dataclass changes. + +--- + +## DaemonConfig dataclass + +Add this class to `memsync/config.py` alongside the existing `Config`: + +```python +@dataclass +class DaemonConfig: + """ + Configuration for the optional daemon module. + Only present in config.toml if the user has run 'memsync daemon install'. 
+ """ + enabled: bool = True + + # Scheduled refresh + refresh_enabled: bool = True + refresh_schedule: str = "55 23 * * *" # 11:55pm daily + + # Backup mirror + backup_mirror_path: str = "" # empty = disabled + backup_mirror_schedule: str = "0 * * * *" # hourly + + # Web UI + web_ui_enabled: bool = True + web_ui_port: int = 5000 + web_ui_host: str = "0.0.0.0" + + # Capture endpoint + capture_enabled: bool = True + capture_port: int = 5001 + capture_token: str = "" # empty = no auth + + # Drift detection + drift_check_enabled: bool = True + drift_check_interval_hours: int = 6 + drift_notify: str = "log" # "log", "email", or "file" + + # Weekly digest + digest_enabled: bool = False + digest_schedule: str = "0 9 * * 1" # Monday 9am + digest_email_to: str = "" + digest_email_from: str = "" + digest_smtp_host: str = "" + digest_smtp_port: int = 587 + digest_smtp_user: str = "" + digest_smtp_password: str = "" +``` + +--- + +## Config dataclass update + +Add `daemon` field to the existing `Config` dataclass: + +```python +@dataclass +class Config: + # ... existing fields unchanged ... + + # Optional daemon config — only populated if [daemon] section exists in config.toml + daemon: DaemonConfig = field(default_factory=DaemonConfig) +``` + +--- + +## _from_dict update + +Add daemon section parsing to `Config._from_dict()`: + +```python +@classmethod +def _from_dict(cls, raw: dict) -> "Config": + # ... existing parsing unchanged ... 
+ + daemon_raw = raw.get("daemon", {}) + daemon = DaemonConfig( + enabled=daemon_raw.get("enabled", True), + refresh_enabled=daemon_raw.get("refresh_enabled", True), + refresh_schedule=daemon_raw.get("refresh_schedule", "55 23 * * *"), + backup_mirror_path=daemon_raw.get("backup_mirror_path", ""), + backup_mirror_schedule=daemon_raw.get("backup_mirror_schedule", "0 * * * *"), + web_ui_enabled=daemon_raw.get("web_ui_enabled", True), + web_ui_port=daemon_raw.get("web_ui_port", 5000), + web_ui_host=daemon_raw.get("web_ui_host", "0.0.0.0"), + capture_enabled=daemon_raw.get("capture_enabled", True), + capture_port=daemon_raw.get("capture_port", 5001), + capture_token=daemon_raw.get("capture_token", ""), + drift_check_enabled=daemon_raw.get("drift_check_enabled", True), + drift_check_interval_hours=daemon_raw.get("drift_check_interval_hours", 6), + drift_notify=daemon_raw.get("drift_notify", "log"), + digest_enabled=daemon_raw.get("digest_enabled", False), + digest_schedule=daemon_raw.get("digest_schedule", "0 9 * * 1"), + digest_email_to=daemon_raw.get("digest_email_to", ""), + digest_email_from=daemon_raw.get("digest_email_from", ""), + digest_smtp_host=daemon_raw.get("digest_smtp_host", ""), + digest_smtp_port=daemon_raw.get("digest_smtp_port", 587), + digest_smtp_user=daemon_raw.get("digest_smtp_user", ""), + digest_smtp_password=daemon_raw.get("digest_smtp_password", ""), + ) + + return cls( + # ... existing fields unchanged ... + daemon=daemon, + ) +``` + +--- + +## _to_toml update + +Add daemon section to `Config._to_toml()`. +Only write the `[daemon]` section if `daemon.enabled` is True +(i.e. user has run `memsync daemon install`): + +```python +def _to_toml(self) -> str: + # ... existing lines unchanged ... 
+ + if self.daemon.enabled: + lines += [ + "", + "[daemon]", + f"enabled = {str(self.daemon.enabled).lower()}", + f'refresh_schedule = "{self.daemon.refresh_schedule}"', + f"refresh_enabled = {str(self.daemon.refresh_enabled).lower()}", + f'backup_mirror_path = "{self.daemon.backup_mirror_path}"', + f'backup_mirror_schedule = "{self.daemon.backup_mirror_schedule}"', + f"web_ui_enabled = {str(self.daemon.web_ui_enabled).lower()}", + f"web_ui_port = {self.daemon.web_ui_port}", + f'web_ui_host = "{self.daemon.web_ui_host}"', + f"capture_enabled = {str(self.daemon.capture_enabled).lower()}", + f"capture_port = {self.daemon.capture_port}", + f'capture_token = "{self.daemon.capture_token}"', + f"drift_check_enabled = {str(self.daemon.drift_check_enabled).lower()}", + f"drift_check_interval_hours = {self.daemon.drift_check_interval_hours}", + f'drift_notify = "{self.daemon.drift_notify}"', + f"digest_enabled = {str(self.daemon.digest_enabled).lower()}", + f'digest_schedule = "{self.daemon.digest_schedule}"', + f'digest_email_to = "{self.daemon.digest_email_to}"', + f'digest_email_from = "{self.daemon.digest_email_from}"', + f'digest_smtp_host = "{self.daemon.digest_smtp_host}"', + f"digest_smtp_port = {self.daemon.digest_smtp_port}", + f'digest_smtp_user = "{self.daemon.digest_smtp_user}"', + f'digest_smtp_password = "{self.daemon.digest_smtp_password}"', + "", + ] + + return "\n".join(lines) +``` + +--- + +## Important: SMTP password handling + +Storing SMTP passwords in a plaintext config file is not ideal. +For v1 it's acceptable with a clear warning, but note in the README: + +> For better security, leave `digest_smtp_password` empty and use an +> app-specific password stored in your system keyring instead. +> Set it at runtime with: `MEMSYNC_SMTP_PASSWORD=... 
memsync daemon start`
+
+Add `MEMSYNC_SMTP_PASSWORD` env var support as a fallback in `notify.py`:
+
+```python
+import os
+password = config.daemon.digest_smtp_password or os.environ.get("MEMSYNC_SMTP_PASSWORD", "")
+```
diff --git a/DAEMON_PITFALLS.md b/DAEMON_PITFALLS.md
new file mode 100644
index 0000000..07b70a4
--- /dev/null
+++ b/DAEMON_PITFALLS.md
@@ -0,0 +1,138 @@
+# DAEMON_PITFALLS.md
+
+Daemon-specific pitfalls on top of the core ones in `PITFALLS.md`.
+Read both before building the daemon module.
+
+---
+
+## 1. Core module boundary is sacred
+
+The daemon imports from core. Core never imports from daemon.
+
+If you find yourself adding a daemon import to `sync.py`, `config.py`,
+`backups.py`, or any other core module — stop. Restructure so the daemon
+calls core, not the other way around. Violating this boundary means
+`pip install memsync` (core only) pulls in daemon dependencies.
+
+---
+
+## 2. The nightly refresh job must handle missing session logs gracefully
+
+If the user didn't run any sessions that day, `sessions/<date>.md` won't exist.
+`job_nightly_refresh` returns early if the file doesn't exist or is empty.
+This is already in the spec — do not remove this guard. An empty notes
+payload to the API wastes tokens and risks producing hallucinated changes.
+
+---
+
+## 3. systemd unit file and API key exposure
+
+The generated systemd unit file includes `Environment=ANTHROPIC_API_KEY=...`
+as a placeholder. Unit files in `/etc/systemd/system/` are world-readable by default.
+
+Two mitigations to document clearly:
+- Use `systemctl edit memsync` to create a drop-in override file (mode 600)
+- Use `EnvironmentFile=/etc/memsync/secrets` pointing to a mode 600 file
+
+Do not suggest storing the real key in the main unit file.
+Print a prominent warning after `memsync daemon install` on Linux.
+
+---
+
+## 4. Flask dev server is fine for local network, not for internet exposure
+
+`app.run()` is the Flask development server. 
It's single-threaded and has no
+auth. This is acceptable for a Pi on a home LAN. It is not acceptable for any
+internet-facing deployment.
+
+If a user asks about exposing the web UI to the internet:
+- Tell them this is out of scope for v1
+- Point them toward nginx + basic auth as a general approach
+- Do not add this to the tool itself
+
+---
+
+## 5. Port conflicts on common dev machines
+
+5000 is used by AirPlay Receiver on Mac (macOS 12+) and many dev servers.
+5001 is also commonly used. Document both ports as configurable.
+
+On Mac, if `web_ui_host = "0.0.0.0"` and port 5000 is taken by AirPlay,
+the web UI will silently fail to start or throw a bind error. Print a clear
+error message pointing to `memsync config set web_ui_port <port>`.
+
+---
+
+## 6. APScheduler timezone handling
+
+APScheduler uses local system time by default. On a Pi, make sure the system
+timezone is set correctly (`timedatectl set-timezone America/New_York` or
+wherever the user is). The nightly refresh at 11:55pm will fire at 11:55pm
+in the Pi's system timezone, which may not match the user's timezone if the
+Pi was set up with UTC (the default on many Pi images).
+
+Document this in the Pi setup guide. Add a note to `memsync daemon install`
+output: "Make sure your Pi's timezone is set correctly: `timedatectl`"
+
+---
+
+## 7. OneDrive sync lag on the Pi
+
+If the Pi has OneDrive mounted (via rclone or similar), there may be sync lag
+between when `GLOBAL_MEMORY.md` is updated on a Mac/Windows machine and when
+the Pi sees the change. The nightly refresh reads the file at job time —
+if OneDrive hasn't synced yet, it reads a stale version.
+
+This is an inherent limitation of filesystem-based sync. Document it.
+Workaround: schedule the nightly refresh a few minutes after midnight rather
+than 11:55pm, giving OneDrive time to sync the day's changes before the
+Pi reads them.
+
+---
+
+## 8. 
The backup mirror is not a substitute for OneDrive + +The `job_backup_mirror` rsync copies files from the OneDrive-synced +`.claude-memory/` to a local path. It's a redundant local backup in case +OneDrive has an outage or the user accidentally deletes something in OneDrive. + +It is not a real-time mirror. It runs on a schedule (default: hourly). +Document this limitation clearly — it's not a safety net for changes made +in the last hour. + +--- + +## 9. Digest email and SMTP credentials + +SMTP credentials in a config file are a security concern. The v1 approach +(plaintext in config.toml) is acceptable with a warning, but: + +- Never log or print SMTP credentials +- Support `MEMSYNC_SMTP_PASSWORD` env var as an alternative (see DAEMON_CONFIG.md) +- Document that Gmail requires an App Password, not the account password +- Document that many ISPs block outbound port 587 — common user frustration + +--- + +## 10. Test isolation for daemon jobs + +Daemon jobs touch the filesystem and call the API. Tests must mock both. +Never let a test job run `job_nightly_refresh` against a real filesystem +or make a real API call. Use `tmp_path` and `unittest.mock.patch` throughout. + +For Flask tests, use the Flask test client — never bind to a real port in tests. + +```python +# Good +def test_capture_endpoint(tmp_config): + from memsync.daemon.capture import create_capture_app + config, tmp_path = tmp_config + app = create_capture_app(config) + client = app.test_client() + response = client.post("/note", json={"text": "test note"}) + assert response.status_code == 200 + +# Bad — binds to real port, can conflict with other tests +def test_capture_endpoint(): + run_capture(config) # never do this in tests +``` diff --git a/EXISTING_CODE.md b/EXISTING_CODE.md new file mode 100644 index 0000000..fa726fc --- /dev/null +++ b/EXISTING_CODE.md @@ -0,0 +1,526 @@ +# EXISTING_CODE.md + +This is the working prototype built before the architecture was formalized. 
+Use this as the foundation — refactor it to fit the target architecture +described in ARCHITECTURE.md. Do not rewrite from scratch. + +The prototype works and has been designed with the final architecture in mind. +The main gaps are: no provider abstraction, no config system, hardcoded model string. + +--- + +## memsync/paths.py (prototype) + +This becomes the provider system. Replace with `memsync/providers/`. +The detection logic here is correct and tested — migrate it into +`OneDriveProvider.detect()`. + +```python +""" +Path resolution for memsync. +Handles Mac, Windows, and OneDrive sync layer. +""" + +import os +import platform +from pathlib import Path + + +def get_platform() -> str: + system = platform.system() + if system == "Darwin": + return "mac" + elif system == "Windows": + return "windows" + else: + return "linux" + + +def get_onedrive_root() -> Path: + """ + Resolve the OneDrive root directory cross-platform. + Checks env vars first (most reliable), then common default paths. + """ + if get_platform() == "windows": + onedrive = os.environ.get("OneDrive") or os.environ.get("ONEDRIVE") + if onedrive: + return Path(onedrive) + candidates = [ + Path.home() / "OneDrive", + Path("C:/Users") / os.environ.get("USERNAME", "") / "OneDrive", + ] + else: + candidates = [ + Path.home() / "OneDrive", + Path.home() / "Library" / "CloudStorage" / "OneDrive-Personal", + ] + cloud_storage = Path.home() / "Library" / "CloudStorage" + if cloud_storage.exists(): + for d in cloud_storage.iterdir(): + if d.name.startswith("OneDrive"): + candidates.insert(0, d) + + for path in candidates: + if path.exists(): + return path + + raise FileNotFoundError( + "OneDrive directory not found. " + "Set MEMSYNC_ONEDRIVE env var to your OneDrive path." + ) + + +def get_memory_paths() -> dict[str, Path]: + """ + Returns all relevant paths for memsync. + MEMSYNC_ONEDRIVE env var overrides auto-detection. 
+ """ + onedrive_override = os.environ.get("MEMSYNC_ONEDRIVE") + onedrive_root = Path(onedrive_override) if onedrive_override else get_onedrive_root() + + memory_root = onedrive_root / ".claude-memory" + + if get_platform() == "windows": + claude_config = Path.home() / ".claude" + else: + claude_config = Path.home() / ".claude" + + return { + "onedrive_root": onedrive_root, + "memory_root": memory_root, + "global_memory": memory_root / "GLOBAL_MEMORY.md", + "backups": memory_root / "backups", + "session_log": memory_root / "sessions", + "claude_config": claude_config, + "claude_md": claude_config / "CLAUDE.md", + } + + +def ensure_directories(paths: dict[str, Path]) -> None: + for key in ("memory_root", "backups", "session_log"): + paths[key].mkdir(parents=True, exist_ok=True) +``` + +--- + +## memsync/sync.py (prototype) + +The core API call and compaction logic. Migrate this into the new `sync.py` +but pull `model` from config instead of hardcoding it. +The system prompt here is the result of iteration — don't change it lightly. +See PITFALLS.md for why specific lines are the way they are. + +```python +""" +Memory refresh logic. +Calls Claude API to merge session notes into GLOBAL_MEMORY.md. +""" + +import shutil +from datetime import datetime +from pathlib import Path + +import anthropic + +from .paths import get_memory_paths, ensure_directories + +SYSTEM_PROMPT = """You are maintaining a persistent global memory file for an AI assistant user. +This file is loaded at the start of every Claude Code session, on every machine and project. +It is the user's identity layer — not project docs, not cold storage. 
+ +YOUR JOB: +- Merge new session notes into the existing memory file +- Keep the file tight (under 400 lines) +- Update facts that have changed +- Demote completed items from "Current priorities" to a brief "Recent completions" section +- Preserve the user's exact voice, formatting, and section structure +- NEVER remove entries under any "Hard constraints" or "Constraints" section — only append +- If nothing meaningful changed, return the file UNCHANGED + +RETURN: Only the updated GLOBAL_MEMORY.md content. No explanation, no preamble.""" + + +def load_or_init_memory(path: Path) -> str: + if path.exists(): + return path.read_text(encoding="utf-8") + + return """\ +# Global Memory + +> Loaded by Claude Code at session start on all machines and projects. +> Edit directly or run: memsync refresh --notes "..." + +## Identity & context +- (Fill this in — who you are, your roles, active projects) + +## Current priorities +- (What you're working on right now) + +## Standing preferences +- (How you like to work — communication style, output format, etc.) 
+ +## Hard constraints +- (Rules that must never be lost or softened through compaction) +""" + + +def backup_memory(memory_path: Path, backup_dir: Path) -> Path: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + backup_path = backup_dir / f"GLOBAL_MEMORY_{timestamp}.md" + shutil.copy2(memory_path, backup_path) + return backup_path + + +def refresh_memory(notes: str, dry_run: bool = False) -> dict: + paths = get_memory_paths() + ensure_directories(paths) + + current_memory = load_or_init_memory(paths["global_memory"]) + + client = anthropic.Anthropic() + + user_prompt = f"""\ +CURRENT GLOBAL MEMORY: +{current_memory} + +SESSION NOTES: +{notes}""" + + response = client.messages.create( + model="claude-sonnet-4-20250514", # ← HARDCODED: move to config in refactor + max_tokens=4000, + system=SYSTEM_PROMPT, + messages=[{"role": "user", "content": user_prompt}], + ) + + updated_content = response.content[0].text.strip() + changed = updated_content != current_memory.strip() + + if dry_run: + return { + "updated_content": updated_content, + "backup_path": None, + "changed": changed, + "dry_run": True, + } + + backup_path = None + if paths["global_memory"].exists() and changed: + backup_path = backup_memory(paths["global_memory"], paths["backups"]) + + paths["global_memory"].write_text(updated_content, encoding="utf-8") + sync_to_claude_md(paths) + log_session_notes(notes, paths["session_log"]) + + return { + "updated_content": updated_content, + "backup_path": backup_path, + "changed": changed, + "dry_run": False, + } + + +def sync_to_claude_md(paths: dict) -> None: + """ + Keep ~/.claude/CLAUDE.md in sync with the OneDrive master. + Mac/Linux: symlink. Windows: copy. 
+ """ + import platform + + source = paths["global_memory"] + dest = paths["claude_md"] + + dest.parent.mkdir(parents=True, exist_ok=True) + + if platform.system() == "Windows": + shutil.copy2(source, dest) + return + + if dest.is_symlink(): + if dest.resolve() == source.resolve(): + return + dest.unlink() + + if dest.exists(): + dest.rename(dest.with_suffix(".pre-memsync.bak")) + + try: + dest.symlink_to(source) + except OSError: + shutil.copy2(source, dest) + + +def log_session_notes(notes: str, session_dir: Path) -> None: + today = datetime.now().strftime("%Y-%m-%d") + log_path = session_dir / f"{today}.md" + timestamp = datetime.now().strftime("%H:%M:%S") + + with open(log_path, "a", encoding="utf-8") as f: + f.write(f"\n---\n### {timestamp}\n{notes}\n") + + +def prune_backups(backup_dir: Path, keep_days: int = 30) -> list[Path]: + from datetime import timedelta + + cutoff = datetime.now() - timedelta(days=keep_days) + deleted = [] + + for backup in backup_dir.glob("GLOBAL_MEMORY_*.md"): + try: + ts_str = backup.stem.replace("GLOBAL_MEMORY_", "") + ts = datetime.strptime(ts_str, "%Y%m%d_%H%M%S") + if ts < cutoff: + backup.unlink() + deleted.append(backup) + except ValueError: + pass + + return deleted +``` + +--- + +## memsync/cli.py (prototype) + +The full CLI. Refactor to pass `config` into each command function +and replace direct path dict calls with provider + config resolution. + +```python +""" +memsync CLI — see COMMANDS.md for full spec. 
+""" + +import sys +import argparse +from pathlib import Path + +from .paths import get_memory_paths, ensure_directories, get_platform +from .sync import refresh_memory, prune_backups, load_or_init_memory + + +def cmd_refresh(args: argparse.Namespace) -> int: + notes = "" + + if args.notes: + notes = args.notes + elif args.file: + note_path = Path(args.file) + if not note_path.exists(): + print(f"Error: file not found: {args.file}", file=sys.stderr) + return 1 + notes = note_path.read_text(encoding="utf-8") + else: + if not sys.stdin.isatty(): + notes = sys.stdin.read() + else: + print("Error: provide --notes, --file, or pipe notes via stdin.", file=sys.stderr) + return 1 + + if not notes.strip(): + print("Error: notes are empty.", file=sys.stderr) + return 1 + + print("Refreshing global memory...", end=" ", flush=True) + result = refresh_memory(notes, dry_run=args.dry_run) + + if args.dry_run: + print("\n[DRY RUN] No files written.\n") + if result["changed"]: + print("Changes detected. 
Updated content:") + print("─" * 60) + print(result["updated_content"]) + else: + print("No changes detected.") + return 0 + + if result["changed"]: + print("done.") + if result["backup_path"]: + print(f" Backup: {result['backup_path']}") + paths = get_memory_paths() + print(f" Memory: {paths['global_memory']}") + print(f" CLAUDE.md: {paths['claude_md']}") + else: + print("no changes.") + + return 0 + + +def cmd_status(args: argparse.Namespace) -> int: + try: + paths = get_memory_paths() + except FileNotFoundError as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + + print(f"Platform: {get_platform()}") + print(f"OneDrive root: {paths['onedrive_root']}") + print(f"Memory file: {paths['global_memory']} ", end="") + print("✓" if paths["global_memory"].exists() else "✗ (not created yet)") + print(f"CLAUDE.md: {paths['claude_md']} ", end="") + + claude_md = paths["claude_md"] + if claude_md.is_symlink(): + print(f"→ symlink to {claude_md.resolve()}") + elif claude_md.exists(): + print("✓ (copy)") + else: + print("✗ (not synced)") + + backup_dir = paths["backups"] + if backup_dir.exists(): + backups = list(backup_dir.glob("GLOBAL_MEMORY_*.md")) + print(f"Backups: {len(backups)} file(s) in {backup_dir}") + + session_dir = paths["session_log"] + if session_dir.exists(): + sessions = list(session_dir.glob("*.md")) + print(f"Session logs: {len(sessions)} day(s) logged in {session_dir}") + + return 0 + + +def cmd_show(args: argparse.Namespace) -> int: + paths = get_memory_paths() + if not paths["global_memory"].exists(): + print("No global memory file yet. 
Run: memsync init") + return 1 + print(paths["global_memory"].read_text(encoding="utf-8")) + return 0 + + +def cmd_diff(args: argparse.Namespace) -> int: + import difflib + + paths = get_memory_paths() + backup_dir = paths["backups"] + + if not paths["global_memory"].exists(): + print("No global memory file yet.") + return 1 + + backups = sorted(backup_dir.glob("GLOBAL_MEMORY_*.md")) + if not backups: + print("No backups found.") + return 0 + + latest_backup = backups[-1] + current = paths["global_memory"].read_text(encoding="utf-8").splitlines(keepends=True) + previous = latest_backup.read_text(encoding="utf-8").splitlines(keepends=True) + + diff = list(difflib.unified_diff( + previous, current, + fromfile=f"backup ({latest_backup.name})", + tofile="current", + )) + + if diff: + print("".join(diff)) + else: + print("No differences from last backup.") + + return 0 + + +def cmd_prune(args: argparse.Namespace) -> int: + paths = get_memory_paths() + deleted = prune_backups(paths["backups"], keep_days=args.keep_days) + if deleted: + print(f"Pruned {len(deleted)} backup(s) older than {args.keep_days} days.") + for p in deleted: + print(f" removed: {p.name}") + else: + print(f"No backups older than {args.keep_days} days.") + return 0 + + +def cmd_init(args: argparse.Namespace) -> int: + try: + paths = get_memory_paths() + except FileNotFoundError as e: + print(f"Error: {e}", file=sys.stderr) + print("Set MEMSYNC_ONEDRIVE env var to your OneDrive path and retry.") + return 1 + + ensure_directories(paths) + + if paths["global_memory"].exists() and not args.force: + print(f"Memory file already exists: {paths['global_memory']}") + print("Use --force to reinitialize.") + return 0 + + starter = load_or_init_memory(Path("/dev/null")) + paths["global_memory"].write_text(starter, encoding="utf-8") + + from .sync import sync_to_claude_md + sync_to_claude_md(paths) + + print("memsync initialized.") + print(f" Memory: {paths['global_memory']}") + print(f" CLAUDE.md: 
{paths['claude_md']}") + return 0 + + +def main(): + parser = argparse.ArgumentParser( + prog="memsync", + description="Cross-platform global memory manager for Claude Code.", + ) + subparsers = parser.add_subparsers(dest="command", required=True) + + p_refresh = subparsers.add_parser("refresh", help="Merge session notes into global memory") + p_refresh.add_argument("--notes", "-n", help="Session notes as a string") + p_refresh.add_argument("--file", "-f", help="Path to a file containing session notes") + p_refresh.add_argument("--dry-run", action="store_true", help="Preview changes without writing") + p_refresh.set_defaults(func=cmd_refresh) + + p_status = subparsers.add_parser("status", help="Show paths and sync status") + p_status.set_defaults(func=cmd_status) + + p_show = subparsers.add_parser("show", help="Print current global memory") + p_show.set_defaults(func=cmd_show) + + p_diff = subparsers.add_parser("diff", help="Diff current memory against last backup") + p_diff.set_defaults(func=cmd_diff) + + p_prune = subparsers.add_parser("prune", help="Remove old backups") + p_prune.add_argument("--keep-days", type=int, default=30) + p_prune.set_defaults(func=cmd_prune) + + p_init = subparsers.add_parser("init", help="Initialize memory structure") + p_init.add_argument("--force", action="store_true") + p_init.set_defaults(func=cmd_init) + + args = parser.parse_args() + sys.exit(args.func(args)) + + +if __name__ == "__main__": + main() +``` + +--- + +## pyproject.toml (prototype) + +```toml +[build-system] +requires = ["setuptools>=68", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "memsync" +version = "0.1.0" +description = "Cross-platform global memory manager for Claude Code" +requires-python = ">=3.11" +dependencies = [ + "anthropic>=0.40.0", +] + +[project.scripts] +memsync = "memsync.cli:main" + +[tool.setuptools.packages.find] +where = ["."] +include = ["memsync*"] +``` diff --git a/PITFALLS.md b/PITFALLS.md new file mode 100644 index 
0000000..6adb766 --- /dev/null +++ b/PITFALLS.md @@ -0,0 +1,195 @@ +# PITFALLS.md + +Everything here either went wrong during prototyping, was identified as a risk, +or is a subtle behavior that will cause hard-to-debug issues if you miss it. +Read this before touching sync.py, any provider, or the CLAUDE.md sync logic. + +--- + +## 1. Hard constraints must be enforced in code, not just in the prompt + +**The problem:** The system prompt tells the model to never remove hard constraints. +But the model compresses by semantic salience — a constraint that didn't appear in +this week's session notes is easy to quietly drop. + +**The fix:** After getting the updated content back from the API, diff the +`## Hard constraints` section between old and new. Any item present in old +but missing in new gets re-appended. This is done in Python, not by the model. + +```python +def enforce_hard_constraints(old: str, new: str) -> str: + """ + Re-append any hard constraint lines that the model dropped. + Works on the raw markdown text — finds the constraints section and diffs it. + """ + old_constraints = extract_constraints_section(old) + new_constraints = extract_constraints_section(new) + + dropped = set(old_constraints) - set(new_constraints) + if not dropped: + return new + + # Re-append dropped constraints to the section + # Find the end of the constraints section in `new` and insert there + return reinsert_constraints(new, sorted(dropped)) +``` + +This is not implemented in the prototype yet. It must be in the refactor. + +--- + +## 2. The model string will rot + +`claude-sonnet-4-20250514` will become outdated. Do not hardcode it anywhere. +It lives in config. The prototype has it hardcoded in `sync.py` — that's the +first thing to fix in the refactor. + +The risk is not just stale output quality — old model strings may eventually +return API errors, silently breaking refresh for users who never check. + +--- + +## 3. 
iCloud hides dot-folders + +iCloud Drive on Mac does not sync folders whose names begin with `.` to other +devices. If the memory root is `.claude-memory`, it will exist on the Mac that +created it but be invisible to iCloud sync and won't appear on other Macs or +on Windows. + +**Fix:** The `ICloudProvider` overrides `get_memory_root()` to return +`claude-memory` (no leading dot). This is already in PROVIDERS.md. +Do not change it. Do not use `.claude-memory` with iCloud. + +--- + +## 4. Windows symlinks require admin rights + +On Windows, creating symlinks requires either admin rights or Developer Mode +enabled. Most users won't have either. Don't attempt a symlink on Windows — +always copy. The copy approach means `CLAUDE.md` can drift from `GLOBAL_MEMORY.md` +if the user edits the memory file directly without running `memsync refresh`. + +Document this clearly in the Windows section of the README. The copy gets +updated on every `memsync refresh`, so it's fine in practice. + +--- + +## 5. OneDrive path instability across client versions + +OneDrive has had three different default paths on Mac across major client versions: + +- `~/OneDrive` — old consumer client +- `~/Library/CloudStorage/OneDrive-Personal` — newer client +- `~/Library/CloudStorage/OneDrive - CompanyName` — business/work accounts + +The prototype's `get_onedrive_root()` checks all three. Keep all three checks. +Business account names vary (it's the company name in the Microsoft tenant). +The `startswith("OneDrive")` check in the CloudStorage loop catches most cases. + +If a user reports detection failure on Mac with a business OneDrive account, +the fix is: `memsync config set sync_root /path/to/their/onedrive` + +--- + +## 6. Google Drive path instability across client versions + +Google Drive is worse than OneDrive for this. 
There have been at least four +different default paths: + +- `~/Google Drive` — legacy Backup and Sync (before 2021) +- `~/Library/CloudStorage/GoogleDrive-email@domain.com/My Drive` — current (Drive for Desktop) +- `G:/My Drive` — Windows, Drive for Desktop with G: drive mapping +- Custom drive letter — Windows users can change the drive letter + +The provider checks all known paths. The `GoogleDrive-` prefix in CloudStorage +is the most reliable current indicator on Mac. On Windows, check for `G:/My Drive` +as well as `~/Google Drive`. + +If detection fails: `memsync config set sync_root /path/to/their/gdrive` + +--- + +## 7. Concurrent writes from two machines + +If the user runs `memsync refresh` on Mac and Windows at nearly the same time, +both will read the same `GLOBAL_MEMORY.md`, update independently, and the last +write wins — the other change is lost. + +This is an edge case (refresh is a deliberate manual action), not a background +sync, so the risk is low. Document it in the README. Do not add locking — it's +not worth the complexity for v1. + +If a user hits this: the backup from the losing write is in `backups/`. They +can manually merge. + +--- + +## 8. The system prompt is load-bearing — don't casually edit it + +The system prompt in `sync.py` was iterated over multiple sessions. Specific +phrases matter: + +- **"identity layer — not project docs, not cold storage"** — prevents the model + from treating this like a knowledge base and trying to be exhaustive. +- **"Preserve the user's exact voice, formatting, and section structure"** — without + this, the model reformats the memory into its own preferred style after a few + refreshes, eroding the user's structure. +- **"If nothing meaningful changed, return the file UNCHANGED"** — without this, + the model always makes small edits just to show it did something, creating + spurious diffs and unnecessary backups. +- **"RETURN: Only the updated GLOBAL_MEMORY.md content. 
No explanation, no preamble."** + — without this, the model occasionally prepends "Here is the updated memory file:" + which then gets written into the file. + +If you edit the prompt, test with `--dry-run` across several different notes +inputs before committing. Prompt changes are the highest-risk edits in this codebase. + +--- + +## 9. Empty notes should not trigger a refresh + +If `--notes ""` or a notes file that's all whitespace is passed, refuse with +a clear error. Don't send an empty notes payload to the API — it will either +change nothing (wasted tokens) or hallucinate something to change. + +This is handled in the prototype's `cmd_refresh`. Keep it in the refactor. + +--- + +## 10. The `max_tokens=4000` ceiling + +GLOBAL_MEMORY.md is capped at ~400 lines. At average prose density, 4000 tokens +is enough headroom. But if a user has a very dense memory file and writes extensive +notes, the response can get truncated — the file gets written with the truncation +mid-sentence. + +Mitigation: set `max_tokens` to 4096 (the safe maximum for most models) and +add a post-write check that the file ends with a complete line (no truncation mid-word). +If truncated, restore from backup and print an error. + +Not implemented in the prototype — add it in the refactor. + +--- + +## 11. Sessions log is append-only by design + +`sessions/.md` files are never pruned. They're the raw audit trail. +The `prune` command only touches `backups/`. This is intentional — session logs +are cheap (text only) and losing them removes the only way to recover if the +compaction drops something important. + +If a user asks for a way to prune sessions, direct them to delete manually. +Don't add a `--sessions` flag to `prune`. + +--- + +## 12. 
Test isolation — never touch real filesystem or real API in tests + +All tests must mock: +- The filesystem (use `tmp_path` from pytest) +- The Anthropic API (use `unittest.mock.patch`) + +Never create files in `~/.config`, `~/.claude`, or any cloud sync folder during tests. +Never make real API calls in tests. + +See REPO.md for the test structure and mock patterns. diff --git a/PROVIDERS.md b/PROVIDERS.md new file mode 100644 index 0000000..1d766a6 --- /dev/null +++ b/PROVIDERS.md @@ -0,0 +1,367 @@ +# PROVIDERS.md + +## The plugin contract + +Every provider implements this ABC. Nothing else in the codebase needs to change +when a new provider is added. + +```python +# memsync/providers/__init__.py + +from abc import ABC, abstractmethod +from pathlib import Path + + +class BaseProvider(ABC): + """ + A sync provider knows how to find the cloud storage root on the current machine. + That's its only job. Memory structure lives above it. + """ + + name: str # short id used in config: "onedrive", "icloud", "gdrive", "custom" + display_name: str # human-readable: "OneDrive", "iCloud Drive", "Google Drive", "Custom Path" + + @abstractmethod + def detect(self) -> Path | None: + """ + Try to find this provider's sync root on the current machine. + Returns the path if found and accessible, None otherwise. + Never raises — detection failure is not an error. + """ + + @abstractmethod + def is_available(self) -> bool: + """ + Quick check: is this provider installed and its sync folder accessible? + Should be fast — no API calls, just filesystem checks. + """ + + def get_memory_root(self, sync_root: Path) -> Path: + """ + Where inside the sync root to store memsync data. + Default is /.claude-memory + Providers can override if needed (e.g. iCloud has invisible dot-folders). 
+ """ + return sync_root / ".claude-memory" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(name={self.name!r})" + + +# Provider registry — add new providers here +_REGISTRY: dict[str, type[BaseProvider]] = {} + + +def register(cls: type[BaseProvider]) -> type[BaseProvider]: + """Decorator to register a provider.""" + _REGISTRY[cls.name] = cls + return cls + + +def get_provider(name: str) -> BaseProvider: + """Get a provider instance by name. Raises KeyError if not found.""" + if name not in _REGISTRY: + available = ", ".join(_REGISTRY.keys()) + raise KeyError(f"Unknown provider {name!r}. Available: {available}") + return _REGISTRY[name]() + + +def all_providers() -> list[BaseProvider]: + """Return one instance of each registered provider.""" + return [cls() for cls in _REGISTRY.values()] + + +def auto_detect() -> list[BaseProvider]: + """ + Return all providers that detect successfully on this machine, + in priority order: OneDrive, iCloud, Google Drive, Custom. + """ + return [p for p in all_providers() if p.detect() is not None] +``` + +--- + +## OneDrive provider + +```python +# memsync/providers/onedrive.py + +import os +import platform +from pathlib import Path +from . 
import BaseProvider, register + + +@register +class OneDriveProvider(BaseProvider): + name = "onedrive" + display_name = "OneDrive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Windows": + # Windows sets these env vars when OneDrive is running + for var in ("OneDrive", "ONEDRIVE", "OneDriveConsumer", "OneDriveCommercial"): + val = os.environ.get(var) + if val: + p = Path(val) + if p.exists(): + return p + # Fallback: common default paths + username = os.environ.get("USERNAME", "") + for candidate in [ + Path.home() / "OneDrive", + Path(f"C:/Users/{username}/OneDrive"), + ]: + if candidate.exists(): + return candidate + + elif system == "Darwin": + # Mac: OneDrive doesn't set env vars, check filesystem + # Personal OneDrive + personal = Path.home() / "OneDrive" + if personal.exists(): + return personal + + # OneDrive via CloudStorage (newer Mac client) + cloud_storage = Path.home() / "Library" / "CloudStorage" + if cloud_storage.exists(): + # Personal first, then business + for d in sorted(cloud_storage.iterdir()): + if d.name == "OneDrive-Personal": + return d + for d in sorted(cloud_storage.iterdir()): + if d.name.startswith("OneDrive") and d.is_dir(): + return d + + else: + # Linux: OneDrive via rclone or manual mount + candidates = [ + Path.home() / "OneDrive", + Path.home() / "onedrive", + ] + for c in candidates: + if c.exists(): + return c + + return None +``` + +--- + +## iCloud Drive provider + +```python +# memsync/providers/icloud.py + +import platform +from pathlib import Path +from . 
import BaseProvider, register + + +@register +class ICloudProvider(BaseProvider): + name = "icloud" + display_name = "iCloud Drive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Darwin": + # Primary path on Mac + icloud = Path.home() / "Library" / "Mobile Documents" / "com~apple~CloudDocs" + if icloud.exists(): + return icloud + + elif system == "Windows": + # iCloud for Windows installs here + import os + username = os.environ.get("USERNAME", "") + candidates = [ + Path.home() / "iCloudDrive", + Path(f"C:/Users/{username}/iCloudDrive"), + ] + for c in candidates: + if c.exists(): + return c + + # Linux: iCloud has no official client — not supported + return None + + def get_memory_root(self, sync_root: Path) -> Path: + # iCloud hides dot-folders on Mac — use a visible name instead + return sync_root / "claude-memory" +``` + +**Note on iCloud dot-folders:** iCloud Drive on Mac does not sync folders whose +names begin with `.` to other devices. Use `claude-memory` not `.claude-memory` +for the iCloud provider. The `get_memory_root` override handles this automatically. + +--- + +## Google Drive provider + +```python +# memsync/providers/gdrive.py + +import platform +from pathlib import Path +from . 
import BaseProvider, register + + +@register +class GoogleDriveProvider(BaseProvider): + name = "gdrive" + display_name = "Google Drive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Darwin": + # Google Drive for Desktop (current client) + cloud_storage = Path.home() / "Library" / "CloudStorage" + if cloud_storage.exists(): + for d in cloud_storage.iterdir(): + if d.name.startswith("GoogleDrive") and d.is_dir(): + # My Drive is inside the account folder + my_drive = d / "My Drive" + if my_drive.exists(): + return my_drive + return d + + # Legacy Backup and Sync path + legacy = Path.home() / "Google Drive" + if legacy.exists(): + return legacy + + elif system == "Windows": + import os + # Google Drive for Desktop on Windows + # Sets GDRIVE_ROOT or uses default path + gdrive_env = os.environ.get("GDRIVE_ROOT") + if gdrive_env: + p = Path(gdrive_env) + if p.exists(): + return p + + username = os.environ.get("USERNAME", "") + candidates = [ + Path.home() / "Google Drive", + Path(f"C:/Users/{username}/Google Drive"), + # Google Drive for Desktop default + Path("G:/My Drive"), + Path("G:/"), + ] + for c in candidates: + if c.exists(): + return c + + elif system == "Linux": + # Google Drive via google-drive-ocamlfuse or rclone + candidates = [ + Path.home() / "GoogleDrive", + Path.home() / "google-drive", + Path.home() / "gdrive", + ] + for c in candidates: + if c.exists(): + return c + + return None +``` + +**Note on Google Drive path instability:** Google Drive for Desktop changed its +mount path between versions. The `~/Library/CloudStorage/GoogleDrive-*` path is +current (2024+). The `~/Google Drive` path is legacy Backup and Sync. Both are +checked. If a user reports detection failure, first ask which Google Drive client +version they have. See `PITFALLS.md`. 
+ +--- + +## Custom provider (manual path) + +```python +# memsync/providers/custom.py + +from pathlib import Path +from . import BaseProvider, register + + +@register +class CustomProvider(BaseProvider): + """ + Fallback for any sync service not explicitly supported. + User sets the path manually via: memsync config set sync_root /path/to/folder + """ + name = "custom" + display_name = "Custom Path" + + def __init__(self, path: Path | None = None): + self._path = path + + def detect(self) -> Path | None: + # Custom provider only works if path is explicitly configured + if self._path and self._path.exists(): + return self._path + return None + + def is_available(self) -> bool: + return self.detect() is not None +``` + +--- + +## Adding a new provider + +To add Dropbox, Box, Synology, etc.: + +1. Create `memsync/providers/dropbox.py` +2. Implement `BaseProvider` (detect + is_available) +3. Add `@register` decorator +4. Import it in `memsync/providers/__init__.py` (the import triggers registration) +5. Add tests in `tests/test_providers.py` using the mocked filesystem pattern +6. Update the providers table in README.md + +That's the complete list. No other files need to change. + +See `docs/adding-a-provider.md` for the full contributor guide. + +--- + +## Provider detection priority + +During `memsync init`, providers are tried in this order: +1. OneDrive +2. iCloud +3. Google Drive +4. Custom (only if path already configured) + +If multiple are detected, the user is prompted to choose. The choice is saved to config. diff --git a/README.md b/README.md new file mode 100644 index 0000000..6c1ae98 --- /dev/null +++ b/README.md @@ -0,0 +1,184 @@ +# memsync + +Cross-platform global memory manager for Claude Code. + +Claude Code has no memory between sessions. memsync fixes that: it maintains one canonical `GLOBAL_MEMORY.md` in your cloud sync folder, linked to `~/.claude/CLAUDE.md` so Claude Code reads it at every session start. 
+ +After a meaningful session, run `memsync refresh --notes "..."` and the Claude API merges your notes into the memory file automatically. + +--- + +## How it works + +``` +OneDrive/.claude-memory/ + GLOBAL_MEMORY.md ← source of truth, synced across all machines + backups/ ← automatic backups before every refresh + sessions/ ← raw session notes, append-only audit trail + +~/.claude/CLAUDE.md ← symlink → GLOBAL_MEMORY.md (Mac/Linux) + copy of GLOBAL_MEMORY.md (Windows) +``` + +Every Claude Code session starts by reading `~/.claude/CLAUDE.md`. memsync keeps it current. + +--- + +## Requirements + +- Python 3.11+ +- An Anthropic API key (`ANTHROPIC_API_KEY` env var) +- One of: OneDrive, iCloud Drive, Google Drive — or any folder you specify + +--- + +## Installation + +```bash +pip install memsync +``` + +--- + +## Quick start + +```bash +# 1. Initialize (auto-detects your cloud provider) +memsync init + +# 2. Edit your memory file — fill in who you are, active projects, preferences +# File is at: OneDrive/.claude-memory/GLOBAL_MEMORY.md + +# 3. After a Claude Code session, merge in your notes +memsync refresh --notes "Finished the auth module. Decided to use JWT tokens, not sessions." + +# 4. 
Check everything is wired up +memsync status +``` + +--- + +## Commands + +| Command | Description | +|---|---| +| `memsync init` | First-time setup: create directory structure, sync to CLAUDE.md | +| `memsync refresh --notes "..."` | Merge session notes into memory via Claude API | +| `memsync show` | Print current GLOBAL_MEMORY.md | +| `memsync diff` | Diff current memory vs last backup | +| `memsync status` | Show paths, provider, sync state | +| `memsync providers` | List all providers and detection status | +| `memsync config show` | Print current config | +| `memsync config set <key> <value>` | Update a config value | +| `memsync prune` | Remove old backups | + +### `memsync refresh` options + +```bash +memsync refresh --notes "inline notes" +memsync refresh --file notes.txt +echo "notes" | memsync refresh +memsync refresh --notes "..." --dry-run # preview changes, no write +memsync refresh --notes "..." --model claude-opus-4-20250514 # one-off model override +``` + +### `memsync init` options + +```bash +memsync init # auto-detect provider +memsync init --provider icloud # use a specific provider +memsync init --sync-root /path/to/folder # use a custom path +memsync init --force # reinitialize even if already set up +``` + +### `memsync config set` keys + +```bash +memsync config set provider icloud +memsync config set model claude-opus-4-20250514 +memsync config set sync_root /path/to/custom/folder +memsync config set keep_days 60 +memsync config set max_memory_lines 300 +memsync config set claude_md_target ~/.claude/CLAUDE.md +``` + +--- + +## Cloud providers + +| Provider | macOS | Windows | Linux | +|---|---|---|---| +| OneDrive | ✓ | ✓ | ✓ (rclone) | +| iCloud Drive | ✓ | ✓ | ✗ | +| Google Drive | ✓ | ✓ | ✓ (rclone) | +| Custom path | ✓ | ✓ | ✓ | + +Detection is automatic. If multiple providers are found during `memsync init`, you'll be prompted to choose. + +**Windows note:** Symlinks require admin rights or Developer Mode on Windows.
memsync copies `GLOBAL_MEMORY.md` to `~/.claude/CLAUDE.md` instead. The copy is refreshed on every `memsync refresh`. + +**iCloud note:** iCloud Drive doesn't sync dot-folders on Mac. memsync stores data in `claude-memory/` (no leading dot) when using the iCloud provider. + +--- + +## Configuration + +Config file location: +- macOS/Linux: `~/.config/memsync/config.toml` +- Windows: `%APPDATA%\memsync\config.toml` + +Config is machine-specific — two machines can use different providers pointing to the same cloud storage location. + +Example config: + +```toml +[core] +provider = "onedrive" +model = "claude-sonnet-4-20250514" +max_memory_lines = 400 + +[paths] +claude_md_target = "/Users/ian/.claude/CLAUDE.md" + +[backups] +keep_days = 30 +``` + +To update the model when Anthropic releases new ones: + +```bash +memsync config set model claude-sonnet-4-20250514 +``` + +--- + +## What belongs in GLOBAL_MEMORY.md + +The memory file is your **identity layer** — not a knowledge base, not project docs. + +Good things to include: +- Who you are, your roles, active projects +- Current priorities and focus +- Standing preferences (communication style, output format) +- Hard constraints (rules that must never be softened through compaction) + +See `docs/global-memory-guide.md` for a complete guide. + +--- + +## Known limitations + +- **Concurrent writes:** Running `memsync refresh` on two machines simultaneously will result in the last write winning. The losing write's backup is in `backups/`. Risk is low since refresh is a deliberate manual action. +- **Max memory size:** The memory file is kept under ~400 lines. Very dense files may hit the 4096 token response limit — reduce the file size if you see truncation errors. + +--- + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md). To add a new cloud provider, see [docs/adding-a-provider.md](docs/adding-a-provider.md). 
+ +--- + +## License + +MIT diff --git a/REPO.md b/REPO.md new file mode 100644 index 0000000..4f4e68f --- /dev/null +++ b/REPO.md @@ -0,0 +1,292 @@ +# REPO.md + +## Target repository structure + +``` +memsync/ +├── memsync/ +│ ├── __init__.py # version string only +│ ├── cli.py # entry point, argument parsing, command routing +│ ├── config.py # Config dataclass, load/save, path resolution +│ ├── sync.py # Claude API call, compaction, hard constraint enforcement +│ ├── claude_md.py # CLAUDE.md symlink/copy management +│ ├── backups.py # backup, prune, list operations +│ └── providers/ +│ ├── __init__.py # BaseProvider ABC, registry, auto_detect() +│ ├── onedrive.py +│ ├── icloud.py +│ ├── gdrive.py +│ └── custom.py +├── tests/ +│ ├── conftest.py # shared fixtures (tmp_path wrappers, mock config) +│ ├── test_config.py # Config load/save, platform path resolution +│ ├── test_providers.py # each provider's detect() with mocked filesystem +│ ├── test_sync.py # refresh logic with mocked API +│ ├── test_backups.py # backup, prune, list +│ ├── test_claude_md.py # symlink + copy behavior +│ └── test_cli.py # CLI integration (subprocess or direct function calls) +├── docs/ +│ ├── adding-a-provider.md # contributor guide for new sync providers +│ └── global-memory-guide.md # what to put in GLOBAL_MEMORY.md (user guide) +├── .github/ +│ ├── workflows/ +│ │ ├── ci.yml # test matrix +│ │ └── release.yml # PyPI publish on tag +│ └── ISSUE_TEMPLATE/ +│ ├── bug_report.md +│ └── provider_request.md +├── pyproject.toml +├── README.md +└── CONTRIBUTING.md +``` + +--- + +## pyproject.toml (target) + +```toml +[build-system] +requires = ["setuptools>=68", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "memsync" +version = "0.2.0" +description = "Cross-platform global memory manager for Claude Code" +readme = "README.md" +license = { text = "MIT" } +requires-python = ">=3.11" +keywords = ["claude", "claude-code", "ai", "memory", "cli"] +classifiers = [ + 
"Development Status :: 3 - Alpha", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: MacOS", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", +] +dependencies = [ + "anthropic>=0.40.0", +] + +[project.urls] +Homepage = "https://github.com/YOUR_USERNAME/memsync" +Issues = "https://github.com/YOUR_USERNAME/memsync/issues" + +[project.scripts] +memsync = "memsync.cli:main" + +[tool.setuptools.packages.find] +where = ["."] +include = ["memsync*"] + +[tool.pytest.ini_options] +testpaths = ["tests"] + +[tool.ruff] +line-length = 100 +target-version = "py311" +``` + +--- + +## CI workflow + +```yaml +# .github/workflows/ci.yml +name: CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + test: + name: Test (${{ matrix.os }}, Python ${{ matrix.python-version }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: ["3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + pip install -e ".[dev]" + + - name: Run tests + run: pytest tests/ -v +``` + +--- + +## Release workflow + +```yaml +# .github/workflows/release.yml +name: Release + +on: + push: + tags: + - "v*" + +jobs: + publish: + runs-on: ubuntu-latest + permissions: + id-token: write # for trusted publishing + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Build + run: | + pip install build + python -m build + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 +``` + +Use PyPI Trusted Publishing (OIDC) — 
no API keys stored in GitHub secrets. +Set up at: https://pypi.org/manage/account/publishing/ + +--- + +## Dev dependencies + +Add to pyproject.toml: + +```toml +[project.optional-dependencies] +dev = [ + "pytest>=8.0", + "pytest-mock>=3.12", + "ruff>=0.4", +] +``` + +Install with: `pip install -e ".[dev]"` + +--- + +## Test patterns + +### Mocking the filesystem + +```python +# tests/conftest.py +import pytest +from pathlib import Path +from memsync.config import Config + + +@pytest.fixture +def tmp_config(tmp_path, monkeypatch): + """Config pointing entirely to tmp_path — no real filesystem touched.""" + config = Config( + provider="custom", + sync_root=tmp_path / "sync", + ) + (tmp_path / "sync" / ".claude-memory" / "backups").mkdir(parents=True) + (tmp_path / "sync" / ".claude-memory" / "sessions").mkdir(parents=True) + monkeypatch.setattr("memsync.config.get_config_path", + lambda: tmp_path / "config.toml") + return config, tmp_path +``` + +### Mocking the Anthropic API + +```python +# tests/test_sync.py +from unittest.mock import MagicMock, patch +from memsync.sync import refresh_memory_content + + +def test_refresh_returns_updated_content(tmp_config): + config, tmp_path = tmp_config + mock_response = MagicMock() + mock_response.content = [MagicMock(text="# Updated memory\n\n## Identity\n- Test user")] + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content( + notes="Test session notes", + current_memory="# Global Memory\n\n## Identity\n- Test user", + config=config, + ) + + assert result["changed"] is True # mock returns content that differs from current_memory +``` + +### Mocking provider detection + +```python +# tests/test_providers.py +from memsync.providers.onedrive import OneDriveProvider + + +def test_onedrive_detects_personal_path(tmp_path, monkeypatch): + onedrive_dir = tmp_path / "OneDrive" + onedrive_dir.mkdir() + monkeypatch.setattr("pathlib.Path.home", lambda: tmp_path)
+ + provider = OneDriveProvider() + result = provider.detect() + assert result == onedrive_dir +``` + +--- + +## Build order (recommended) + +Work in this sequence to keep the code in a runnable state at each step: + +1. `memsync/providers/` — BaseProvider, registry, all 3 providers +2. `memsync/config.py` — Config dataclass, load/save +3. `memsync/backups.py` — extract from prototype sync.py +4. `memsync/claude_md.py` — extract sync_to_claude_md from prototype +5. `memsync/sync.py` — refactor to accept Config, fix hardcoded model +6. `memsync/cli.py` — refactor to wire config + providers through all commands, + add new commands: `providers`, `config show/set` +7. `tests/` — write tests for each module as you go +8. CI workflows +9. README, CONTRIBUTING, docs/adding-a-provider.md + +--- + +## GitHub repo conventions + +- **main** branch is always releasable +- PRs required for all changes (even from owner) +- Squash merge to keep history clean +- Version: semantic versioning. v0.x.y while in alpha. +- Changelog: keep a simple CHANGELOG.md, updated per release + +## Issue templates + +### bug_report.md +Ask for: OS, Python version, provider, `memsync status` output, error message. + +### provider_request.md +Ask for: provider name, OS, default install path, whether they're willing to +implement it (link to adding-a-provider.md). diff --git a/STYLE.md b/STYLE.md new file mode 100644 index 0000000..2dee932 --- /dev/null +++ b/STYLE.md @@ -0,0 +1,133 @@ +# STYLE.md + +## Non-negotiables + +- **Python 3.11+ only.** Use `tomllib` (stdlib), `match` statements where they clarify, + `Path` everywhere (never `os.path`), `from __future__ import annotations` at the top + of every module. +- **No dependencies beyond `anthropic`.** Everything else is stdlib. + Exception: `pytest`, `pytest-mock`, `ruff` in dev dependencies only. +- **Type hints everywhere.** Return types on all functions. No bare `dict` or `list` — + use `dict[str, Path]`, `list[Path]`, etc. 
+- **`Path` for all filesystem operations.** Never concatenate strings to build paths. + +--- + +## Module boundaries + +Each module has one job. Don't let them bleed: + +- `sync.py` calls the API and returns text. It does not write files. +- `cli.py` handles I/O (print, argparse). It does not contain business logic. +- `providers/` detect paths. They do not create directories or write files. +- `config.py` loads and saves config. It does not call the API. + +If you find yourself importing `cli` from `sync` or `sync` from `providers`, +stop and reconsider the design. + +--- + +## Function design + +Keep functions small and single-purpose. If a function is doing two things, +split it. The test for this: can you describe what it does in one sentence +without using "and"? + +Prefer explicit parameters over reading from global state: + +```python +# Good +def refresh_memory_content(notes: str, current_memory: str, config: Config) -> dict: + ... + +# Bad — reads global config internally, hard to test +def refresh_memory_content(notes: str) -> dict: + config = Config.load() # hidden dependency + ... +``` + +--- + +## Error handling + +- Use specific exceptions, not bare `except Exception`. +- Errors that the user can fix → print to stderr with a fix suggestion, return exit code 1. +- Errors that are bugs → let them propagate with a full traceback. +- Never swallow exceptions silently. + +```python +# Good +try: + path = provider.detect() +except PermissionError as e: + print(f"Error: can't access sync folder: {e}", file=sys.stderr) + print("Check folder permissions or run: memsync config set sync_root /path", file=sys.stderr) + return 4 + +# Bad +try: + path = provider.detect() +except Exception: + path = None +``` + +--- + +## CLI output + +- Success output → stdout +- Errors → stderr +- Keep success output minimal. Users will run this in terminal sessions — + wall-of-text output is noise. 
+- Use `✓` and `✗` for status indicators in `memsync status` and `memsync providers`. +- Emoji in output: only the two above, nowhere else. + +--- + +## Naming conventions + +| Thing | Convention | Example | +|---|---|---| +| Modules | snake_case | `claude_md.py` | +| Classes | PascalCase | `OneDriveProvider` | +| Functions | snake_case | `refresh_memory_content` | +| Constants | UPPER_SNAKE | `SYSTEM_PROMPT` | +| CLI commands | hyphen-case | `memsync dry-run` | +| Config keys | snake_case | `keep_days` | +| Provider names | lowercase, no hyphens | `"onedrive"`, `"icloud"`, `"gdrive"` | + +--- + +## What "done" looks like for a module + +A module is done when: +1. All functions have type hints +2. All functions have docstrings (one line is fine for obvious things) +3. Tests exist and pass on Mac, Windows, Linux (CI green) +4. No hardcoded paths, model strings, or magic numbers + +--- + +## Commit messages + +``` +feat: add iCloud provider detection +fix: restore hard constraints dropped by compaction +refactor: extract backup logic into backups.py +test: add provider detection tests with mocked filesystem +docs: update adding-a-provider guide +``` + +First word: `feat`, `fix`, `refactor`, `test`, `docs`, `chore`. +Present tense. No period at the end. + +--- + +## What to avoid + +- Don't use `print()` for debugging — use proper logging or remove before commit +- Don't use `os.path` — use `pathlib.Path` +- Don't use `open()` without `encoding="utf-8"` +- Don't write to `~/.claude/CLAUDE.md` without backing up first +- Don't call the Anthropic API in any path except `sync.py` +- Don't import from `cli.py` in any other module diff --git a/docs/DAEMON_SETUP.md b/docs/DAEMON_SETUP.md new file mode 100644 index 0000000..d67a1b0 --- /dev/null +++ b/docs/DAEMON_SETUP.md @@ -0,0 +1,226 @@ +# DAEMON_SETUP.md + +## Raspberry Pi setup guide + +This is the end-user guide for setting up the daemon on a Raspberry Pi. +It belongs in `docs/raspberry-pi.md` in the final repo. 
+ +--- + +## What you need + +- Raspberry Pi 3B+ or newer (3B+ is fine, Pi 4 is better) +- Raspberry Pi OS Lite (no desktop needed) +- Your cloud sync folder accessible on the Pi (see options below) +- Python 3.11+ (comes with Raspberry Pi OS Bookworm and later) + +--- + +## Step 1: Get Python 3.11+ + +```bash +python3 --version +# If below 3.11: +sudo apt update && sudo apt install -y python3.11 python3-pip +``` + +--- + +## Step 2: Install memsync with daemon extras + +```bash +pip3 install "memsync[daemon]" --break-system-packages +``` + +--- + +## Step 3: Mount your cloud sync folder on the Pi + +The Pi needs to see your `GLOBAL_MEMORY.md` file. Three options: + +### Option A: rclone (OneDrive, Google Drive, iCloud via workaround) + +```bash +# Install rclone +curl https://rclone.org/install.sh | sudo bash + +# Configure (follow interactive prompts) +rclone config +# Choose your provider (OneDrive = "Microsoft OneDrive", Google Drive = "drive") + +# Mount (run at boot via cron or systemd) +rclone mount onedrive: ~/OneDrive --daemon --vfs-cache-mode full +``` + +Add to `/etc/rc.local` before `exit 0` to mount on boot: +```bash +sudo -u pi rclone mount onedrive: /home/pi/OneDrive --daemon --vfs-cache-mode full +``` + +### Option B: NFS mount from another machine on your LAN + +If your Mac or Windows machine is always on, share the OneDrive folder over +NFS and mount it on the Pi. Simpler than rclone for home LAN setups.
+ +### Option C: Manual sync via rsync + cron (simplest, less real-time) + +```bash +# Add to Pi's crontab — syncs from Mac every 15 minutes +*/15 * * * * rsync -az your-mac.local:/Users/ian/OneDrive/.claude-memory/ ~/claude-memory/ +``` + +Then point memsync at the local copy: +```bash +memsync config set sync_root ~/claude-memory +memsync config set provider custom +``` + +--- + +## Step 4: Configure memsync on the Pi + +```bash +# Initialize (uses OneDrive via rclone mount, or custom path from Option C) +memsync init + +# Set your API key +export ANTHROPIC_API_KEY="sk-ant-..." +# Add to ~/.bashrc to persist across reboots: +echo 'export ANTHROPIC_API_KEY="sk-ant-..."' >> ~/.bashrc + +# Check everything looks right +memsync status +``` + +--- + +## Step 5: Install and start the daemon + +```bash +# Install as a systemd service (starts on boot) +sudo memsync daemon install + +# The installer will print a warning about the API key in the unit file. +# Add it properly via override: +sudo systemctl edit memsync +``` + +In the editor that opens, add: +```ini +[Service] +Environment=ANTHROPIC_API_KEY=sk-ant-... +``` + +Save and close, then: +```bash +sudo systemctl restart memsync +sudo systemctl status memsync # should show "active (running)" +``` + +--- + +## Step 6: Set your timezone + +```bash +# Check current timezone +timedatectl + +# Set correct timezone (important for nightly refresh timing) +sudo timedatectl set-timezone America/Los_Angeles +# or America/New_York, Europe/London, etc. +``` + +--- + +## Step 7: Verify the nightly refresh + +The easiest way to test without waiting until 11:55pm: + +```bash +# Trigger a manual refresh to confirm everything works +memsync refresh --notes "Pi daemon setup and tested successfully" + +# Check it ran and updated the memory file +memsync show | head -20 +``` + +--- + +## Step 8: Set up the web UI (optional) + +The web UI starts automatically with the daemon. 
Access it from any browser +on your home network: + +``` +http://raspberrypi.local:5000 +``` + +If `raspberrypi.local` doesn't resolve, use the Pi's IP address instead: +```bash +hostname -I # shows Pi's IP +``` + +--- + +## Step 9: Set up mobile capture (optional) + +On iPhone, create a Shortcut: +1. Add action: "Get Contents of URL" +2. URL: `http://raspberrypi.local:5001/note` +3. Method: POST +4. Request body: JSON → `{"text": "Shortcut Input"}` +5. Add a "Text" input action before it so you can type the note + +Add to your home screen. One tap → type note → it goes into tonight's session log. + +If you want basic auth, set a token first: +```bash +memsync config set capture_token mytoken123 +``` + +Then add header to the Shortcut: `X-Memsync-Token: mytoken123` + +--- + +## Checking daemon health + +```bash +# See what's running and when jobs last fired +memsync daemon status + +# See daemon logs +sudo journalctl -u memsync -f + +# Check the schedule +memsync daemon schedule +``` + +--- + +## Troubleshooting + +**Daemon won't start:** +```bash +sudo journalctl -u memsync --no-pager | tail -30 +``` +Most common cause: ANTHROPIC_API_KEY not set in the systemd override. 
+ +**Web UI not accessible from other devices:** +Check that `web_ui_host` is `0.0.0.0` not `127.0.0.1`: +```bash +memsync config show | grep web_ui_host +memsync config set web_ui_host 0.0.0.0 +sudo systemctl restart memsync +``` + +**Port 5000 already in use:** +```bash +memsync config set web_ui_port 5050 +sudo systemctl restart memsync +``` + +**OneDrive not syncing on Pi:** +```bash +rclone ls onedrive:.claude-memory/ # test rclone can see the files +``` +If this fails, reconfigure rclone: `rclone config reconnect onedrive:` diff --git a/docs/adding-a-provider.md b/docs/adding-a-provider.md new file mode 100644 index 0000000..763a57b --- /dev/null +++ b/docs/adding-a-provider.md @@ -0,0 +1,192 @@ +# Adding a new provider + +memsync supports any cloud storage service through a simple plugin interface. Adding a provider requires: + +1. One new file in `memsync/providers/` +2. One line in `memsync/providers/__init__.py` +3. Tests in `tests/test_providers.py` +4. A row in the README providers table + +That's the complete list. No other files need to change. + +--- + +## The provider contract + +Every provider implements two required methods and inherits one optional override: + +```python +class BaseProvider(ABC): + + name: str # short id: "dropbox", "box", etc. + display_name: str # human-readable: "Dropbox", "Box", etc. + + @abstractmethod + def detect(self) -> Path | None: + """ + Return the sync root path if found, None otherwise. + Must never raise — wrap _find() in try/except. + """ + + @abstractmethod + def is_available(self) -> bool: + """Quick check — is this provider's folder accessible?""" + + def get_memory_root(self, sync_root: Path) -> Path: + """ + Where inside the sync root to store memsync data. + Default: sync_root / ".claude-memory" + Override only if the provider hides dot-folders (e.g. iCloud). 
+ """ + return sync_root / ".claude-memory" +``` + +--- + +## Worked example: Dropbox + +### Step 1 — Create `memsync/providers/dropbox.py` + +```python +from __future__ import annotations + +import os +import platform +from pathlib import Path + +from memsync.providers import BaseProvider, register + + +@register +class DropboxProvider(BaseProvider): + name = "dropbox" + display_name = "Dropbox" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Darwin": + # Dropbox sets ~/.dropbox/info.json with the sync path + info = Path.home() / ".dropbox" / "info.json" + if info.exists(): + import json + data = json.loads(info.read_text(encoding="utf-8")) + path_str = data.get("personal", {}).get("path") + if path_str: + p = Path(path_str) + if p.exists(): + return p + # Fallback: common default + default = Path.home() / "Dropbox" + if default.exists(): + return default + + elif system == "Windows": + # Check Dropbox info.json on Windows + appdata = os.environ.get("APPDATA", "") + info = Path(appdata) / "Dropbox" / "info.json" + if info.exists(): + import json + data = json.loads(info.read_text(encoding="utf-8")) + path_str = data.get("personal", {}).get("path") + if path_str: + p = Path(path_str) + if p.exists(): + return p + default = Path.home() / "Dropbox" + if default.exists(): + return default + + elif system == "Linux": + # Dropbox info.json also exists on Linux + info = Path.home() / ".dropbox" / "info.json" + if info.exists(): + import json + data = json.loads(info.read_text(encoding="utf-8")) + path_str = data.get("personal", {}).get("path") + if path_str: + p = Path(path_str) + if p.exists(): + return p + default = Path.home() / "Dropbox" + if default.exists(): + return default + + return None +``` + +### Step 2 — Register it in `memsync/providers/__init__.py` + 
+Add one line at the bottom of the file, after the existing provider imports: + +```python +from memsync.providers import onedrive, icloud, gdrive, custom, dropbox # noqa: E402, F401 +``` + +The `@register` decorator handles the rest. The import order determines priority during `memsync init` auto-detection. + +### Step 3 — Add tests in `tests/test_providers.py` + +```python +from memsync.providers.dropbox import DropboxProvider + + +class TestDropboxProvider: + def test_detects_default_path(self, tmp_path, monkeypatch): + dropbox_dir = tmp_path / "Dropbox" + dropbox_dir.mkdir() + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = DropboxProvider() + result = provider.detect() + assert result == dropbox_dir + + def test_returns_none_when_not_found(self, tmp_path, monkeypatch): + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + provider = DropboxProvider() + assert provider.detect() is None + + def test_never_raises(self, monkeypatch): + monkeypatch.setattr(DropboxProvider, "_find", lambda self: (_ for _ in ()).throw(Exception("boom"))) + provider = DropboxProvider() + assert provider.detect() is None +``` + +### Step 4 — Update the README + +Add a row to the providers table in `README.md`: + +```markdown +| Dropbox | ✓ | ✓ | ✓ | +``` + +--- + +## Things to get right + +**`detect()` must never raise.** Wrap all detection logic in `_find()` and call it from `detect()` inside `try/except Exception`. A provider that throws crashes `memsync providers` for everyone. + +**Check `exists()` before returning.** Always verify the path actually exists before returning it. A path that exists in the config but not on disk is wrong. + +**`info.json` vs env vars vs default paths.** Prefer provider-documented paths (like Dropbox's `info.json`) over guessing default paths. The guesses are a fallback. 
+ +**Don't override `get_memory_root()` unless necessary.** The default (`.claude-memory`) is correct for most providers. Only override it if the provider has a technical reason not to sync dot-folders (like iCloud on Mac). + +**Detection priority.** Providers are detected in import order. If you want your provider to be checked before Google Drive but after iCloud, put it in that order in the import line. + +--- + +## Testing your provider without a real account + +Use `tmp_path` and `monkeypatch` to simulate the filesystem. See the existing provider tests in `tests/test_providers.py` for the pattern. Never create real files in `~`, `~/.config`, or any cloud folder during tests. diff --git a/docs/global-memory-guide.md b/docs/global-memory-guide.md new file mode 100644 index 0000000..6d3837c --- /dev/null +++ b/docs/global-memory-guide.md @@ -0,0 +1,116 @@ +# What to put in GLOBAL_MEMORY.md + +This is the file Claude Code reads at the start of every session. It's your **identity layer** — not a project wiki, not a knowledge base. Keep it tight and personal. + +--- + +## The starter template + +When you run `memsync init`, you get this: + +```markdown + +# Global Memory + +> Loaded by Claude Code at session start on all machines and projects. +> Edit directly or run: memsync refresh --notes "..." + +## Identity & context +- (Fill this in — who you are, your roles, active projects) + +## Current priorities +- (What you're working on right now) + +## Standing preferences +- (How you like to work — communication style, output format, etc.) + +## Hard constraints +- (Rules that must never be lost or softened through compaction) +``` + +Fill it in. The section names are your structure — keep them. + +--- + +## Identity & context + +Who you are and what you're working on. Claude reads this cold at every session. 
+ +```markdown +## Identity & context +- Ian, product leader at a B2B SaaS company +- Side projects: memsync (Python CLI), personal finance tracker (Go) +- Background: 10 years PM, comfortable with code but not a full-time engineer +- Working across: Mac (home), Windows (work) +``` + +--- + +## Current priorities + +What's active right now. This section gets updated most often by `memsync refresh`. + +```markdown +## Current priorities +- memsync v0.2: finish tests and CI, publish to PyPI +- Q2 planning deck due April 15 +- Hiring: two PM openings, first round interviews next week +``` + +Completed items get demoted to a brief "Recent completions" section automatically during refresh. They don't stay forever — that's what session logs are for. + +--- + +## Standing preferences + +How you like to work. These persist across all projects and sessions. + +```markdown +## Standing preferences +- Prefer concise output — skip the preamble, just give me the thing +- Code: Python 3.11+, pathlib everywhere, no magic, no cleverness +- Writing: active voice, short sentences, no bullet-point summaries unless asked +- Don't suggest tests unless I ask — I know when I need them +- When in doubt, ask one clarifying question rather than guessing +``` + +--- + +## Hard constraints + +Rules that must never be removed or softened, no matter how much the memory compacts. Claude checks this section in Python code — it's enforced, not just prompted. + +```markdown +## Hard constraints +- Never hardcode credentials or API keys in any code I write +- Always ask before deleting files or making destructive changes +- Never rewrite from scratch — refactor what exists +- Don't add emoji to output unless I explicitly ask +``` + +Good candidates: safety rules, things that bit you in the past, preferences so strong that "sometimes" isn't acceptable. 
+ +--- + +## What NOT to put here + +- **Project-specific docs** — those go in each project's `CLAUDE.md` +- **Reference material** — API docs, schemas, architecture diagrams +- **Cold storage** — old project summaries, historical context +- **Everything** — this file has a soft cap of ~400 lines. If it's getting long, you have too much in it. + +The memory file should read like a dense briefing note, not a wiki. If Claude can derive something from the project files, it doesn't need to be here. + +--- + +## Keeping it current + +After any session where something important shifted — a decision made, a priority changed, a preference discovered — run: + +```bash +memsync refresh --notes "Decided to use JWT auth, not sessions. Slower but simpler for our use case." +``` + +The Claude API reads your notes and the current memory file, and updates it accordingly. The old version is backed up automatically. + +You can also edit `GLOBAL_MEMORY.md` directly at any time. Just run `memsync refresh` afterward (even with minimal notes) to sync the copy to `~/.claude/CLAUDE.md`. diff --git a/memsync/__init__.py b/memsync/__init__.py new file mode 100644 index 0000000..d3ec452 --- /dev/null +++ b/memsync/__init__.py @@ -0,0 +1 @@ +__version__ = "0.2.0" diff --git a/memsync/backups.py b/memsync/backups.py new file mode 100644 index 0000000..854d31a --- /dev/null +++ b/memsync/backups.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import shutil +from datetime import datetime, timedelta +from pathlib import Path + + +def backup(source: Path, backup_dir: Path) -> Path: + """ + Copy source to backup_dir with a timestamp suffix. + Returns the path of the new backup file. + """ + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + dest = backup_dir / f"GLOBAL_MEMORY_{timestamp}.md" + shutil.copy2(source, dest) + return dest + + +def prune(backup_dir: Path, keep_days: int) -> list[Path]: + """ + Delete backups older than keep_days. Returns list of deleted paths. 
+ """ + cutoff = datetime.now() - timedelta(days=keep_days) + deleted: list[Path] = [] + + for backup_file in backup_dir.glob("GLOBAL_MEMORY_*.md"): + try: + ts_str = backup_file.stem.replace("GLOBAL_MEMORY_", "") + ts = datetime.strptime(ts_str, "%Y%m%d_%H%M%S") + if ts < cutoff: + backup_file.unlink() + deleted.append(backup_file) + except ValueError: + pass # skip files with unexpected names + + return deleted + + +def list_backups(backup_dir: Path) -> list[Path]: + """Return all backups sorted newest-first.""" + backups = list(backup_dir.glob("GLOBAL_MEMORY_*.md")) + return sorted(backups, reverse=True) + + +def latest_backup(backup_dir: Path) -> Path | None: + """Return the most recent backup, or None if no backups exist.""" + backups = list_backups(backup_dir) + return backups[0] if backups else None diff --git a/memsync/claude_md.py b/memsync/claude_md.py new file mode 100644 index 0000000..61ccf83 --- /dev/null +++ b/memsync/claude_md.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import platform +import shutil +from pathlib import Path + + +def sync(memory_path: Path, target_path: Path) -> None: + """ + Keep target_path (CLAUDE.md) in sync with memory_path (GLOBAL_MEMORY.md). + + Mac/Linux: create a symlink. If a non-memsync file already exists at the + target, back it up first (.pre-memsync.bak) so user data is never lost. + + Windows: always copy — symlinks require admin/Developer Mode. The copy is + refreshed on every `memsync refresh`, so drift is acceptable in practice. 
def is_synced(memory_path: Path, target_path: Path) -> bool:
    """
    Report whether target_path mirrors memory_path.

    A symlink counts as synced when it resolves to the same file; a regular
    file (the Windows copy strategy) counts as synced when its bytes match.
    """
    if not target_path.exists():
        return False

    if target_path.is_symlink():
        return memory_path.resolve() == target_path.resolve()

    # Regular-file case: fall back to a byte-for-byte content comparison.
    try:
        same = memory_path.read_bytes() == target_path.read_bytes()
    except OSError:
        return False
    return same
def _require_memory_root(config: Config) -> tuple[Path, int] | tuple[None, int]:
    """
    Resolve the memory root and verify it exists on disk.

    Returns (memory_root, 0) on success, or (None, exit_code) where the exit
    code distinguishes "provider unresolved" (4) from "not initialized" (2).
    """
    root = _resolve_memory_root(config)
    if root is None:
        # _resolve_memory_root already printed the reason to stderr.
        return None, 4
    if root.exists():
        return root, 0
    print(
        "Error: memory directory not found. Run 'memsync init' first.",
        file=sys.stderr,
    )
    return None, 2
Use --force to reinitialize.") + return 0 + + # Resolve provider + if args.sync_root: + sync_root = Path(args.sync_root).expanduser() + if not sync_root.exists(): + print(f"Error: path does not exist: {sync_root}", file=sys.stderr) + return 1 + provider_name = args.provider or "custom" + try: + provider = get_provider(provider_name) + except KeyError: + provider = get_provider("custom") + provider_name = "custom" + memory_root = provider.get_memory_root(sync_root) + + elif args.provider: + try: + provider = get_provider(args.provider) + except KeyError as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + sync_root = provider.detect() + if sync_root is None: + print( + f"Error: provider '{args.provider}' could not find its sync folder.\n" + f"Try: memsync init --sync-root /path/to/folder", + file=sys.stderr, + ) + return 4 + memory_root = provider.get_memory_root(sync_root) + provider_name = args.provider + + else: + # Auto-detect + detected = auto_detect() + if not detected: + print( + "Error: no cloud sync folder detected.\n" + "Run with --sync-root to specify a path manually:\n" + " memsync init --sync-root /path/to/sync/folder", + file=sys.stderr, + ) + return 4 + + if len(detected) == 1: + provider = detected[0] + sync_root = provider.detect() + memory_root = provider.get_memory_root(sync_root) + provider_name = provider.name + else: + # Multiple detected — ask user to choose + print("Multiple sync providers detected:") + for i, p in enumerate(detected, 1): + path = p.detect() + print(f" {i}. 
{p.display_name} ({path})") + while True: + choice = input(f"Choose [1-{len(detected)}]: ").strip() + if choice.isdigit() and 1 <= int(choice) <= len(detected): + provider = detected[int(choice) - 1] + sync_root = provider.detect() + memory_root = provider.get_memory_root(sync_root) + provider_name = provider.name + break + print("Invalid choice.") + + # Create directory structure + for subdir in (memory_root, memory_root / "backups", memory_root / "sessions"): + subdir.mkdir(parents=True, exist_ok=True) + + # Write starter memory if not present (--force skips this check) + global_memory = memory_root / "GLOBAL_MEMORY.md" + if not global_memory.exists() or args.force: + starter = load_or_init_memory(Path("/nonexistent/force-new")) + global_memory.write_text(starter, encoding="utf-8") + + # Write config + new_config = Config( + provider=provider_name, + sync_root=sync_root if provider_name == "custom" else None, + ) + new_config.save() + + # Sync to CLAUDE.md + sync_claude_md(global_memory, new_config.claude_md_target) + + print("memsync initialized.\n") + print(f" Provider: {provider.display_name}") + print(f" Sync root: {sync_root}") + print(f" Memory: {global_memory}") + target = new_config.claude_md_target + if target.is_symlink(): + print(f" CLAUDE.md: {target} → (symlink)") + else: + print(f" CLAUDE.md: {target}") + print() + print("Next: edit your memory file, then run:") + print(' memsync refresh --notes "initial setup complete"') + return 0 + + +def cmd_refresh(args: argparse.Namespace, config: Config) -> int: + """Merge session notes into GLOBAL_MEMORY.md via the Claude API.""" + # Gather notes + notes = "" + if args.notes: + notes = args.notes + elif args.file: + note_path = Path(args.file) + if not note_path.exists(): + print(f"Error: file not found: {args.file}", file=sys.stderr) + return 1 + notes = note_path.read_text(encoding="utf-8") + else: + if not sys.stdin.isatty(): + notes = sys.stdin.read() + else: + print( + "Error: provide --notes, --file, 
or pipe notes via stdin.", + file=sys.stderr, + ) + return 1 + + if not notes.strip(): + print("Error: notes are empty.", file=sys.stderr) + return 1 + + # Allow one-off model override without touching config + if args.model: + config = dataclasses.replace(config, model=args.model) + + # Resolve paths + memory_root, code = _require_memory_root(config) + if memory_root is None: + return code + + global_memory = memory_root / "GLOBAL_MEMORY.md" + if not global_memory.exists(): + print( + "Error: GLOBAL_MEMORY.md not found. Run 'memsync init' first.", + file=sys.stderr, + ) + return 3 + + current_memory = load_or_init_memory(global_memory) + + print("Refreshing global memory...", end=" ", flush=True) + + try: + result = refresh_memory_content(notes, current_memory, config) + except anthropic.BadRequestError as e: + if "model" in str(e).lower(): + print( + f"\nError: model '{config.model}' may be unavailable or misspelled.\n" + f"Update with: memsync config set model ", + file=sys.stderr, + ) + return 5 + raise + except anthropic.APIError as e: + print(f"\nError: API request failed: {e}", file=sys.stderr) + return 5 + + if args.dry_run: + print("\n[DRY RUN] No files written.\n") + if result["changed"]: + old_lines = current_memory.strip().splitlines(keepends=True) + new_lines = result["updated_content"].splitlines(keepends=True) + diff = difflib.unified_diff(old_lines, new_lines, fromfile="current", tofile="updated") + diff_text = "".join(diff) + if diff_text: + print("--- diff ---") + print(diff_text) + else: + print("No changes detected.") + return 0 + + if result["truncated"]: + print( + "\nError: API response was truncated (hit max_tokens limit).\n" + "Memory file was NOT updated. 
Try reducing your notes or memory file size.", + file=sys.stderr, + ) + return 5 + + if not result["changed"]: + print("no changes.") + return 0 + + # Backup then write + backup_path = backup(global_memory, memory_root / "backups") + global_memory.write_text(result["updated_content"], encoding="utf-8") + sync_claude_md(global_memory, config.claude_md_target) + log_session_notes(notes, memory_root / "sessions") + + print("done.") + print(f" Backup: {backup_path}") + print(f" Memory: {global_memory}") + print(" CLAUDE.md synced ✓") + return 0 + + +def cmd_show(args: argparse.Namespace, config: Config) -> int: + """Print current GLOBAL_MEMORY.md to stdout.""" + memory_root, code = _require_memory_root(config) + if memory_root is None: + return code + + global_memory = memory_root / "GLOBAL_MEMORY.md" + if not global_memory.exists(): + print("No global memory file yet. Run: memsync init", file=sys.stderr) + return 3 + + print(global_memory.read_text(encoding="utf-8")) + return 0 + + +def cmd_diff(args: argparse.Namespace, config: Config) -> int: + """Show unified diff between current memory and the most recent (or specified) backup.""" + memory_root, code = _require_memory_root(config) + if memory_root is None: + return code + + global_memory = memory_root / "GLOBAL_MEMORY.md" + if not global_memory.exists(): + print("No global memory file yet. 
Run: memsync init", file=sys.stderr) + return 3 + + backup_dir = memory_root / "backups" + + if args.backup: + backup_path = backup_dir / args.backup + if not backup_path.exists(): + print(f"Error: backup not found: {args.backup}", file=sys.stderr) + return 1 + else: + backup_path = latest_backup(backup_dir) + if backup_path is None: + print("No backups found.") + return 0 + + current = global_memory.read_text(encoding="utf-8").splitlines(keepends=True) + previous = backup_path.read_text(encoding="utf-8").splitlines(keepends=True) + + diff = list(difflib.unified_diff( + previous, current, + fromfile=f"backup ({backup_path.name})", + tofile="current", + )) + + if diff: + print("".join(diff)) + else: + print("No differences from last backup.") + return 0 + + +def cmd_status(args: argparse.Namespace, config: Config) -> int: + """Show paths, provider, and sync state.""" + system = platform.system() + _os_names = {"Darwin": "macOS (Darwin)", "Windows": "Windows", "Linux": "Linux"} + os_name = _os_names.get(system, system) + print(f"Platform: {os_name}") + + config_path = get_config_path() + config_marker = "✓" if config_path.exists() else "✗ (not found — run memsync init)" + print(f"Config: {config_path} {config_marker}") + print(f"Provider: {config.provider}") + print(f"Model: {config.model}") + + memory_root = _resolve_memory_root(config) + if memory_root is None: + return 4 + + if config.sync_root: + print(f"Sync root: {config.sync_root} {'✓' if config.sync_root.exists() else '✗'}") + else: + try: + provider = get_provider(config.provider) + sync_root = provider.detect() + label = str(sync_root) if sync_root else "(not detected)" + marker = "✓" if sync_root else "✗" + print(f"Sync root: {label} {marker}") + except KeyError: + print(f"Sync root: (unknown provider '{config.provider}')") + + global_memory = memory_root / "GLOBAL_MEMORY.md" + mem_marker = "✓" if global_memory.exists() else "✗ (run memsync init)" + print(f"Memory: {global_memory} {mem_marker}") + + target 
= config.claude_md_target + if target.is_symlink(): + print(f"CLAUDE.md: {target} → symlink ✓") + elif target.exists(): + print(f"CLAUDE.md: {target} ✓ (copy)") + else: + print(f"CLAUDE.md: {target} ✗ (not synced — run memsync init)") + + backup_dir = memory_root / "backups" + if backup_dir.exists(): + count = len(list_backups(backup_dir)) + print(f"Backups: {count} file(s)") + + session_dir = memory_root / "sessions" + if session_dir.exists(): + sessions = list(session_dir.glob("*.md")) + print(f"Session logs: {len(sessions)} day(s)") + + return 0 + + +def cmd_prune(args: argparse.Namespace, config: Config) -> int: + """Remove old backups.""" + memory_root, code = _require_memory_root(config) + if memory_root is None: + return code + + backup_dir = memory_root / "backups" + keep_days = args.keep_days if args.keep_days is not None else config.keep_days + + if args.dry_run: + from datetime import datetime, timedelta + cutoff = datetime.now() - timedelta(days=keep_days) + would_delete = [ + b for b in list_backups(backup_dir) + if _backup_timestamp(b) and _backup_timestamp(b) < cutoff + ] + if would_delete: + n = len(would_delete) + print(f"[DRY RUN] Would prune {n} backup(s) older than {keep_days} days:") + for p in would_delete: + print(f" {p.name}") + else: + print(f"[DRY RUN] No backups older than {keep_days} days.") + return 0 + + deleted = prune(backup_dir, keep_days=keep_days) + if deleted: + print(f"Pruned {len(deleted)} backup(s) older than {keep_days} days.") + for p in deleted: + print(f" removed: {p.name}") + else: + print(f"No backups older than {keep_days} days.") + return 0 + + +def _backup_timestamp(path: Path): + """Parse timestamp from backup filename, or return None.""" + from datetime import datetime + try: + ts_str = path.stem.replace("GLOBAL_MEMORY_", "") + return datetime.strptime(ts_str, "%Y%m%d_%H%M%S") + except ValueError: + return None + + +def cmd_providers(args: argparse.Namespace, config: Config) -> int: + """List all registered 
providers and their detection status.""" + print("Available providers:\n") + for provider in all_providers(): + detected_path = provider.detect() + if detected_path: + marker = f"✓ detected at {detected_path}" + else: + if provider.name == "custom": + marker = "✗ no path configured" + else: + marker = "✗ not detected" + print(f" {provider.name:<10} {provider.display_name:<18} {marker}") + + print(f"\nActive provider: {config.provider}") + return 0 + + +def cmd_doctor(args: argparse.Namespace, config: Config) -> int: + """ + Self-check: verify the installation is healthy without making any API calls. + Exits 0 if all checks pass, 1 if any check fails. + """ + import os + + checks: list[tuple[str, bool, str]] = [] # (label, ok, detail) + + # 1. Config file + config_path = get_config_path() + checks.append(("Config file", config_path.exists(), str(config_path))) + + # 2. ANTHROPIC_API_KEY set + api_key_set = bool(os.environ.get("ANTHROPIC_API_KEY")) + api_key_detail = "(set)" if api_key_set else "not set — refresh will fail" + checks.append(("ANTHROPIC_API_KEY", api_key_set, api_key_detail)) + + # 3. Provider / sync root accessible + if config.sync_root: + # Custom or explicit path — just verify it exists + provider_ok = config.sync_root.exists() + provider_detail = str(config.sync_root) + else: + try: + provider = get_provider(config.provider) + sync_root = provider.detect() + provider_ok = sync_root is not None + provider_detail = ( + str(sync_root) if sync_root else f"'{config.provider}' not detected on this machine" + ) + except KeyError: + provider_ok = False + provider_detail = f"unknown provider '{config.provider}'" + checks.append((f"Provider ({config.provider})", provider_ok, provider_detail)) + + # 4. Memory root exists + memory_root = _resolve_memory_root(config) + if memory_root: + mem_ok = memory_root.exists() + checks.append(("Memory directory", mem_ok, str(memory_root))) + + # 5. 
GLOBAL_MEMORY.md exists + global_memory = memory_root / "GLOBAL_MEMORY.md" + checks.append(("GLOBAL_MEMORY.md", global_memory.exists(), str(global_memory))) + + # 6. CLAUDE.md is synced + target = config.claude_md_target + from memsync.claude_md import is_synced + synced = global_memory.exists() and is_synced(global_memory, target) + detail = f"{target} → {'synced' if synced else 'not synced (run memsync init)'}" + checks.append(("CLAUDE.md synced", synced, detail)) + else: + checks.append(("Memory directory", False, "cannot resolve — fix provider first")) + + # Print results + all_ok = all(ok for _, ok, _ in checks) + print("memsync doctor\n") + for label, ok, detail in checks: + marker = "✓" if ok else "✗" + print(f" {marker} {label:<25} {detail}") + + print() + if all_ok: + print("All checks passed.") + else: + failed = [label for label, ok, _ in checks if not ok] + print(f"{len(failed)} check(s) failed: {', '.join(failed)}") + + return 0 if all_ok else 1 + + +def cmd_config_show(args: argparse.Namespace, config: Config) -> int: + """Print current config.toml contents.""" + config_path = get_config_path() + if not config_path.exists(): + print("No config file found. 
def cmd_config_set(args: argparse.Namespace, config: Config) -> int:
    """Validate a single config key/value pair, apply it, and persist the config."""
    key, value = args.key, args.value

    known_keys = {
        "provider", "model", "sync_root", "claude_md_target", "max_memory_lines", "keep_days",
    }
    if key not in known_keys:
        print(
            f"Error: unknown config key '{key}'.\n"
            f"Valid keys: {', '.join(sorted(known_keys))}",
            file=sys.stderr,
        )
        return 1

    if key == "model":
        config = dataclasses.replace(config, model=value)

    elif key == "provider":
        registered = {p.name for p in all_providers()}
        if value not in registered:
            print(
                f"Error: unknown provider '{value}'.\n"
                f"Available: {', '.join(sorted(registered))}",
                file=sys.stderr,
            )
            return 1
        config = dataclasses.replace(config, provider=value)

    elif key == "sync_root":
        new_root = Path(value).expanduser()
        if not new_root.exists():
            print(f"Error: path does not exist: {new_root}", file=sys.stderr)
            return 1
        # Pointing at an explicit path implies the custom provider.
        config = dataclasses.replace(config, sync_root=new_root, provider="custom")

    elif key == "claude_md_target":
        config = dataclasses.replace(config, claude_md_target=Path(value).expanduser())

    elif key == "max_memory_lines":
        if not value.isdigit():
            print("Error: max_memory_lines must be an integer.", file=sys.stderr)
            return 1
        config = dataclasses.replace(config, max_memory_lines=int(value))

    else:  # keep_days — the only remaining member of known_keys
        if not value.isdigit():
            print("Error: keep_days must be an integer.", file=sys.stderr)
            return 1
        config = dataclasses.replace(config, keep_days=int(value))

    config.save()
    print(f"Set {key} = {value}")
    return 0
module is not installed.\n" + "Install it with: pip install memsync[daemon]" +) + +_PID_FILE = Path("~/.config/memsync/daemon.pid").expanduser() + + +def _daemon_import_guard() -> bool: + """Return True if daemon extras are installed, False (with error) if not.""" + try: + import apscheduler # noqa: F401 + import flask # noqa: F401 + return True + except ImportError: + print(_DAEMON_INSTALL_HINT, file=sys.stderr) + return False + + +def cmd_daemon_start(args: argparse.Namespace, config: Config) -> int: + """Start the daemon (foreground or detached).""" + if not _daemon_import_guard(): + return 1 + + if args.detach: + import subprocess + + script = [sys.executable, "-m", "memsync.cli", "daemon", "start"] + kwargs: dict = {"stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL} + if platform.system() == "Windows": + _flags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP + kwargs["creationflags"] = _flags + else: + kwargs["start_new_session"] = True + + proc = subprocess.Popen(script, **kwargs) # noqa: S603 + _PID_FILE.parent.mkdir(parents=True, exist_ok=True) + _PID_FILE.write_text(str(proc.pid), encoding="utf-8") + print(f"Daemon started (PID {proc.pid}).") + print("Stop with: memsync daemon stop") + return 0 + + # Foreground mode — run everything in threads, block until interrupted + import threading + + from memsync.daemon.scheduler import build_scheduler + + threads: list[threading.Thread] = [] + + if config.daemon.web_ui_enabled: + from memsync.daemon.web import run_web + + t = threading.Thread(target=run_web, args=[config], daemon=True, name="web-ui") + t.start() + threads.append(t) + print(f"Web UI: http://{config.daemon.web_ui_host}:{config.daemon.web_ui_port}/") + + if config.daemon.capture_enabled: + from memsync.daemon.capture import run_capture + + t = threading.Thread(target=run_capture, args=[config], daemon=True, name="capture") + t.start() + threads.append(t) + print(f"Capture: http://0.0.0.0:{config.daemon.capture_port}/note") 
def cmd_daemon_stop(args: argparse.Namespace, config: Config) -> int:
    """
    Stop a detached daemon process recorded in the PID file.

    Removes the PID file whether the process was killed or had already
    exited (stale file).
    """
    if not _PID_FILE.exists():
        print("No running daemon found (PID file not present).", file=sys.stderr)
        return 1

    import signal
    # Imported unconditionally: subprocess.CalledProcessError is referenced in
    # the except clause below even on POSIX.
    import subprocess

    pid_text = _PID_FILE.read_text(encoding="utf-8").strip()
    try:
        pid = int(pid_text)
    except ValueError:
        print(f"Invalid PID file: {_PID_FILE}", file=sys.stderr)
        return 1

    try:
        if platform.system() == "Windows":
            # check=True raises CalledProcessError when taskkill cannot find
            # the PID — handled below exactly like a stale PID on POSIX.
            subprocess.run(["taskkill", "/PID", str(pid), "/F"], check=True)  # noqa: S603,S607
        else:
            import os
            os.kill(pid, signal.SIGTERM)
        _PID_FILE.unlink(missing_ok=True)
        print(f"Daemon stopped (PID {pid}).")
    except (ProcessLookupError, OSError, subprocess.CalledProcessError):
        # Process already gone (or taskkill failed) — clean up the stale file.
        _PID_FILE.unlink(missing_ok=True)
        print(f"Process {pid} not found (already stopped?). PID file removed.")
    return 0


def cmd_daemon_status(args: argparse.Namespace, config: Config) -> int:
    """Show daemon running status plus the configured web/capture/refresh state."""
    if not _daemon_import_guard():
        return 1

    if _PID_FILE.exists():
        pid_text = _PID_FILE.read_text(encoding="utf-8").strip()
        try:
            pid = int(pid_text)
            # Check if process is still running
            if platform.system() == "Windows":
                import subprocess
                result = subprocess.run(
                    ["tasklist", "/FI", f"PID eq {pid}"], capture_output=True, text=True
                )
                # Compare whole tokens: a plain substring test would let
                # PID 12 falsely match an unrelated PID 1234 in the output.
                running = any(
                    str(pid) in line.split() for line in result.stdout.splitlines()
                )
            else:
                import os
                try:
                    os.kill(pid, 0)  # signal 0: existence probe, sends nothing
                    running = True
                except PermissionError:
                    # EPERM means the process exists but belongs to another
                    # user — it is still running.
                    running = True
                except (ProcessLookupError, OSError):
                    running = False

            if running:
                print(f"Daemon is running (PID {pid}).")
            else:
                print(f"Daemon is NOT running (stale PID file: {pid}).")
                _PID_FILE.unlink(missing_ok=True)
        except ValueError:
            print(f"Invalid PID file: {_PID_FILE}", file=sys.stderr)
            return 1
    else:
        print("Daemon is not running.")

    print(f"\nWeb UI: {'enabled' if config.daemon.web_ui_enabled else 'disabled'}"
          f" (port {config.daemon.web_ui_port})")
    print(f"Capture: {'enabled' if config.daemon.capture_enabled else 'disabled'}"
          f" (port {config.daemon.capture_port})")
    print(f"Refresh: {'enabled' if config.daemon.refresh_enabled else 'disabled'}"
          f" (schedule: {config.daemon.refresh_schedule})")
    return 0
daemon)" + ) + print(f" {job.name}") + print(f" ID: {job.id}") + print(f" Next run: {next_str}") + print() + return 0 + + +def cmd_daemon_install(args: argparse.Namespace, config: Config) -> int: + """Register the daemon as a system service (auto-starts on boot).""" + if not _daemon_import_guard(): + return 1 + + from memsync.daemon.service import install_service + + try: + install_service() + except NotImplementedError as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + except PermissionError: + print( + "Error: permission denied. Try: sudo memsync daemon install", + file=sys.stderr, + ) + return 1 + return 0 + + +def cmd_daemon_uninstall(args: argparse.Namespace, config: Config) -> int: + """Remove the daemon system service registration.""" + if not _daemon_import_guard(): + return 1 + + from memsync.daemon.service import uninstall_service + + try: + uninstall_service() + except NotImplementedError as e: + print(f"Error: {e}", file=sys.stderr) + return 1 + return 0 + + +def cmd_daemon_web(args: argparse.Namespace, config: Config) -> int: + """Open the web UI in the default browser.""" + if not _daemon_import_guard(): + return 1 + + import webbrowser + + host = config.daemon.web_ui_host + # 0.0.0.0 means listening on all interfaces — open localhost for browser + browser_host = "localhost" if host in ("0.0.0.0", "") else host # noqa: S104 + url = f"http://{browser_host}:{config.daemon.web_ui_port}/" + print(f"Opening {url}") + webbrowser.open(url) + return 0 + + +# --------------------------------------------------------------------------- +# Argument parser +# --------------------------------------------------------------------------- + +def build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + prog="memsync", + description="Cross-platform global memory manager for Claude Code.", + ) + parser.add_argument("--version", action="version", version=f"memsync {__version__}") + subparsers = parser.add_subparsers(dest="command", 
required=True) + + # init + p_init = subparsers.add_parser("init", help="Set up memory structure for the first time") + p_init.add_argument("--force", action="store_true", help="Reinitialize even if already set up") + p_init.add_argument("--provider", help="Skip auto-detection, use this provider") + p_init.add_argument("--sync-root", help="Skip auto-detection, use this path directly") + p_init.set_defaults(func=cmd_init) + + # refresh + p_refresh = subparsers.add_parser("refresh", help="Merge session notes into global memory") + p_refresh.add_argument("--notes", "-n", help="Session notes as a string") + p_refresh.add_argument("--file", "-f", help="Path to a file containing session notes") + p_refresh.add_argument("--dry-run", action="store_true", help="Preview changes without writing") + p_refresh.add_argument("--model", help="One-off model override (doesn't change config)") + p_refresh.set_defaults(func=cmd_refresh) + + # show + p_show = subparsers.add_parser("show", help="Print current global memory") + p_show.set_defaults(func=cmd_show) + + # diff + p_diff = subparsers.add_parser("diff", help="Diff current memory vs last backup") + p_diff.add_argument("--backup", help="Diff against a specific backup filename") + p_diff.set_defaults(func=cmd_diff) + + # status + p_status = subparsers.add_parser("status", help="Show paths, provider, and sync state") + p_status.set_defaults(func=cmd_status) + + # prune + p_prune = subparsers.add_parser("prune", help="Remove old backups") + p_prune.add_argument("--keep-days", type=int, dest="keep_days", default=None, + help="Keep backups newer than this many days (default: from config)") + p_prune.add_argument("--dry-run", action="store_true", help="List what would be deleted") + p_prune.set_defaults(func=cmd_prune) + + # providers + p_providers = subparsers.add_parser("providers", help="List providers and detection status") + p_providers.set_defaults(func=cmd_providers) + + # doctor + p_doctor = subparsers.add_parser("doctor", 
help="Self-check: verify installation health") + p_doctor.set_defaults(func=cmd_doctor) + + # config + p_config = subparsers.add_parser("config", help="View or update config") + config_sub = p_config.add_subparsers(dest="config_command", required=True) + + p_config_show = config_sub.add_parser("show", help="Print current config.toml") + p_config_show.set_defaults(func=cmd_config_show) + + p_config_set = config_sub.add_parser("set", help="Update a config value") + p_config_set.add_argument("key", help="Config key to update") + p_config_set.add_argument("value", help="New value") + p_config_set.set_defaults(func=cmd_config_set) + + # daemon (requires memsync[daemon]) + p_daemon = subparsers.add_parser("daemon", help="Manage the optional daemon process") + daemon_sub = p_daemon.add_subparsers(dest="daemon_command", required=True) + + p_daemon_start = daemon_sub.add_parser("start", help="Start the daemon") + p_daemon_start.add_argument( + "--detach", action="store_true", help="Start as a background process" + ) + p_daemon_start.set_defaults(func=cmd_daemon_start) + + p_daemon_stop = daemon_sub.add_parser("stop", help="Stop the detached daemon") + p_daemon_stop.set_defaults(func=cmd_daemon_stop) + + p_daemon_status = daemon_sub.add_parser("status", help="Show daemon running status") + p_daemon_status.set_defaults(func=cmd_daemon_status) + + p_daemon_schedule = daemon_sub.add_parser( + "schedule", help="Show scheduled jobs and next run times" + ) + p_daemon_schedule.set_defaults(func=cmd_daemon_schedule) + + p_daemon_install = daemon_sub.add_parser( + "install", help="Register as a system service (auto-starts on boot)" + ) + p_daemon_install.set_defaults(func=cmd_daemon_install) + + p_daemon_uninstall = daemon_sub.add_parser( + "uninstall", help="Remove system service registration" + ) + p_daemon_uninstall.set_defaults(func=cmd_daemon_uninstall) + + p_daemon_web = daemon_sub.add_parser("web", help="Open web UI in browser") + p_daemon_web.set_defaults(func=cmd_daemon_web) 
+ + return parser + + +def main() -> None: + # Ensure UTF-8 output on Windows (needed for ✓/✗ status indicators) + if hasattr(sys.stdout, "reconfigure"): + sys.stdout.reconfigure(encoding="utf-8", errors="replace") + if hasattr(sys.stderr, "reconfigure"): + sys.stderr.reconfigure(encoding="utf-8", errors="replace") + + parser = build_parser() + args = parser.parse_args() + config = Config.load() + sys.exit(args.func(args, config)) + + +if __name__ == "__main__": + main() diff --git a/memsync/config.py b/memsync/config.py new file mode 100644 index 0000000..fe3c087 --- /dev/null +++ b/memsync/config.py @@ -0,0 +1,205 @@ +from __future__ import annotations + +import os +import platform +import tomllib +from dataclasses import dataclass, field +from pathlib import Path + + +@dataclass +class DaemonConfig: + """ + Configuration for the optional daemon module. + Only present in config.toml if the user has run 'memsync daemon install'. + All features default to reasonable values; none are on by default except + scheduled refresh and backup mirror (which requires a path to be set). + """ + enabled: bool = True + + # Scheduled refresh — reads today's session log and calls the Claude API + refresh_enabled: bool = True + refresh_schedule: str = "55 23 * * *" # 11:55pm daily + + # Backup mirror — local rsync copy of .claude-memory/ (empty = disabled) + backup_mirror_path: str = "" + backup_mirror_schedule: str = "0 * * * *" # hourly + + # Web UI — browser-based view/edit of GLOBAL_MEMORY.md + web_ui_enabled: bool = True + web_ui_port: int = 5000 + web_ui_host: str = "0.0.0.0" # noqa: S104 # 0.0.0.0 = LAN; 127.0.0.1 = localhost only + + # Mobile capture endpoint — REST POST for iPhone Shortcuts etc. 
+ capture_enabled: bool = True + capture_port: int = 5001 + capture_token: str = "" # empty = no auth (local network only) + + # Drift detection — alerts when CLAUDE.md is stale + drift_check_enabled: bool = True + drift_check_interval_hours: int = 6 + drift_notify: str = "log" # "log", "email", or "file" + + # Weekly digest email + digest_enabled: bool = False + digest_schedule: str = "0 9 * * 1" # Monday 9am + digest_email_to: str = "" + digest_email_from: str = "" + digest_smtp_host: str = "" + digest_smtp_port: int = 587 + digest_smtp_user: str = "" + digest_smtp_password: str = "" # prefer MEMSYNC_SMTP_PASSWORD env var + + +@dataclass +class Config: + # [core] + provider: str = "onedrive" + model: str = "claude-sonnet-4-20250514" + max_memory_lines: int = 400 + + # [paths] + sync_root: Path | None = None # None = use provider auto-detect + claude_md_target: Path = None # set in __post_init__ + + # [backups] + keep_days: int = 30 + + # [daemon] — only populated when daemon is installed + daemon: DaemonConfig = field(default_factory=DaemonConfig) + + def __post_init__(self) -> None: + if self.claude_md_target is None: + self.claude_md_target = Path("~/.claude/CLAUDE.md").expanduser() + + @classmethod + def load(cls) -> Config: + """Load config from disk, returning defaults if the file doesn't exist.""" + path = get_config_path() + if not path.exists(): + return cls() + with open(path, "rb") as f: + raw = tomllib.load(f) + return cls._from_dict(raw) + + @classmethod + def _from_dict(cls, raw: dict) -> Config: + core = raw.get("core", {}) + paths = raw.get("paths", {}) + backups = raw.get("backups", {}) + + sync_root = paths.get("sync_root") + claude_md_target_str = paths.get("claude_md_target") + + # Daemon section — only present if user has run 'memsync daemon install' + daemon_raw = raw.get("daemon", {}) + daemon = DaemonConfig( + enabled=daemon_raw.get("enabled", True), + refresh_enabled=daemon_raw.get("refresh_enabled", True), + 
refresh_schedule=daemon_raw.get("refresh_schedule", "55 23 * * *"), + backup_mirror_path=daemon_raw.get("backup_mirror_path", ""), + backup_mirror_schedule=daemon_raw.get("backup_mirror_schedule", "0 * * * *"), + web_ui_enabled=daemon_raw.get("web_ui_enabled", True), + web_ui_port=daemon_raw.get("web_ui_port", 5000), + web_ui_host=daemon_raw.get("web_ui_host", "0.0.0.0"), # noqa: S104 + capture_enabled=daemon_raw.get("capture_enabled", True), + capture_port=daemon_raw.get("capture_port", 5001), + capture_token=daemon_raw.get("capture_token", ""), + drift_check_enabled=daemon_raw.get("drift_check_enabled", True), + drift_check_interval_hours=daemon_raw.get("drift_check_interval_hours", 6), + drift_notify=daemon_raw.get("drift_notify", "log"), + digest_enabled=daemon_raw.get("digest_enabled", False), + digest_schedule=daemon_raw.get("digest_schedule", "0 9 * * 1"), + digest_email_to=daemon_raw.get("digest_email_to", ""), + digest_email_from=daemon_raw.get("digest_email_from", ""), + digest_smtp_host=daemon_raw.get("digest_smtp_host", ""), + digest_smtp_port=daemon_raw.get("digest_smtp_port", 587), + digest_smtp_user=daemon_raw.get("digest_smtp_user", ""), + digest_smtp_password=daemon_raw.get("digest_smtp_password", ""), + ) + + instance = cls( + provider=core.get("provider", "onedrive"), + model=core.get("model", "claude-sonnet-4-20250514"), + max_memory_lines=core.get("max_memory_lines", 400), + sync_root=Path(sync_root) if sync_root else None, + claude_md_target=( + Path(claude_md_target_str).expanduser() if claude_md_target_str else None + ), + keep_days=backups.get("keep_days", 30), + daemon=daemon, + ) + return instance + + def save(self) -> None: + """Write config to disk, creating parent directories if needed.""" + path = get_config_path() + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(self._to_toml(), encoding="utf-8") + + def _to_toml(self) -> str: + """ + Serialize config to TOML manually. + tomllib is read-only (stdlib). 
Schema is simple enough that manual + serialization avoids needing a tomli_w dependency. + """ + lines = [ + "[core]", + f'provider = "{self.provider}"', + f'model = "{self.model}"', + f"max_memory_lines = {self.max_memory_lines}", + "", + "[paths]", + f'claude_md_target = "{self.claude_md_target.as_posix()}"', + ] + if self.sync_root: + # TOML strings need forward slashes + lines.append(f'sync_root = "{self.sync_root.as_posix()}"') + lines += [ + "", + "[backups]", + f"keep_days = {self.keep_days}", + "", + ] + + # Only write [daemon] section if daemon is enabled (i.e. user ran daemon install) + if self.daemon.enabled: + d = self.daemon + lines += [ + "[daemon]", + f"enabled = {str(d.enabled).lower()}", + f'refresh_schedule = "{d.refresh_schedule}"', + f"refresh_enabled = {str(d.refresh_enabled).lower()}", + f'backup_mirror_path = "{d.backup_mirror_path}"', + f'backup_mirror_schedule = "{d.backup_mirror_schedule}"', + f"web_ui_enabled = {str(d.web_ui_enabled).lower()}", + f"web_ui_port = {d.web_ui_port}", + f'web_ui_host = "{d.web_ui_host}"', + f"capture_enabled = {str(d.capture_enabled).lower()}", + f"capture_port = {d.capture_port}", + f'capture_token = "{d.capture_token}"', + f"drift_check_enabled = {str(d.drift_check_enabled).lower()}", + f"drift_check_interval_hours = {d.drift_check_interval_hours}", + f'drift_notify = "{d.drift_notify}"', + f"digest_enabled = {str(d.digest_enabled).lower()}", + f'digest_schedule = "{d.digest_schedule}"', + f'digest_email_to = "{d.digest_email_to}"', + f'digest_email_from = "{d.digest_email_from}"', + f'digest_smtp_host = "{d.digest_smtp_host}"', + f"digest_smtp_port = {d.digest_smtp_port}", + f'digest_smtp_user = "{d.digest_smtp_user}"', + f'digest_smtp_password = "{d.digest_smtp_password}"', + "", + ] + + return "\n".join(lines) + + +def get_config_path() -> Path: + """Return the platform-appropriate config file path.""" + if platform.system() == "Windows": + appdata = os.environ.get("APPDATA", str(Path.home() / "AppData" / 
"Roaming")) + return Path(appdata) / "memsync" / "config.toml" + else: + xdg_config = os.environ.get("XDG_CONFIG_HOME", str(Path.home() / ".config")) + return Path(xdg_config) / "memsync" / "config.toml" diff --git a/memsync/daemon/__init__.py b/memsync/daemon/__init__.py new file mode 100644 index 0000000..8296f2d --- /dev/null +++ b/memsync/daemon/__init__.py @@ -0,0 +1,11 @@ +""" +memsync daemon — optional always-on companion module. + +Install with: pip install memsync[daemon] + +Core memsync never imports from this package. +This module only imports from memsync core, never the other way around. +""" +from memsync import __version__ + +DAEMON_VERSION = __version__ diff --git a/memsync/daemon/capture.py b/memsync/daemon/capture.py new file mode 100644 index 0000000..c790efb --- /dev/null +++ b/memsync/daemon/capture.py @@ -0,0 +1,83 @@ +""" +REST endpoint for mobile note capture. + +Accepts POST /note with a JSON body {"text": "..."} and appends the note +to today's session log. Designed for iPhone Shortcuts, curl, or any HTTP client. + +iPhone Shortcut setup: + Action: "Get Contents of URL" + URL: http://pi.local:5001/note + Method: POST + Headers: X-Memsync-Token: (if capture_token is configured) + Body (JSON): {"text": "Shortcut Input"} + +Token auth is optional. When capture_token is empty, all requests are accepted +(safe for local-network-only use; do not expose port to internet). 
# --- memsync/daemon/__init__.py ---
"""
memsync daemon — optional always-on companion module.

Install with: pip install memsync[daemon]

Core memsync never imports from this package.
This module only imports from memsync core, never the other way around.
"""
from memsync import __version__

DAEMON_VERSION = __version__


# --- memsync/daemon/capture.py ---
"""
REST endpoint for mobile note capture.

Accepts POST /note with a JSON body {"text": "..."} and appends the note
to today's session log. Designed for iPhone Shortcuts, curl, or any HTTP client.

iPhone Shortcut setup:
    Action: "Get Contents of URL"
    URL: http://pi.local:5001/note
    Method: POST
    Headers: X-Memsync-Token: (if capture_token is configured)
    Body (JSON): {"text": "Shortcut Input"}

Token auth is optional. When capture_token is empty, all requests are accepted
(safe for local-network-only use; do not expose port to internet).
"""
import hmac
from datetime import datetime
from pathlib import Path

from flask import Flask, jsonify, request

from memsync.config import Config


def create_capture_app(config: Config) -> Flask:
    """Create and configure the capture endpoint Flask application."""
    app = Flask(__name__)

    def get_session_log() -> Path:
        """Resolve today's session log path under the provider's memory root."""
        from memsync.providers import get_provider

        provider = get_provider(config.provider)
        sync_root = config.sync_root or provider.detect()
        memory_root = provider.get_memory_root(sync_root)
        today = datetime.now().strftime("%Y-%m-%d")
        return memory_root / "sessions" / f"{today}.md"

    def check_token() -> bool:
        """Return True if the request is authorized."""
        token = config.daemon.capture_token
        if not token:
            return True  # no auth configured — accept all (local network only)
        supplied = request.headers.get("X-Memsync-Token", "")
        # compare_digest: constant-time comparison, avoids a timing oracle
        return hmac.compare_digest(supplied, token)

    @app.post("/note")
    def add_note():
        if not check_token():
            return jsonify({"error": "unauthorized"}), 401

        body = request.get_json(silent=True)
        if not body or "text" not in body:
            return jsonify({"error": "missing 'text' field"}), 400

        raw = body["text"]
        # Reject non-string payloads explicitly instead of crashing with a
        # 500 when e.g. {"text": 123} is posted.
        if not isinstance(raw, str):
            return jsonify({"error": "'text' must be a string"}), 400

        text = raw.strip()
        if not text:
            return jsonify({"error": "empty note"}), 400

        log_path = get_session_log()
        log_path.parent.mkdir(parents=True, exist_ok=True)
        timestamp = datetime.now().strftime("%H:%M:%S")

        with open(log_path, "a", encoding="utf-8") as f:
            f.write(f"\n---\n### {timestamp} (captured)\n{text}\n")

        return jsonify({"ok": True, "timestamp": timestamp})

    @app.get("/health")
    def health():
        return jsonify({"ok": True})

    return app


def run_capture(config: Config) -> None:
    """Start the capture endpoint server. Blocks until interrupted."""
    app = create_capture_app(config)
    app.run(
        host="0.0.0.0",  # noqa: S104  # always local-network accessible
        port=config.daemon.capture_port,
        debug=False,
    )
+ """ + today = date.today() + week_ago = today - timedelta(days=7) + + session_logs: list[str] = [] + for i in range(7): + day = week_ago + timedelta(days=i + 1) + log_path = memory_root / "sessions" / f"{day.strftime('%Y-%m-%d')}.md" + if log_path.exists(): + day_label = day.strftime("%A %b %d") + session_logs.append(f"## {day_label}\n{log_path.read_text(encoding='utf-8')}") + + if not session_logs: + return "" + + all_notes = "\n\n".join(session_logs) + + client = anthropic.Anthropic() + response = client.messages.create( + model=config.model, + max_tokens=1000, + system=DIGEST_SYSTEM_PROMPT, + messages=[{"role": "user", "content": all_notes}], + ) + + return response.content[0].text.strip() diff --git a/memsync/daemon/notify.py b/memsync/daemon/notify.py new file mode 100644 index 0000000..6596d96 --- /dev/null +++ b/memsync/daemon/notify.py @@ -0,0 +1,68 @@ +""" +Notification abstraction for the memsync daemon. + +Sends alerts via the channel configured in config.daemon.drift_notify: + "log" — write to the daemon logger (default, always works) + "email" — send via SMTP + "file" — write a flag file to ~/.config/memsync/alerts/ + +Never raises — notification failure must not crash the daemon. +""" +from __future__ import annotations + +import logging +import os + +from memsync.config import Config + +logger = logging.getLogger("memsync.daemon") + + +def notify(config: Config, subject: str, body: str) -> None: + """ + Send a notification via the configured channel. + Silently logs any delivery error rather than propagating it. 
+ """ + try: + match config.daemon.drift_notify: + case "email": + _send_email(config, subject, body) + case "file": + _write_flag_file(subject, body) + case _: + logger.warning("%s: %s", subject, body) + except Exception as e: + logger.error("Notification failed (%s): %s", config.daemon.drift_notify, e) + + +def _send_email(config: Config, subject: str, body: str) -> None: + """Send an alert via SMTP.""" + import smtplib + from email.message import EmailMessage + + # Prefer env var over plaintext config — see DAEMON_PITFALLS.md #9 + password = os.environ.get("MEMSYNC_SMTP_PASSWORD") or config.daemon.digest_smtp_password + + msg = EmailMessage() + msg["Subject"] = subject + msg["From"] = config.daemon.digest_email_from + msg["To"] = config.daemon.digest_email_to + msg.set_content(body) + + with smtplib.SMTP(config.daemon.digest_smtp_host, config.daemon.digest_smtp_port) as smtp: + smtp.starttls() + smtp.login(config.daemon.digest_smtp_user, password) + smtp.send_message(msg) + + +def _write_flag_file(subject: str, body: str) -> None: + """Write an alert to ~/.config/memsync/alerts/ as a timestamped text file.""" + from datetime import datetime + from pathlib import Path + + flag_dir = Path.home() / ".config" / "memsync" / "alerts" + flag_dir.mkdir(parents=True, exist_ok=True) + ts = datetime.now().strftime("%Y%m%d_%H%M%S") + flag_file = flag_dir / f"{ts}_alert.txt" + flag_file.write_text(f"{subject}\n\n{body}\n", encoding="utf-8") + logger.info("Alert written to %s", flag_file) diff --git a/memsync/daemon/scheduler.py b/memsync/daemon/scheduler.py new file mode 100644 index 0000000..46e37e0 --- /dev/null +++ b/memsync/daemon/scheduler.py @@ -0,0 +1,222 @@ +""" +APScheduler wrapper and job definitions for the memsync daemon. 
"""
APScheduler wrapper and job definitions for the memsync daemon.

Four jobs:
    nightly_refresh — reads today's session log and calls the Claude API
    backup_mirror   — copies .claude-memory/ to a local mirror path hourly
    drift_check     — checks whether CLAUDE.md is in sync with GLOBAL_MEMORY.md
    weekly_digest   — generates and emails a weekly summary

All jobs return early gracefully when filesystem state is missing rather than
raising. This is load-bearing — see DAEMON_PITFALLS.md #2.
"""
from __future__ import annotations

import logging
from pathlib import Path

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger

from memsync.config import Config

logger = logging.getLogger("memsync.daemon")


def build_scheduler(
    config: Config, blocking: bool = False
) -> BackgroundScheduler | BlockingScheduler:
    """
    Build and configure the APScheduler instance from config.

    blocking=True  → BlockingScheduler (foreground / testing)
    blocking=False → BackgroundScheduler (daemon mode, runs in a thread)

    Jobs are only added when their feature is enabled (backup_mirror: when a
    mirror path is configured), so a default config yields refresh + drift.
    """
    scheduler: BackgroundScheduler | BlockingScheduler = (
        BlockingScheduler() if blocking else BackgroundScheduler()
    )

    if config.daemon.refresh_enabled:
        scheduler.add_job(
            func=job_nightly_refresh,
            trigger=CronTrigger.from_crontab(config.daemon.refresh_schedule),
            args=[config],
            id="nightly_refresh",
            name="Nightly memory refresh",
            misfire_grace_time=3600,  # run even if missed by up to 1 hour
        )

    if config.daemon.backup_mirror_path:
        scheduler.add_job(
            func=job_backup_mirror,
            trigger=CronTrigger.from_crontab(config.daemon.backup_mirror_schedule),
            args=[config],
            id="backup_mirror",
            name="Backup mirror sync",
            misfire_grace_time=3600,
        )

    if config.daemon.drift_check_enabled:
        scheduler.add_job(
            func=job_drift_check,
            trigger="interval",
            hours=config.daemon.drift_check_interval_hours,
            args=[config],
            id="drift_check",
            name="CLAUDE.md drift check",
        )

    if config.daemon.digest_enabled:
        scheduler.add_job(
            func=job_weekly_digest,
            trigger=CronTrigger.from_crontab(config.daemon.digest_schedule),
            args=[config],
            id="weekly_digest",
            name="Weekly digest email",
            # Same grace window as the other cron jobs: a weekly digest that
            # fires an hour late is far better than one silently skipped.
            misfire_grace_time=3600,
        )

    return scheduler


def job_nightly_refresh(config: Config) -> None:
    """
    Read today's session log and run a refresh if there are notes.
    Silently skips if no session log exists for today (normal — rest days happen).
    Never raises — a crash here would take down the whole scheduler.
    """
    from datetime import date

    from memsync.backups import backup
    from memsync.claude_md import sync as sync_claude_md
    from memsync.providers import get_provider
    from memsync.sync import refresh_memory_content

    try:
        provider = get_provider(config.provider)
        sync_root = config.sync_root or provider.detect()
        if not sync_root:
            logger.warning("nightly_refresh: sync_root not found, skipping")
            return

        memory_root = provider.get_memory_root(sync_root)
        today = date.today().strftime("%Y-%m-%d")
        session_log = memory_root / "sessions" / f"{today}.md"

        if not session_log.exists():
            logger.debug("nightly_refresh: no session log for %s, skipping", today)
            return

        notes = session_log.read_text(encoding="utf-8").strip()
        if not notes:
            logger.debug("nightly_refresh: session log empty for %s, skipping", today)
            return

        memory_path = memory_root / "GLOBAL_MEMORY.md"
        if not memory_path.exists():
            logger.warning("nightly_refresh: GLOBAL_MEMORY.md not found, skipping")
            return

        current_memory = memory_path.read_text(encoding="utf-8")
        result = refresh_memory_content(notes, current_memory, config)

        if result["changed"]:
            # Back up before overwriting so a bad merge is always recoverable
            backup(memory_path, memory_root / "backups")
            memory_path.write_text(result["updated_content"], encoding="utf-8")
            sync_claude_md(memory_path, config.claude_md_target)
            logger.info("nightly_refresh: memory updated for %s", today)
        else:
            logger.info("nightly_refresh: no changes for %s", today)

    except Exception:
        logger.exception("nightly_refresh: unexpected error")


def job_backup_mirror(config: Config) -> None:
    """
    Copy all files from .claude-memory/ to the configured local mirror path.
    Preserves timestamps. Creates the mirror directory if missing.
    Never raises.
    """
    import shutil

    from memsync.providers import get_provider

    try:
        provider = get_provider(config.provider)
        sync_root = config.sync_root or provider.detect()
        if not sync_root:
            logger.warning("backup_mirror: sync_root not found, skipping")
            return

        memory_root = provider.get_memory_root(sync_root)
        mirror = Path(config.daemon.backup_mirror_path).expanduser()
        mirror.mkdir(parents=True, exist_ok=True)

        copied = 0
        for src in memory_root.rglob("*"):
            if src.is_file():
                rel = src.relative_to(memory_root)
                dst = mirror / rel
                dst.parent.mkdir(parents=True, exist_ok=True)
                shutil.copy2(src, dst)  # copy2 preserves mtime
                copied += 1

        logger.info("backup_mirror: copied %d file(s) to %s", copied, mirror)

    except Exception:
        logger.exception("backup_mirror: unexpected error")


def job_drift_check(config: Config) -> None:
    """
    Check if CLAUDE.md is stale relative to GLOBAL_MEMORY.md.
    Fires a notification via the configured channel if out of sync.
    Never raises.
    """
    from memsync.claude_md import is_synced
    from memsync.daemon.notify import notify
    from memsync.providers import get_provider

    try:
        provider = get_provider(config.provider)
        sync_root = config.sync_root or provider.detect()
        if not sync_root:
            return

        memory_root = provider.get_memory_root(sync_root)
        memory_path = memory_root / "GLOBAL_MEMORY.md"

        if not memory_path.exists():
            return

        if not is_synced(memory_path, config.claude_md_target):
            notify(
                config,
                subject="memsync: CLAUDE.md is out of sync",
                body=(
                    f"CLAUDE.md at {config.claude_md_target} does not match "
                    f"GLOBAL_MEMORY.md at {memory_path}.\n"
                    "Run: memsync refresh to resync."
                ),
            )
            logger.warning("drift_check: CLAUDE.md is out of sync")
        else:
            logger.debug("drift_check: CLAUDE.md is in sync")

    except Exception:
        logger.exception("drift_check: unexpected error")


def job_weekly_digest(config: Config) -> None:
    """
    Generate and send a weekly digest of session logs.
    Delegates to memsync.daemon.digest. Never raises.
    """
    from memsync.daemon.digest import generate_and_send

    try:
        generate_and_send(config)
        logger.info("weekly_digest: digest sent")
    except Exception:
        logger.exception("weekly_digest: unexpected error")
"""
System service installation for the memsync daemon.

Supports:
    Linux   — systemd unit file at /etc/systemd/system/memsync.service
    Mac     — launchd plist at ~/Library/LaunchAgents/com.memsync.daemon.plist
    Windows — not supported (use Task Scheduler with 'memsync daemon start --detach')

IMPORTANT: systemd install requires root (sudo memsync daemon install).
The unit file contains a placeholder for ANTHROPIC_API_KEY. After install,
use 'systemctl edit memsync' to add the key in an override file rather than
editing the unit file directly — override files survive package updates.
"""
from __future__ import annotations

import platform
import subprocess
from pathlib import Path

SYSTEMD_UNIT = """\
[Unit]
Description=memsync daemon
After=network.target

[Service]
Type=simple
ExecStart={memsync_bin} daemon start
Restart=on-failure
RestartSec=10
Environment=ANTHROPIC_API_KEY=

[Install]
WantedBy=multi-user.target
"""

# Valid property-list XML: launchd rejects a plist without the key/string
# structure, so the template must be well-formed, not just key names.
LAUNCHD_PLIST = """\
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
  "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>com.memsync.daemon</string>
    <key>ProgramArguments</key>
    <array>
        <string>{memsync_bin}</string>
        <string>daemon</string>
        <string>start</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <true/>
    <key>StandardOutPath</key>
    <string>{log_dir}/memsync-daemon.log</string>
    <key>StandardErrorPath</key>
    <string>{log_dir}/memsync-daemon.err</string>
</dict>
</plist>
"""


def install_service() -> None:
    """Install the memsync daemon as a system service.

    Raises NotImplementedError on Windows and FileNotFoundError when the
    memsync executable cannot be located on PATH.
    """
    system = platform.system()
    memsync_bin = _find_memsync_bin()

    if system == "Linux":
        _install_systemd(memsync_bin)
    elif system == "Darwin":
        _install_launchd(memsync_bin)
    else:
        raise NotImplementedError(
            "Service install is not supported on Windows.\n"
            "Use Task Scheduler to run 'memsync daemon start --detach' on boot."
        )


def uninstall_service() -> None:
    """Remove the memsync daemon system service registration."""
    system = platform.system()
    if system == "Linux":
        _uninstall_systemd()
    elif system == "Darwin":
        _uninstall_launchd()
    else:
        raise NotImplementedError("Service uninstall not supported on Windows.")


def _install_systemd(memsync_bin: str) -> None:
    """Write the systemd unit, then reload/enable/start it (requires root)."""
    unit_path = Path("/etc/systemd/system/memsync.service")
    unit_content = SYSTEMD_UNIT.format(memsync_bin=memsync_bin)
    unit_path.write_text(unit_content, encoding="utf-8")
    subprocess.run(["systemctl", "daemon-reload"], check=True)
    subprocess.run(["systemctl", "enable", "memsync"], check=True)
    subprocess.run(["systemctl", "start", "memsync"], check=True)
    print(f"Service installed: {unit_path}")
    print("Edit ANTHROPIC_API_KEY via: sudo systemctl edit memsync")
    print("Then restart with: sudo systemctl restart memsync")


def _install_launchd(memsync_bin: str) -> None:
    """Write the per-user LaunchAgent plist and load it via launchctl."""
    log_dir = Path.home() / "Library" / "Logs" / "memsync"
    log_dir.mkdir(parents=True, exist_ok=True)
    plist_dir = Path.home() / "Library" / "LaunchAgents"
    plist_dir.mkdir(parents=True, exist_ok=True)
    plist_path = plist_dir / "com.memsync.daemon.plist"
    plist_content = LAUNCHD_PLIST.format(memsync_bin=memsync_bin, log_dir=log_dir)
    plist_path.write_text(plist_content, encoding="utf-8")
    subprocess.run(["launchctl", "load", str(plist_path)], check=True)
    print(f"Service installed: {plist_path}")
    print(f"Logs: {log_dir}/memsync-daemon.log")


def _uninstall_systemd() -> None:
    """Stop/disable the unit (best-effort), remove the file, reload systemd."""
    subprocess.run(["systemctl", "stop", "memsync"], check=False)
    subprocess.run(["systemctl", "disable", "memsync"], check=False)
    unit_path = Path("/etc/systemd/system/memsync.service")
    if unit_path.exists():
        unit_path.unlink()
    subprocess.run(["systemctl", "daemon-reload"], check=True)
    print("Service removed.")
+ if plist_path.exists(): + subprocess.run(["launchctl", "unload", str(plist_path)], check=False) + plist_path.unlink() + print("Service removed.") + + +def _find_memsync_bin() -> str: + import shutil + + bin_path = shutil.which("memsync") + if not bin_path: + raise FileNotFoundError( + "memsync not found in PATH. Install with: pip install memsync[daemon]" + ) + return bin_path diff --git a/memsync/daemon/watchdog.py b/memsync/daemon/watchdog.py new file mode 100644 index 0000000..6cf2f44 --- /dev/null +++ b/memsync/daemon/watchdog.py @@ -0,0 +1,17 @@ +""" +Drift watchdog for the memsync daemon. + +Thin wrapper that exposes drift detection as a standalone callable. +The scheduler calls job_drift_check from scheduler.py directly; +this module exists for users who want to invoke drift checking outside +the scheduler (e.g. from a cron job or ad-hoc script). +""" +from __future__ import annotations + +from memsync.config import Config +from memsync.daemon.scheduler import job_drift_check + + +def run_drift_check(config: Config) -> None: + """Run a single drift check immediately, outside the scheduler.""" + job_drift_check(config) diff --git a/memsync/daemon/web.py b/memsync/daemon/web.py new file mode 100644 index 0000000..4eec27e --- /dev/null +++ b/memsync/daemon/web.py @@ -0,0 +1,109 @@ +""" +Flask web UI for memsync daemon. + +Provides a browser-based view/edit interface for GLOBAL_MEMORY.md, +accessible on the local network at http://:/ (default :5000). + +Intended for use on a home network only. Do not expose to the public internet. +See DAEMON.md for Flask-in-production guidance. 
+""" +from __future__ import annotations + +import datetime +from pathlib import Path + +from flask import Flask, redirect, render_template_string, request + +from memsync.backups import backup +from memsync.claude_md import sync as sync_claude_md +from memsync.config import Config + +# Inline template — no separate template files needed for this simple UI +TEMPLATE = """ + + + + memsync — Global Memory + + + + +

Global Memory

+
+ {{ memory_path }}
+ Last modified: {{ last_modified }} + {% if message %} — {{ message }}{% endif %} +
+
+ +
+ + Cancel +
+
+ + +""" + + +def create_app(config: Config) -> Flask: + """Create and configure the Flask web UI application.""" + app = Flask(__name__) + app.config["MEMSYNC_CONFIG"] = config + + def get_memory_path() -> Path: + from memsync.providers import get_provider + + provider = get_provider(config.provider) + sync_root = config.sync_root or provider.detect() + return provider.get_memory_root(sync_root) / "GLOBAL_MEMORY.md" + + @app.get("/") + def index() -> str: + path = get_memory_path() + content = path.read_text(encoding="utf-8") if path.exists() else "" + last_mod = ( + datetime.datetime.fromtimestamp(path.stat().st_mtime).strftime("%Y-%m-%d %H:%M") + if path.exists() + else "never" + ) + return render_template_string( + TEMPLATE, + content=content, + memory_path=path, + last_modified=last_mod, + message=request.args.get("message", ""), + message_class=request.args.get("cls", "saved"), + ) + + @app.post("/save") + def save(): + path = get_memory_path() + new_content = request.form["content"] + try: + if path.exists(): + backup(path, path.parent / "backups") + path.write_text(new_content, encoding="utf-8") + sync_claude_md(path, config.claude_md_target) + return redirect("/?message=Saved+successfully&cls=saved") + except Exception as e: + return redirect(f"/?message=Error:+{e}&cls=error") + + return app + + +def run_web(config: Config) -> None: + """Start the web UI server. Blocks until interrupted.""" + app = create_app(config) + app.run( + host=config.daemon.web_ui_host, + port=config.daemon.web_ui_port, + debug=False, + ) diff --git a/memsync/providers/__init__.py b/memsync/providers/__init__.py new file mode 100644 index 0000000..eba1afe --- /dev/null +++ b/memsync/providers/__init__.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from pathlib import Path + + +class BaseProvider(ABC): + """ + A sync provider knows how to find the cloud storage root on the current machine. + That's its only job. 
class BaseProvider(ABC):
    """
    A sync provider knows how to find the cloud storage root on the current machine.
    That's its only job.  Memory structure lives above it.
    """

    name: str  # short id used in config: "onedrive", "icloud", "gdrive", "custom"
    display_name: str  # human-readable: "OneDrive", "iCloud Drive", "Google Drive", "Custom Path"

    @abstractmethod
    def detect(self) -> Path | None:
        """
        Try to find this provider's sync root on the current machine.
        Returns the path if found and accessible, None otherwise.
        Never raises — detection failure is not an error.
        """

    @abstractmethod
    def is_available(self) -> bool:
        """
        Quick check: is this provider installed and its sync folder accessible?
        Should be fast — no API calls, just filesystem checks.
        """

    def get_memory_root(self, sync_root: Path) -> Path:
        """
        Where inside the sync root to store memsync data.
        Default is <sync_root>/.claude-memory
        Providers can override if needed (e.g. iCloud has invisible dot-folders).
        """
        return sync_root / ".claude-memory"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(name={self.name!r})"


# Provider registry — add new providers here
_REGISTRY: dict[str, type[BaseProvider]] = {}


def register(cls: type[BaseProvider]) -> type[BaseProvider]:
    """Class decorator that records *cls* in the provider registry under its name."""
    _REGISTRY[cls.name] = cls
    return cls


def get_provider(name: str) -> BaseProvider:
    """Instantiate the provider registered under *name*.

    Raises KeyError (listing the known names) when *name* is unregistered.
    """
    try:
        provider_cls = _REGISTRY[name]
    except KeyError:
        available = ", ".join(_REGISTRY)
        raise KeyError(f"Unknown provider {name!r}. Available: {available}") from None
    return provider_cls()


def all_providers() -> list[BaseProvider]:
    """One fresh instance of every registered provider, in registration order."""
    return [provider_cls() for provider_cls in _REGISTRY.values()]
+ """ + return [p for p in all_providers() if p.detect() is not None] + + +# Import providers to trigger registration — order determines priority +from memsync.providers import custom, gdrive, icloud, onedrive # noqa: E402, F401 diff --git a/memsync/providers/custom.py b/memsync/providers/custom.py new file mode 100644 index 0000000..5ac93fb --- /dev/null +++ b/memsync/providers/custom.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +from pathlib import Path + +from memsync.providers import BaseProvider, register + + +@register +class CustomProvider(BaseProvider): + """ + Fallback for any sync service not explicitly supported. + User sets the path manually via: memsync config set sync_root /path/to/folder + """ + name = "custom" + display_name = "Custom Path" + + def __init__(self, path: Path | None = None): + self._path = path + + def detect(self) -> Path | None: + # Custom provider only works if path is explicitly configured + if self._path and self._path.exists(): + return self._path + return None + + def is_available(self) -> bool: + return self.detect() is not None diff --git a/memsync/providers/gdrive.py b/memsync/providers/gdrive.py new file mode 100644 index 0000000..0faf69a --- /dev/null +++ b/memsync/providers/gdrive.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import os +import platform +from pathlib import Path + +from memsync.providers import BaseProvider, register + + +@register +class GoogleDriveProvider(BaseProvider): + name = "gdrive" + display_name = "Google Drive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Darwin": + # Google Drive for Desktop (current client) + cloud_storage = Path.home() / "Library" / "CloudStorage" + if cloud_storage.exists(): + for d in cloud_storage.iterdir(): + if 
d.name.startswith("GoogleDrive") and d.is_dir(): + # My Drive is inside the account folder + my_drive = d / "My Drive" + if my_drive.exists(): + return my_drive + return d + + # Legacy Backup and Sync path + legacy = Path.home() / "Google Drive" + if legacy.exists(): + return legacy + + elif system == "Windows": + # Google Drive for Desktop on Windows + gdrive_env = os.environ.get("GDRIVE_ROOT") + if gdrive_env: + p = Path(gdrive_env) + if p.exists(): + return p + + username = os.environ.get("USERNAME", "") + for candidate in [ + Path.home() / "Google Drive", + Path(f"C:/Users/{username}/Google Drive"), + # Google Drive for Desktop default + Path("G:/My Drive"), + Path("G:/"), + ]: + if candidate.exists(): + return candidate + + elif system == "Linux": + # Google Drive via google-drive-ocamlfuse or rclone + for candidate in [ + Path.home() / "GoogleDrive", + Path.home() / "google-drive", + Path.home() / "gdrive", + ]: + if candidate.exists(): + return candidate + + return None diff --git a/memsync/providers/icloud.py b/memsync/providers/icloud.py new file mode 100644 index 0000000..10fb261 --- /dev/null +++ b/memsync/providers/icloud.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +import os +import platform +from pathlib import Path + +from memsync.providers import BaseProvider, register + + +@register +class ICloudProvider(BaseProvider): + name = "icloud" + display_name = "iCloud Drive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Darwin": + # Primary path on Mac + icloud = Path.home() / "Library" / "Mobile Documents" / "com~apple~CloudDocs" + if icloud.exists(): + return icloud + + elif system == "Windows": + # iCloud for Windows installs here + username = os.environ.get("USERNAME", "") + for candidate in [ + Path.home() / "iCloudDrive", + 
Path(f"C:/Users/{username}/iCloudDrive"), + ]: + if candidate.exists(): + return candidate + + # Linux: iCloud has no official client — not supported + return None + + def get_memory_root(self, sync_root: Path) -> Path: + # iCloud hides dot-folders on Mac — use a visible name instead + return sync_root / "claude-memory" diff --git a/memsync/providers/onedrive.py b/memsync/providers/onedrive.py new file mode 100644 index 0000000..8d59b90 --- /dev/null +++ b/memsync/providers/onedrive.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +import os +import platform +from pathlib import Path + +from memsync.providers import BaseProvider, register + + +@register +class OneDriveProvider(BaseProvider): + name = "onedrive" + display_name = "OneDrive" + + def detect(self) -> Path | None: + try: + return self._find() + except Exception: + return None + + def is_available(self) -> bool: + return self.detect() is not None + + def _find(self) -> Path | None: + system = platform.system() + + if system == "Windows": + # Windows sets these env vars when OneDrive is running + for var in ("OneDrive", "ONEDRIVE", "OneDriveConsumer", "OneDriveCommercial"): + val = os.environ.get(var) + if val: + p = Path(val) + if p.exists(): + return p + # Fallback: common default paths + username = os.environ.get("USERNAME", "") + for candidate in [ + Path.home() / "OneDrive", + Path(f"C:/Users/{username}/OneDrive"), + ]: + if candidate.exists(): + return candidate + + elif system == "Darwin": + # Mac: OneDrive doesn't set env vars, check filesystem + # Personal OneDrive + personal = Path.home() / "OneDrive" + if personal.exists(): + return personal + + # OneDrive via CloudStorage (newer Mac client) + cloud_storage = Path.home() / "Library" / "CloudStorage" + if cloud_storage.exists(): + # Personal first, then business + for d in sorted(cloud_storage.iterdir()): + if d.name == "OneDrive-Personal": + return d + for d in sorted(cloud_storage.iterdir()): + if d.name.startswith("OneDrive") and 
@register
class OneDriveProvider(BaseProvider):
    """Detects the local OneDrive sync folder on Windows, macOS, or Linux."""

    name = "onedrive"
    display_name = "OneDrive"

    def detect(self) -> Path | None:
        # Detection never raises (BaseProvider contract); any error → None.
        try:
            return self._find()
        except Exception:
            return None

    def is_available(self) -> bool:
        return self.detect() is not None

    def _find(self) -> Path | None:
        system = platform.system()

        if system == "Windows":
            # Windows sets these env vars when OneDrive is running
            for var in ("OneDrive", "ONEDRIVE", "OneDriveConsumer", "OneDriveCommercial"):
                val = os.environ.get(var)
                if val:
                    p = Path(val)
                    if p.exists():
                        return p
            # Fallback: common default paths
            username = os.environ.get("USERNAME", "")
            for candidate in [
                Path.home() / "OneDrive",
                Path(f"C:/Users/{username}/OneDrive"),
            ]:
                if candidate.exists():
                    return candidate

        elif system == "Darwin":
            # Mac: OneDrive doesn't set env vars, check filesystem
            # Personal OneDrive
            personal = Path.home() / "OneDrive"
            if personal.exists():
                return personal

            # OneDrive via CloudStorage (newer Mac client)
            cloud_storage = Path.home() / "Library" / "CloudStorage"
            if cloud_storage.exists():
                # Personal first, then business
                for d in sorted(cloud_storage.iterdir()):
                    if d.name == "OneDrive-Personal":
                        return d
                for d in sorted(cloud_storage.iterdir()):
                    if d.name.startswith("OneDrive") and d.is_dir():
                        return d

        else:
            # Linux: OneDrive via rclone or manual mount
            for candidate in [
                Path.home() / "OneDrive",
                Path.home() / "onedrive",
            ]:
                if candidate.exists():
                    return candidate

        return None


# --- memsync/sync.py (module head) ---
from __future__ import annotations

import re
from pathlib import Path

import anthropic

from memsync.config import Config

# The system prompt is load-bearing — see PITFALLS.md #8 before editing.
# Specific phrases matter; don't casually reword them.
SYSTEM_PROMPT = """You are maintaining a persistent global memory file for an AI assistant user.
This file is loaded at the start of every Claude Code session, on every machine and project.
It is the user's identity layer — not project docs, not cold storage.

YOUR JOB:
- Merge new session notes into the existing memory file
- Keep the file tight (under 400 lines)
- Update facts that have changed
- Demote completed items from "Current priorities" to a brief "Recent completions" section
- Preserve the user's exact voice, formatting, and section structure
- NEVER remove entries under any "Hard constraints" or "Constraints" section — only append
- If nothing meaningful changed, return the file UNCHANGED

RETURN: Only the updated GLOBAL_MEMORY.md content. No explanation, no preamble."""
+ """ + client = anthropic.Anthropic() + + user_prompt = f"""\ +CURRENT GLOBAL MEMORY: +{current_memory} + +SESSION NOTES: +{notes}""" + + response = client.messages.create( + model=config.model, + max_tokens=4096, + system=SYSTEM_PROMPT, + messages=[{"role": "user", "content": user_prompt}], + ) + + updated_content = response.content[0].text.strip() + + # Enforce hard constraints in code — model can silently drop them (PITFALLS #1) + updated_content = enforce_hard_constraints(current_memory, updated_content) + + changed = updated_content != current_memory.strip() + + # Detect truncation via stop_reason — more reliable than content heuristics (PITFALLS #10) + truncated = response.stop_reason == "max_tokens" + + return { + "updated_content": updated_content, + "changed": changed, + "truncated": truncated, + } + + +def enforce_hard_constraints(old: str, new: str) -> str: + """ + Re-append any hard constraint lines the model dropped. + Hard constraints are append-only by design — they must never be lost + through compaction. This is enforced in Python, not by prompt alone. + """ + old_constraints = _extract_constraints(old) + new_constraints = _extract_constraints(new) + + dropped = [line for line in old_constraints if line not in new_constraints] + if not dropped: + return new + + return _reinsert_constraints(new, dropped) + + +def _extract_constraints(text: str) -> list[str]: + """ + Extract bullet lines from the Hard constraints / Constraints section. + Returns list of non-empty stripped lines within the section. 
+ """ + lines = text.splitlines() + in_section = False + constraints: list[str] = [] + + for line in lines: + if re.match(r"^##\s+(Hard constraints|Constraints)\s*$", line, re.IGNORECASE): + in_section = True + continue + if in_section: + # Another heading ends the section + if re.match(r"^#{1,6}\s+", line) and not re.match( + r"^##\s+(Hard constraints|Constraints)\s*$", line, re.IGNORECASE + ): + break + stripped = line.strip() + if stripped: + constraints.append(stripped) + + return constraints + + +def _reinsert_constraints(text: str, dropped: list[str]) -> str: + """ + Find the Hard constraints section in text and append the dropped lines to it. + If the section doesn't exist, append it at the end. + """ + lines = text.splitlines() + insert_idx: int | None = None + + in_section = False + for i, line in enumerate(lines): + if re.match(r"^##\s+(Hard constraints|Constraints)\s*$", line, re.IGNORECASE): + in_section = True + continue + if in_section: + if re.match(r"^#{1,6}\s+", line): + # Insert before the next heading + insert_idx = i + break + insert_idx = i + 1 # keep updating to end of section + + if insert_idx is not None: + for item in dropped: + lines.insert(insert_idx, item) + insert_idx += 1 + return "\n".join(lines) + + # Section not found — append it + appended = "\n".join(lines) + appended += "\n\n## Hard constraints\n" + appended += "\n".join(dropped) + return appended + + + +def load_or_init_memory(path: Path) -> str: + """ + Read memory file, or return the starter template if it doesn't exist yet. + """ + if path.exists(): + return path.read_text(encoding="utf-8") + + return """\ + +# Global Memory + +> Loaded by Claude Code at session start on all machines and projects. +> Edit directly or run: memsync refresh --notes "..." 
def log_session_notes(notes: str, session_dir: Path) -> None:
    """Append session notes to today's dated log file. Append-only, never pruned.

    Entries land in ``<session_dir>/<YYYY-MM-DD>.md`` as::

        \\n---\\n### HH:MM:SS\\n<notes>\\n
    """
    from datetime import datetime

    # Single clock read: the old code called now() twice, so an entry written
    # exactly at midnight could get yesterday's filename with today's time
    # (or vice versa). One snapshot keeps date and time consistent.
    now = datetime.now()
    log_path = session_dir / f"{now.strftime('%Y-%m-%d')}.md"

    with open(log_path, "a", encoding="utf-8") as f:
        f.write(f"\n---\n### {now.strftime('%H:%M:%S')}\n{notes}\n")
"pytest-mock>=3.12", + "ruff>=0.4", + "bandit[toml]>=1.7", +] + +[tool.setuptools.packages.find] +where = ["."] +include = ["memsync*"] + +[tool.pytest.ini_options] +testpaths = ["tests"] +markers = [ + "smoke: fast read-only checks — run any time, no API calls, no filesystem writes", +] +addopts = "--cov=memsync --cov-fail-under=80 --cov-report=term-missing" + +[tool.coverage.run] +omit = [ + "memsync/daemon/service.py", # requires systemd/launchd — OS-specific privileged ops +] + +[tool.ruff] +line-length = 100 +target-version = "py311" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "UP", # pyupgrade + "S", # flake8-bandit (security) + "B", # flake8-bugbear +] +ignore = [ + "S101", # assert used — acceptable in tests + "S603", # subprocess — not used here but avoid false positives + "S607", # subprocess partial path — not used here + "B008", # do not perform function calls in default arguments +] + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["S101", "S106"] # asserts and hardcoded passwords OK in tests + +[tool.bandit] +targets = ["memsync"] +skips = [ + "B101", # assert_used — we don't use asserts in production code + "B608", # hardcoded_sql_expressions — false positive on error message strings; no SQL in codebase +] +exclude_dirs = ["tests"] diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..ee21b7a --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import sys + +import pytest + +from memsync.config import Config + +# Ensure UTF-8 stdout/stderr for the entire test session on Windows. +# CLI commands print ✓/✗ which fail on cp1252 without this. 
# Ensure UTF-8 stdout/stderr for the entire test session on Windows.
# CLI commands print ✓/✗ which fail on cp1252 without this.
if hasattr(sys.stdout, "reconfigure"):
    sys.stdout.reconfigure(encoding="utf-8", errors="replace")
if hasattr(sys.stderr, "reconfigure"):
    sys.stderr.reconfigure(encoding="utf-8", errors="replace")


@pytest.fixture
def tmp_config(tmp_path, monkeypatch):
    """
    Config pointing entirely to tmp_path — no real filesystem touched.
    Creates the expected directory structure under tmp_path/sync/.claude-memory/
    Returns (config, tmp_path).
    """
    sync_root = tmp_path / "sync"
    memory_root = sync_root / ".claude-memory"
    (memory_root / "backups").mkdir(parents=True)
    (memory_root / "sessions").mkdir(parents=True)

    config = Config(
        provider="custom",
        sync_root=sync_root,
        claude_md_target=tmp_path / ".claude" / "CLAUDE.md",
    )

    # Redirect config persistence into tmp_path so tests never touch the
    # user's real config file.
    monkeypatch.setattr(
        "memsync.config.get_config_path",
        lambda: tmp_path / "config.toml",
    )

    return config, tmp_path


@pytest.fixture
def memory_file(tmp_config):
    """A tmp_config with a pre-written GLOBAL_MEMORY.md.

    Returns (config, tmp_path, global_memory_path).
    """
    config, tmp_path = tmp_config
    memory_root = config.sync_root / ".claude-memory"
    global_memory = memory_root / "GLOBAL_MEMORY.md"
    # NOTE(review): the body starts with a bare newline — presumably a leading
    # marker tag was intended here; confirm against the init template.
    global_memory.write_text(
        "\n"
        "# Global Memory\n\n"
        "## Identity & context\n"
        "- Test user, software engineer\n\n"
        "## Hard constraints\n"
        "- Always backup before writing\n"
        "- Never skip tests\n",
        encoding="utf-8",
    )
    return config, tmp_path, global_memory


# --- tests/test_backups.py ---
@pytest.fixture
def backup_env(tmp_path):
    """A source file and backup directory in tmp_path."""
    source = tmp_path / "GLOBAL_MEMORY.md"
    source.write_text("# memory content", encoding="utf-8")
    backup_dir = tmp_path / "backups"
    backup_dir.mkdir()
    return source, backup_dir
class TestBackup:
    """backup() copies the source into backup_dir under a timestamped name."""

    def test_creates_file_with_timestamp_name(self, backup_env):
        source, backup_dir = backup_env
        result = backup(source, backup_dir)
        assert result.exists()
        assert result.name.startswith("GLOBAL_MEMORY_")
        assert result.suffix == ".md"

    def test_backup_content_matches_source(self, backup_env):
        source, backup_dir = backup_env
        result = backup(source, backup_dir)
        assert result.read_text(encoding="utf-8") == source.read_text(encoding="utf-8")

    def test_successive_backups_have_unique_names(self, backup_env):
        source, backup_dir = backup_env
        b1 = backup(source, backup_dir)
        # Sleep so the two backups cannot share a timestamp-derived name —
        # presumably names are second-granularity; TODO confirm against
        # backups.backup's naming scheme.
        time.sleep(1)
        b2 = backup(source, backup_dir)
        assert b1.name != b2.name


class TestListBackups:
    """list_backups() returns existing backups sorted newest-first."""

    def test_returns_newest_first(self, backup_env):
        source, backup_dir = backup_env
        b1 = backup(source, backup_dir)
        time.sleep(1)  # guarantee b2 is strictly newer than b1
        b2 = backup(source, backup_dir)
        listed = list_backups(backup_dir)
        assert listed[0] == b2
        assert listed[1] == b1

    def test_empty_dir_returns_empty_list(self, tmp_path):
        d = tmp_path / "backups"
        d.mkdir()
        assert list_backups(d) == []


class TestLatestBackup:
    """latest_backup() returns the newest backup, or None when there are none."""

    def test_returns_most_recent(self, backup_env):
        source, backup_dir = backup_env
        backup(source, backup_dir)
        time.sleep(1)
        b2 = backup(source, backup_dir)
        assert latest_backup(backup_dir) == b2

    def test_returns_none_when_no_backups(self, tmp_path):
        d = tmp_path / "backups"
        d.mkdir()
        assert latest_backup(d) is None


class TestPrune:
    """prune() deletes backups older than keep_days and reports what it removed."""

    def test_removes_old_backups(self, backup_env):
        source, backup_dir = backup_env
        b = backup(source, backup_dir)
        deleted = prune(backup_dir, keep_days=0)  # keep_days=0: everything is "old"
        assert b in deleted
        assert not b.exists()

    def test_keeps_recent_backups(self, backup_env):
        source, backup_dir = backup_env
        b = backup(source, backup_dir)
        deleted = prune(backup_dir, keep_days=30)
        assert b not in deleted
        assert b.exists()

    def test_skips_files_with_unexpected_names(self, backup_env):
        _, backup_dir = backup_env
        stray = backup_dir / "not-a-backup.md"
        stray.write_text("stray", encoding="utf-8")
        # Should not raise, should not delete stray file
        prune(backup_dir, keep_days=0)
        assert stray.exists()

    def test_returns_list_of_deleted_paths(self, backup_env):
        source, backup_dir = backup_env
        backup(source, backup_dir)
        time.sleep(1)
        backup(source, backup_dir)
        deleted = prune(backup_dir, keep_days=0)
        assert len(deleted) == 2
        assert all(isinstance(p, Path) for p in deleted)
# --- tests/test_claude_md.py ---
@pytest.fixture
def memory_and_target(tmp_path):
    """A real memory file plus a not-yet-created CLAUDE.md target path."""
    memory = tmp_path / "sync" / ".claude-memory" / "GLOBAL_MEMORY.md"
    memory.parent.mkdir(parents=True)
    memory.write_text("# Global Memory\n- test content", encoding="utf-8")
    target = tmp_path / ".claude" / "CLAUDE.md"
    return memory, target


class TestSyncWindows:
    """On Windows, sync() copies the file (symlinks need privileges there)."""

    def test_creates_copy_on_windows(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        # Force the Windows code path regardless of the host OS.
        monkeypatch.setattr(platform, "system", lambda: "Windows")

        sync(memory, target)

        assert target.exists()
        assert not target.is_symlink()
        assert target.read_bytes() == memory.read_bytes()

    def test_copy_is_idempotent(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Windows")

        sync(memory, target)
        sync(memory, target)  # should not raise

        assert target.read_bytes() == memory.read_bytes()

    def test_creates_parent_dirs(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Windows")
        # Target parent doesn't exist yet
        assert not target.parent.exists()

        sync(memory, target)
        assert target.parent.exists()
class TestSyncUnix:
    """On Mac/Linux, sync() symlinks the CLAUDE.md target to the memory file."""

    @pytest.mark.skipif(platform.system() == "Windows", reason="symlinks require Unix")
    def test_creates_symlink(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Darwin")

        sync(memory, target)

        assert target.is_symlink()
        assert target.resolve() == memory.resolve()

    @pytest.mark.skipif(platform.system() == "Windows", reason="symlinks require Unix")
    def test_symlink_is_idempotent(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Darwin")

        sync(memory, target)
        sync(memory, target)  # already correct — should not raise

        assert target.is_symlink()

    @pytest.mark.skipif(platform.system() == "Windows", reason="symlinks require Unix")
    def test_backs_up_existing_file_before_linking(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Darwin")
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_text("old content", encoding="utf-8")

        sync(memory, target)

        # A pre-existing real file must be preserved as a .bak next to the link.
        bak = target.with_suffix(".pre-memsync.bak")
        assert bak.exists()
        assert bak.read_text(encoding="utf-8") == "old content"
        assert target.is_symlink()


class TestIsSynced:
    """is_synced() reports whether target currently mirrors the memory file."""

    def test_false_when_target_missing(self, memory_and_target):
        memory, target = memory_and_target
        assert is_synced(memory, target) is False

    def test_true_after_sync_on_windows(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Windows")
        sync(memory, target)
        assert is_synced(memory, target) is True

    def test_false_when_content_differs(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Windows")
        sync(memory, target)
        memory.write_text("updated content", encoding="utf-8")
        assert is_synced(memory, target) is False

    @pytest.mark.skipif(platform.system() == "Windows", reason="symlinks require Unix")
    def test_true_after_symlink(self, memory_and_target, monkeypatch):
        memory, target = memory_and_target
        monkeypatch.setattr(platform, "system", lambda: "Darwin")
        sync(memory, target)
        assert is_synced(memory, target) is True
encoding="utf-8") + assert is_synced(memory, target) is False + + @pytest.mark.skipif(platform.system() == "Windows", reason="symlinks require Unix") + def test_true_after_symlink(self, memory_and_target, monkeypatch): + memory, target = memory_and_target + monkeypatch.setattr(platform, "system", lambda: "Darwin") + sync(memory, target) + assert is_synced(memory, target) is True diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..ddd7bef --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,655 @@ +from __future__ import annotations + +from unittest.mock import patch + +import pytest + +from memsync.cli import ( + build_parser, + cmd_config_set, + cmd_config_show, + cmd_diff, + cmd_doctor, + cmd_init, + cmd_providers, + cmd_prune, + cmd_refresh, + cmd_show, + cmd_status, +) +from memsync.config import Config + +SAMPLE_MEMORY = """\ + +# Global Memory + +## Identity & context +- Test user + +## Hard constraints +- Always backup before writing +""" + + +def _args(**kwargs): + """Build a minimal args namespace.""" + defaults = { + "notes": None, "file": None, "dry_run": False, "model": None, + "backup": None, "keep_days": None, + } + defaults.update(kwargs) + + class Namespace: + pass + + ns = Namespace() + for k, v in defaults.items(): + setattr(ns, k, v) + return ns + + +class TestCmdShow: + def test_prints_memory_content(self, memory_file, capsys): + config, tmp_path, global_memory = memory_file + result = cmd_show(_args(), config) + out = capsys.readouterr().out + assert result == 0 + assert "Global Memory" in out + + def test_returns_3_when_no_memory_file(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_show(_args(), config) + assert result == 3 + + def test_returns_2_when_memory_root_missing(self, tmp_path, capsys): + config = Config(provider="custom", sync_root=tmp_path / "sync") + result = cmd_show(_args(), config) + assert result == 2 + + +class TestCmdStatus: + def test_shows_platform_info(self, 
class TestCmdPrune:
    """cmd_prune removes aged backups, honouring --keep-days and --dry-run."""

    def test_prunes_old_backups(self, memory_file, capsys):
        config, tmp_path, global_memory = memory_file
        backup_dir = config.sync_root / ".claude-memory" / "backups"

        # Create a backup manually by copying
        from memsync.backups import backup
        backup(global_memory, backup_dir)

        result = cmd_prune(_args(keep_days=0), config)
        out = capsys.readouterr().out
        assert result == 0
        assert "Pruned" in out

    def test_reports_nothing_to_prune(self, memory_file, capsys):
        config, tmp_path, _ = memory_file
        result = cmd_prune(_args(keep_days=30), config)
        out = capsys.readouterr().out
        assert result == 0
        assert "No backups" in out

    def test_dry_run_does_not_delete(self, memory_file, capsys):
        config, tmp_path, global_memory = memory_file
        backup_dir = config.sync_root / ".claude-memory" / "backups"

        from memsync.backups import backup
        b = backup(global_memory, backup_dir)

        result = cmd_prune(_args(keep_days=0, dry_run=True), config)
        assert result == 0
        assert b.exists()  # not deleted


class TestCmdProviders:
    """cmd_providers lists every registered provider and flags the active one."""

    def test_lists_all_providers(self, tmp_config, capsys):
        config, _ = tmp_config
        result = cmd_providers(_args(), config)
        out = capsys.readouterr().out
        assert result == 0
        assert "onedrive" in out
        assert "icloud" in out
        assert "gdrive" in out
        assert "custom" in out

    def test_shows_active_provider(self, tmp_config, capsys):
        config, _ = tmp_config
        cmd_providers(_args(), config)
        out = capsys.readouterr().out
        assert "Active provider:" in out
class TestCmdRefresh:
    """cmd_refresh orchestration: input validation, dry-run, backups, exit codes.

    The Claude API call is always patched out via refresh_memory_content.
    """

    def _mock_refresh_result(self, changed=True, truncated=False, content=SAMPLE_MEMORY):
        # Shape mirrors sync.refresh_memory_content's return dict.
        return {"updated_content": content, "changed": changed, "truncated": truncated}

    def test_returns_1_on_empty_notes(self, memory_file, capsys):
        config, tmp_path, _ = memory_file
        # Whitespace-only notes are rejected before any API call.
        result = cmd_refresh(_args(notes=" "), config)
        assert result == 1

    def test_dry_run_does_not_write(self, memory_file, capsys):
        config, tmp_path, global_memory = memory_file
        original = global_memory.read_text(encoding="utf-8")

        mock_result = self._mock_refresh_result(changed=True)
        with patch("memsync.cli.refresh_memory_content", return_value=mock_result):
            result = cmd_refresh(_args(notes="some notes", dry_run=True), config)

        assert result == 0
        assert global_memory.read_text(encoding="utf-8") == original  # unchanged

    def test_no_change_prints_message(self, memory_file, capsys):
        config, tmp_path, global_memory = memory_file
        mock_result = self._mock_refresh_result(changed=False)

        with patch("memsync.cli.refresh_memory_content", return_value=mock_result):
            result = cmd_refresh(_args(notes="some notes"), config)

        out = capsys.readouterr().out
        assert result == 0
        assert "no changes" in out.lower()

    def test_truncation_returns_5(self, memory_file, capsys):
        config, tmp_path, _ = memory_file
        # Truncated model output maps to the dedicated exit code 5.
        mock_result = self._mock_refresh_result(changed=True, truncated=True)

        with patch("memsync.cli.refresh_memory_content", return_value=mock_result):
            result = cmd_refresh(_args(notes="some notes"), config)

        assert result == 5

    def test_successful_refresh_writes_backup_and_memory(self, memory_file, capsys):
        config, tmp_path, global_memory = memory_file
        updated = SAMPLE_MEMORY + "\n- new item added"
        mock_result = self._mock_refresh_result(changed=True, content=updated)

        with patch("memsync.cli.refresh_memory_content", return_value=mock_result):
            result = cmd_refresh(_args(notes="some notes"), config)

        assert result == 0
        assert global_memory.read_text(encoding="utf-8") == updated

        # A pre-write backup must exist alongside the rewritten memory file.
        backup_dir = config.sync_root / ".claude-memory" / "backups"
        from memsync.backups import list_backups
        assert len(list_backups(backup_dir)) == 1

    def test_model_override_passed_to_refresh(self, memory_file):
        config, tmp_path, _ = memory_file
        mock_result = self._mock_refresh_result(changed=False)

        with patch("memsync.cli.refresh_memory_content", return_value=mock_result) as mock_fn:
            cmd_refresh(_args(notes="notes", model="claude-haiku-4-5-20251001"), config)

        # refresh_memory_content(notes, current_memory, config) — the config
        # passed through must carry the CLI's --model override.
        called_config = mock_fn.call_args.args[2]
        assert called_config.model == "claude-haiku-4-5-20251001"
global_memory.read_text(encoding="utf-8") == updated + + backup_dir = config.sync_root / ".claude-memory" / "backups" + from memsync.backups import list_backups + assert len(list_backups(backup_dir)) == 1 + + def test_model_override_passed_to_refresh(self, memory_file): + config, tmp_path, _ = memory_file + mock_result = self._mock_refresh_result(changed=False) + + with patch("memsync.cli.refresh_memory_content", return_value=mock_result) as mock_fn: + cmd_refresh(_args(notes="notes", model="claude-haiku-4-5-20251001"), config) + + called_config = mock_fn.call_args.args[2] + assert called_config.model == "claude-haiku-4-5-20251001" + + +@pytest.mark.smoke +class TestParser: + def test_refresh_requires_notes_or_file(self): + parser = build_parser() + args = parser.parse_args(["refresh", "--notes", "hello"]) + assert args.notes == "hello" + + def test_prune_default_keep_days_is_none(self): + parser = build_parser() + args = parser.parse_args(["prune"]) + assert args.keep_days is None # falls back to config.keep_days + + def test_config_set_parses_key_value(self): + parser = build_parser() + args = parser.parse_args(["config", "set", "model", "claude-opus-4-20250514"]) + assert args.key == "model" + assert args.value == "claude-opus-4-20250514" + + def test_doctor_is_registered(self): + parser = build_parser() + args = parser.parse_args(["doctor"]) + assert args.func is cmd_doctor + + +# --------------------------------------------------------------------------- +# cmd_init +# --------------------------------------------------------------------------- + +class TestCmdInit: + def _init_args(self, **kwargs): + defaults = {"force": False, "provider": None, "sync_root": None} + defaults.update(kwargs) + + class Namespace: + pass + + ns = Namespace() + for k, v in defaults.items(): + setattr(ns, k, v) + return ns + + def test_init_with_sync_root(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + sync_dir = tmp_path / "my-sync" + sync_dir.mkdir() + + 
monkeypatch.setattr("memsync.cli.sync_claude_md", lambda src, dst: None) + + result = cmd_init(self._init_args(sync_root=str(sync_dir)), config) + assert result == 0 + + memory = sync_dir / ".claude-memory" / "GLOBAL_MEMORY.md" + assert memory.exists() + assert "" in memory.read_text(encoding="utf-8") + + def test_init_with_sync_root_creates_dirs(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + sync_dir = tmp_path / "sync-root" + sync_dir.mkdir() + + monkeypatch.setattr("memsync.cli.sync_claude_md", lambda src, dst: None) + cmd_init(self._init_args(sync_root=str(sync_dir)), config) + + assert (sync_dir / ".claude-memory" / "backups").exists() + assert (sync_dir / ".claude-memory" / "sessions").exists() + + def test_init_with_explicit_provider(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + fake_root = tmp_path / "onedrive" + fake_root.mkdir() + + from memsync.providers.onedrive import OneDriveProvider + monkeypatch.setattr(OneDriveProvider, "detect", lambda self: fake_root) + monkeypatch.setattr("memsync.cli.sync_claude_md", lambda src, dst: None) + + result = cmd_init(self._init_args(provider="onedrive"), config) + assert result == 0 + + def test_init_returns_4_when_provider_not_found(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_init(self._init_args(provider="onedrive"), config) + # OneDrive not present in tmp_path → 4 (detection failed) + # OR 0 if OneDrive is detected on this machine; just check it ran + assert result in (0, 4) + + def test_init_sync_root_nonexistent_returns_1(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_init(self._init_args(sync_root="/nonexistent/path/xyz"), config) + assert result == 1 + + def test_init_already_initialized_without_force(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + config_path = tmp_path / "config.toml" + config_path.write_text("[core]\nprovider = 'onedrive'\n", encoding="utf-8") + 
monkeypatch.setattr("memsync.config.get_config_path", lambda: config_path) + monkeypatch.setattr("memsync.cli.get_config_path", lambda: config_path) + + result = cmd_init(self._init_args(), config) + assert result == 0 # exits gracefully + + def test_init_force_overwrites_existing_memory(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + sync_dir = tmp_path / "sync-force" + sync_dir.mkdir() + memory_dir = sync_dir / ".claude-memory" + memory_dir.mkdir() + existing = memory_dir / "GLOBAL_MEMORY.md" + existing.write_text("# Old content", encoding="utf-8") + + monkeypatch.setattr("memsync.cli.sync_claude_md", lambda src, dst: None) + cmd_init(self._init_args(sync_root=str(sync_dir), force=True), config) + + new_content = existing.read_text(encoding="utf-8") + assert "" in new_content + + def test_init_writes_config_file(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + sync_dir = tmp_path / "sync-cfg" + sync_dir.mkdir() + + saved_configs = [] + + def capture_save(self): + saved_configs.append(self) + monkeypatch.setattr(Config, "save", capture_save) + monkeypatch.setattr("memsync.cli.sync_claude_md", lambda src, dst: None) + + cmd_init(self._init_args(sync_root=str(sync_dir)), config) + assert len(saved_configs) == 1 + + +# --------------------------------------------------------------------------- +# cmd_diff +# --------------------------------------------------------------------------- + +class TestCmdDiff: + def test_returns_3_when_no_memory_file(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_diff(_args(), config) + assert result == 3 + + def test_prints_no_backups_message(self, memory_file, capsys): + config, tmp_path, _ = memory_file + result = cmd_diff(_args(), config) + out = capsys.readouterr().out + assert result == 0 + assert "No backups found" in out + + def test_shows_diff_against_latest_backup(self, memory_file, capsys): + config, tmp_path, global_memory = memory_file + backup_dir = 
config.sync_root / ".claude-memory" / "backups" + + # Create a backup of the original + from memsync.backups import backup + backup(global_memory, backup_dir) + + # Modify the current memory + global_memory.write_text( + global_memory.read_text(encoding="utf-8") + "\n- New item added", + encoding="utf-8", + ) + + result = cmd_diff(_args(), config) + out = capsys.readouterr().out + assert result == 0 + assert "New item added" in out + + def test_no_diff_when_identical(self, memory_file, capsys): + config, tmp_path, global_memory = memory_file + backup_dir = config.sync_root / ".claude-memory" / "backups" + + from memsync.backups import backup + backup(global_memory, backup_dir) + + result = cmd_diff(_args(), config) + out = capsys.readouterr().out + assert result == 0 + assert "No differences" in out + + def test_specific_backup_flag(self, memory_file, capsys): + config, tmp_path, global_memory = memory_file + backup_dir = config.sync_root / ".claude-memory" / "backups" + + from memsync.backups import backup + b = backup(global_memory, backup_dir) + + result = cmd_diff(_args(backup=b.name), config) + assert result == 0 + + def test_nonexistent_backup_returns_1(self, memory_file, capsys): + config, tmp_path, _ = memory_file + result = cmd_diff(_args(backup="GLOBAL_MEMORY_19991231_235959.md"), config) + assert result == 1 + + +# --------------------------------------------------------------------------- +# cmd_config_show +# --------------------------------------------------------------------------- + +class TestCmdConfigShow: + def test_returns_2_when_no_config(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_config_show(_args(), config) + assert result == 2 + + def test_prints_config_contents(self, tmp_config, monkeypatch, capsys): + config, tmp_path = tmp_config + config_path = tmp_path / "config.toml" + config_path.write_text("[core]\nprovider = \"onedrive\"\n", encoding="utf-8") + monkeypatch.setattr("memsync.cli.get_config_path", lambda: 
config_path) + + result = cmd_config_show(_args(), config) + out = capsys.readouterr().out + assert result == 0 + assert "onedrive" in out + + +# --------------------------------------------------------------------------- +# cmd_config_set +# --------------------------------------------------------------------------- + +class TestCmdConfigSet: + def _set_args(self, key, value): + class Namespace: + pass + ns = Namespace() + ns.key = key + ns.value = value + return ns + + def test_set_model(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + saved = [] + monkeypatch.setattr(Config, "save", lambda self: saved.append(self)) + + result = cmd_config_set(self._set_args("model", "claude-opus-4-20250514"), config) + assert result == 0 + assert saved[0].model == "claude-opus-4-20250514" + + def test_set_provider(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + saved = [] + monkeypatch.setattr(Config, "save", lambda self: saved.append(self)) + + result = cmd_config_set(self._set_args("provider", "icloud"), config) + assert result == 0 + assert saved[0].provider == "icloud" + + def test_set_invalid_provider_returns_1(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_config_set(self._set_args("provider", "dropbox"), config) + err = capsys.readouterr().err + assert result == 1 + assert "dropbox" in err + + def test_set_keep_days(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + saved = [] + monkeypatch.setattr(Config, "save", lambda self: saved.append(self)) + + result = cmd_config_set(self._set_args("keep_days", "60"), config) + assert result == 0 + assert saved[0].keep_days == 60 + + def test_set_keep_days_non_integer_returns_1(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_config_set(self._set_args("keep_days", "thirty"), config) + assert result == 1 + + def test_set_max_memory_lines(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + saved = [] + 
monkeypatch.setattr(Config, "save", lambda self: saved.append(self)) + + result = cmd_config_set(self._set_args("max_memory_lines", "300"), config) + assert result == 0 + assert saved[0].max_memory_lines == 300 + + def test_set_sync_root(self, tmp_config, monkeypatch): + config, tmp_path = tmp_config + sync_dir = tmp_path / "new-sync" + sync_dir.mkdir() + saved = [] + monkeypatch.setattr(Config, "save", lambda self: saved.append(self)) + + result = cmd_config_set(self._set_args("sync_root", str(sync_dir)), config) + assert result == 0 + assert saved[0].sync_root == sync_dir + assert saved[0].provider == "custom" # auto-set when sync_root configured + + def test_set_sync_root_nonexistent_returns_1(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_config_set(self._set_args("sync_root", "/nonexistent/xyz"), config) + assert result == 1 + + def test_set_unknown_key_returns_1(self, tmp_config, capsys): + config, tmp_path = tmp_config + result = cmd_config_set(self._set_args("unknown_key", "value"), config) + err = capsys.readouterr().err + assert result == 1 + assert "unknown_key" in err + + +# --------------------------------------------------------------------------- +# cmd_doctor +# --------------------------------------------------------------------------- + +class TestCmdDoctor: + def test_all_checks_pass_returns_0(self, memory_file, monkeypatch): + config, tmp_path, global_memory = memory_file + + # Sync CLAUDE.md first + from memsync.claude_md import sync as sync_claude_md + config.claude_md_target.parent.mkdir(parents=True, exist_ok=True) + sync_claude_md(global_memory, config.claude_md_target) + + monkeypatch.setenv("ANTHROPIC_API_KEY", "test-key-abc") + monkeypatch.setattr("memsync.cli.get_config_path", + lambda: tmp_path / "config.toml") + (tmp_path / "config.toml").write_text("[core]\n", encoding="utf-8") + + result = cmd_doctor(_args(), config) + assert result == 0 + + def test_missing_api_key_fails(self, memory_file, monkeypatch, 
capsys): + config, tmp_path, global_memory = memory_file + monkeypatch.delenv("ANTHROPIC_API_KEY", raising=False) + + result = cmd_doctor(_args(), config) + out = capsys.readouterr().out + assert result == 1 + assert "ANTHROPIC_API_KEY" in out + + def test_missing_memory_file_fails(self, tmp_config, monkeypatch, capsys): + config, tmp_path = tmp_config + monkeypatch.setenv("ANTHROPIC_API_KEY", "test-key") + # Memory root exists but no GLOBAL_MEMORY.md + + result = cmd_doctor(_args(), config) + capsys.readouterr() + assert result == 1 + + def test_output_includes_all_check_labels(self, memory_file, monkeypatch, capsys): + config, tmp_path, _ = memory_file + monkeypatch.delenv("ANTHROPIC_API_KEY", raising=False) + + cmd_doctor(_args(), config) + out = capsys.readouterr().out + assert "Config file" in out + assert "ANTHROPIC_API_KEY" in out + assert "Provider" in out + + +# --------------------------------------------------------------------------- +# Daemon CLI commands +# --------------------------------------------------------------------------- + +class TestDaemonCLIGuard: + """When daemon extras are not installed, all commands print a hint.""" + + def test_guard_fails_gracefully_when_no_extras(self, tmp_config, capsys): + config, _ = tmp_config + from memsync.cli import cmd_daemon_start + + class FakeArgs: + detach = False + + with patch("memsync.cli._daemon_import_guard", return_value=False): + result = cmd_daemon_start(FakeArgs(), config) + assert result == 1 + + def test_stop_without_pid_file_returns_1(self, tmp_config, capsys, tmp_path, monkeypatch): + config, _ = tmp_config + from memsync.cli import cmd_daemon_stop + + class FakeArgs: + pass + + monkeypatch.setattr("memsync.cli._PID_FILE", tmp_path / "nonexistent.pid") + result = cmd_daemon_stop(FakeArgs(), config) + assert result == 1 + + def test_status_no_pid_file(self, tmp_config, capsys, tmp_path, monkeypatch): + config, _ = tmp_config + from memsync.cli import cmd_daemon_status + + class FakeArgs: + 
pass + + monkeypatch.setattr("memsync.cli._PID_FILE", tmp_path / "nonexistent.pid") + result = cmd_daemon_status(FakeArgs(), config) + out = capsys.readouterr().out + assert result == 0 + assert "not running" in out.lower() + + def test_schedule_shows_jobs(self, tmp_config, capsys): + config, _ = tmp_config + from memsync.cli import cmd_daemon_schedule + + class FakeArgs: + pass + + # daemon extras installed, config has refresh enabled — should show jobs + result = cmd_daemon_schedule(FakeArgs(), config) + capsys.readouterr() + assert result == 0 + + def test_install_raises_not_implemented_on_windows(self, tmp_config, capsys): + config, _ = tmp_config + from memsync.cli import cmd_daemon_install + + class FakeArgs: + pass + + with patch("memsync.daemon.service.install_service", + side_effect=NotImplementedError("Windows not supported")): + result = cmd_daemon_install(FakeArgs(), config) + assert result == 1 + + def test_uninstall_raises_not_implemented_on_windows(self, tmp_config, capsys): + config, _ = tmp_config + from memsync.cli import cmd_daemon_uninstall + + class FakeArgs: + pass + + with patch("memsync.daemon.service.uninstall_service", + side_effect=NotImplementedError("Windows not supported")): + result = cmd_daemon_uninstall(FakeArgs(), config) + assert result == 1 + + def test_web_opens_browser(self, tmp_config, capsys): + config, _ = tmp_config + from memsync.cli import cmd_daemon_web + + class FakeArgs: + pass + + with patch("webbrowser.open") as mock_open: + result = cmd_daemon_web(FakeArgs(), config) + assert result == 0 + mock_open.assert_called_once() + + def test_parser_has_daemon_subcommand(self): + parser = build_parser() + args = parser.parse_args(["daemon", "stop"]) + from memsync.cli import cmd_daemon_stop + assert args.func is cmd_daemon_stop + + def test_parser_daemon_start_has_detach_flag(self): + parser = build_parser() + args = parser.parse_args(["daemon", "start", "--detach"]) + assert args.detach is True diff --git 
a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..ca94c47 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +import platform +from pathlib import Path + +import pytest + +from memsync.config import Config, get_config_path + + +@pytest.mark.smoke +class TestConfigDefaults: + def test_default_provider(self): + c = Config() + assert c.provider == "onedrive" + + def test_default_model(self): + c = Config() + assert c.model == "claude-sonnet-4-20250514" + + def test_default_max_memory_lines(self): + c = Config() + assert c.max_memory_lines == 400 + + def test_default_keep_days(self): + c = Config() + assert c.keep_days == 30 + + def test_default_sync_root_is_none(self): + c = Config() + assert c.sync_root is None + + def test_default_claude_md_target_is_set(self): + c = Config() + assert c.claude_md_target is not None + assert c.claude_md_target == Path("~/.claude/CLAUDE.md").expanduser() + + +class TestConfigPath: + def test_windows_path_uses_appdata(self, monkeypatch): + monkeypatch.setattr(platform, "system", lambda: "Windows") + monkeypatch.setenv("APPDATA", "C:/Users/test/AppData/Roaming") + path = get_config_path() + assert "memsync" in str(path) + assert path.suffix == ".toml" + + def test_linux_path_uses_xdg(self, monkeypatch): + monkeypatch.setattr(platform, "system", lambda: "Linux") + monkeypatch.setenv("XDG_CONFIG_HOME", "/home/test/.config") + path = get_config_path() + assert path.as_posix().endswith("memsync/config.toml") + + def test_mac_path_uses_dotconfig(self, monkeypatch, tmp_path): + monkeypatch.setattr(platform, "system", lambda: "Darwin") + monkeypatch.delenv("XDG_CONFIG_HOME", raising=False) + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + path = get_config_path() + assert "memsync" in str(path) + assert path.suffix == ".toml" + + +class TestConfigRoundTrip: + def test_save_and_load(self, tmp_path, monkeypatch): + monkeypatch.setattr( + 
"memsync.config.get_config_path", + lambda: tmp_path / "config.toml", + ) + c = Config( + provider="icloud", + model="claude-haiku-4-5-20251001", + keep_days=60, + sync_root=tmp_path / "sync", + ) + c.save() + + loaded = Config.load() + assert loaded.provider == "icloud" + assert loaded.model == "claude-haiku-4-5-20251001" + assert loaded.keep_days == 60 + assert loaded.sync_root == tmp_path / "sync" + + def test_load_defaults_when_file_missing(self, tmp_path, monkeypatch): + monkeypatch.setattr( + "memsync.config.get_config_path", + lambda: tmp_path / "nonexistent.toml", + ) + c = Config.load() + assert c.provider == "onedrive" + + def test_toml_output_is_valid(self): + import tomllib + c = Config(provider="gdrive", keep_days=14) + toml_text = c._to_toml() + parsed = tomllib.loads(toml_text) + assert parsed["core"]["provider"] == "gdrive" + assert parsed["backups"]["keep_days"] == 14 + + def test_sync_root_serialized_with_forward_slashes(self, tmp_path): + c = Config(sync_root=tmp_path / "my sync" / "folder") + toml_text = c._to_toml() + # Forward slashes in path (TOML-safe) + assert "\\" not in toml_text.split("sync_root")[1].split("\n")[0] diff --git a/tests/test_daemon_capture.py b/tests/test_daemon_capture.py new file mode 100644 index 0000000..a450a32 --- /dev/null +++ b/tests/test_daemon_capture.py @@ -0,0 +1,190 @@ +""" +Tests for memsync.daemon.capture + +Uses Flask's built-in test client. Verifies auth, request validation, +and session log writing. 
+""" +from __future__ import annotations + +import json +from pathlib import Path + +import pytest + +from memsync.config import Config, DaemonConfig +from memsync.daemon.capture import create_capture_app + + +@pytest.fixture +def capture_config(tmp_path: Path) -> tuple[Config, Path]: + sync_root = tmp_path / "sync" + memory_root = sync_root / ".claude-memory" + (memory_root / "sessions").mkdir(parents=True) + + config = Config( + provider="custom", + sync_root=sync_root, + daemon=DaemonConfig( + capture_enabled=True, + capture_port=5001, + capture_token="", # no token by default + ), + ) + return config, memory_root + + +class TestHealth: + def test_health_returns_ok(self, capture_config: tuple) -> None: + config, _ = capture_config + app = create_capture_app(config) + with app.test_client() as client: + resp = client.get("/health") + assert resp.status_code == 200 + assert json.loads(resp.data)["ok"] is True + + +class TestAddNote: + def test_accepts_valid_note(self, capture_config: tuple) -> None: + config, _ = capture_config + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": "Test note from iPhone"}), + content_type="application/json", + ) + assert resp.status_code == 200 + body = json.loads(resp.data) + assert body["ok"] is True + assert "timestamp" in body + + def test_rejects_empty_text(self, capture_config: tuple) -> None: + config, _ = capture_config + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": " "}), + content_type="application/json", + ) + assert resp.status_code == 400 + assert b"empty" in resp.data + + def test_rejects_missing_text_field(self, capture_config: tuple) -> None: + config, _ = capture_config + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"msg": "wrong key"}), + content_type="application/json", + ) + 
assert resp.status_code == 400 + + def test_rejects_non_json_body(self, capture_config: tuple) -> None: + config, _ = capture_config + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post("/note", data="not json", content_type="text/plain") + assert resp.status_code == 400 + + def test_writes_to_session_log(self, capture_config: tuple) -> None: + config, memory_root = capture_config + app = create_capture_app(config) + with app.test_client() as client: + client.post( + "/note", + data=json.dumps({"text": "Important note captured"}), + content_type="application/json", + ) + from datetime import datetime + today = datetime.now().strftime("%Y-%m-%d") + log_path = memory_root / "sessions" / f"{today}.md" + assert log_path.exists() + content = log_path.read_text(encoding="utf-8") + assert "Important note captured" in content + assert "(captured)" in content + + def test_appends_to_existing_session_log(self, capture_config: tuple) -> None: + config, memory_root = capture_config + from datetime import datetime + today = datetime.now().strftime("%Y-%m-%d") + log_path = memory_root / "sessions" / f"{today}.md" + log_path.write_text("existing content\n", encoding="utf-8") + + app = create_capture_app(config) + with app.test_client() as client: + client.post( + "/note", + data=json.dumps({"text": "appended note"}), + content_type="application/json", + ) + content = log_path.read_text(encoding="utf-8") + assert "existing content" in content + assert "appended note" in content + + +class TestTokenAuth: + def test_accepts_without_token_when_none_configured(self, capture_config: tuple) -> None: + config, _ = capture_config # capture_token = "" + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": "unauthenticated"}), + content_type="application/json", + ) + assert resp.status_code == 200 + + def test_requires_token_when_configured(self, capture_config: tuple) -> None: 
+ import dataclasses + + config, _ = capture_config + config = dataclasses.replace( + config, + daemon=dataclasses.replace(config.daemon, capture_token="secret123"), + ) + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": "no token"}), + content_type="application/json", + ) + assert resp.status_code == 401 + + def test_accepts_valid_token(self, capture_config: tuple) -> None: + import dataclasses + + config, _ = capture_config + config = dataclasses.replace( + config, + daemon=dataclasses.replace(config.daemon, capture_token="secret123"), + ) + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": "authenticated"}), + content_type="application/json", + headers={"X-Memsync-Token": "secret123"}, + ) + assert resp.status_code == 200 + + def test_rejects_wrong_token(self, capture_config: tuple) -> None: + import dataclasses + + config, _ = capture_config + config = dataclasses.replace( + config, + daemon=dataclasses.replace(config.daemon, capture_token="secret123"), + ) + app = create_capture_app(config) + with app.test_client() as client: + resp = client.post( + "/note", + data=json.dumps({"text": "wrong token"}), + content_type="application/json", + headers={"X-Memsync-Token": "wrongtoken"}, + ) + assert resp.status_code == 401 diff --git a/tests/test_daemon_digest.py b/tests/test_daemon_digest.py new file mode 100644 index 0000000..4327ff1 --- /dev/null +++ b/tests/test_daemon_digest.py @@ -0,0 +1,123 @@ +""" +Tests for memsync.daemon.digest + +API calls are always mocked — no real Claude API calls in tests. 
+""" +from __future__ import annotations + +from datetime import date, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from memsync.config import Config, DaemonConfig +from memsync.daemon.digest import generate_and_send, generate_digest + + +@pytest.fixture +def digest_memory_root(tmp_path: Path) -> Path: + memory_root = tmp_path / ".claude-memory" + (memory_root / "sessions").mkdir(parents=True) + return memory_root + + +def _write_session(memory_root: Path, day: date, content: str) -> None: + log = memory_root / "sessions" / f"{day.strftime('%Y-%m-%d')}.md" + log.write_text(content, encoding="utf-8") + + +@pytest.fixture +def digest_config(tmp_path: Path) -> Config: + sync_root = tmp_path / "sync" + (sync_root / ".claude-memory" / "sessions").mkdir(parents=True) + return Config( + provider="custom", + sync_root=sync_root, + daemon=DaemonConfig( + digest_enabled=True, + digest_email_to="to@example.com", + digest_email_from="from@example.com", + digest_smtp_host="smtp.example.com", + ), + ) + + +class TestGenerateDigest: + def test_returns_empty_string_when_no_logs(self, digest_memory_root: Path) -> None: + config = Config() + result = generate_digest(digest_memory_root, config) + assert result == "" + + def test_collects_past_7_days(self, digest_memory_root: Path) -> None: + today = date.today() + for i in range(1, 6): + day = today - timedelta(days=i) + _write_session(digest_memory_root, day, f"Notes for day -{i}") + + mock_response = MagicMock() + mock_response.content = [MagicMock(text="Weekly summary text")] + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = generate_digest(digest_memory_root, Config()) + + assert result == "Weekly summary text" + + def test_includes_today_in_window(self, digest_memory_root: Path) -> None: + today = date.today() + _write_session(digest_memory_root, today, "Today's notes") + + mock_response = 
MagicMock() + mock_response.content = [MagicMock(text="summary")] + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = generate_digest(digest_memory_root, Config()) + + # Today is in the 7-day window (week_ago + 7 days = today) + assert result == "summary" + + def test_passes_model_from_config(self, digest_memory_root: Path) -> None: + today = date.today() + _write_session(digest_memory_root, today - timedelta(days=1), "Yesterday's notes") + + mock_response = MagicMock() + mock_response.content = [MagicMock(text="summary")] + config = Config(model="claude-haiku-4-5-20251001") + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + generate_digest(digest_memory_root, config) + + call_kwargs = mock_client.return_value.messages.create.call_args[1] + assert call_kwargs["model"] == "claude-haiku-4-5-20251001" + + +class TestGenerateAndSend: + def test_sends_email_when_digest_available(self, digest_config: Config) -> None: + memory_root = digest_config.sync_root / ".claude-memory" + yesterday = date.today() - timedelta(days=1) + _write_session(memory_root, yesterday, "Worked on testing") + + mock_response = MagicMock() + mock_response.content = [MagicMock(text="Weekly digest text")] + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + with patch("memsync.daemon.notify._send_email") as mock_email: + generate_and_send(digest_config) + + mock_email.assert_called_once() + call_kwargs = mock_email.call_args + assert "weekly digest" in call_kwargs[1]["subject"].lower() + + def test_skips_send_when_no_logs(self, digest_config: Config) -> None: + with patch("memsync.daemon.notify._send_email") as mock_email: + generate_and_send(digest_config) + mock_email.assert_not_called() + + def test_skips_when_sync_root_missing(self) -> None: + config = 
Config(provider="custom", sync_root=None) + # Should not raise + generate_and_send(config) diff --git a/tests/test_daemon_notify.py b/tests/test_daemon_notify.py new file mode 100644 index 0000000..1f8a657 --- /dev/null +++ b/tests/test_daemon_notify.py @@ -0,0 +1,94 @@ +""" +Tests for memsync.daemon.notify + +Tests the three notification channels: log, email, file. +Email is always mocked — no real SMTP connections made. +""" +from __future__ import annotations + +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from memsync.config import Config, DaemonConfig +from memsync.daemon.notify import _write_flag_file, notify + + +@pytest.fixture +def log_config() -> Config: + return Config(daemon=DaemonConfig(drift_notify="log")) + + +@pytest.fixture +def email_config() -> Config: + return Config( + daemon=DaemonConfig( + drift_notify="email", + digest_email_from="from@example.com", + digest_email_to="to@example.com", + digest_smtp_host="smtp.example.com", + digest_smtp_port=587, + digest_smtp_user="user", + digest_smtp_password="pass", + ) + ) + + +@pytest.fixture +def file_config() -> Config: + return Config(daemon=DaemonConfig(drift_notify="file")) + + +class TestNotifyLog: + def test_log_channel_does_not_raise(self, log_config: Config) -> None: + notify(log_config, "test subject", "test body") + + def test_unknown_channel_falls_back_to_log(self) -> None: + config = Config(daemon=DaemonConfig(drift_notify="unknown_channel")) + notify(config, "subject", "body") # must not raise + + +class TestNotifyEmail: + def test_email_channel_calls_smtp(self, email_config: Config) -> None: + with patch("smtplib.SMTP") as mock_smtp_class: + mock_smtp = MagicMock() + mock_smtp_class.return_value.__enter__ = MagicMock(return_value=mock_smtp) + mock_smtp_class.return_value.__exit__ = MagicMock(return_value=False) + notify(email_config, "subject", "body") + mock_smtp.send_message.assert_called_once() + + def test_email_failure_does_not_raise(self, 
email_config: Config) -> None: + with patch("smtplib.SMTP", side_effect=ConnectionRefusedError("no server")): + notify(email_config, "subject", "body") # must not raise + + def test_uses_env_var_password_over_config(self, email_config: Config, monkeypatch) -> None: + """MEMSYNC_SMTP_PASSWORD env var takes precedence over plaintext config.""" + monkeypatch.setenv("MEMSYNC_SMTP_PASSWORD", "env_secret") + with patch("smtplib.SMTP") as mock_smtp_class: + mock_smtp = MagicMock() + mock_smtp_class.return_value.__enter__ = MagicMock(return_value=mock_smtp) + mock_smtp_class.return_value.__exit__ = MagicMock(return_value=False) + from memsync.daemon.notify import _send_email + _send_email(email_config, "subject", "body") + mock_smtp.login.assert_called_once_with("user", "env_secret") + + +class TestNotifyFile: + def test_file_channel_writes_alert( + self, file_config: Config, tmp_path: Path, monkeypatch + ) -> None: + monkeypatch.setattr(Path, "home", lambda: tmp_path) + _write_flag_file("alert subject", "alert body") + alerts_dir = tmp_path / ".config" / "memsync" / "alerts" + files = list(alerts_dir.glob("*_alert.txt")) + assert len(files) == 1 + content = files[0].read_text(encoding="utf-8") + assert "alert subject" in content + assert "alert body" in content + + def test_file_channel_notify_does_not_raise( + self, file_config: Config, tmp_path: Path, monkeypatch + ) -> None: + monkeypatch.setattr(Path, "home", lambda: tmp_path) + notify(file_config, "subject", "body") # must not raise diff --git a/tests/test_daemon_scheduler.py b/tests/test_daemon_scheduler.py new file mode 100644 index 0000000..f141e84 --- /dev/null +++ b/tests/test_daemon_scheduler.py @@ -0,0 +1,294 @@ +""" +Tests for memsync.daemon.scheduler + +All jobs are tested in isolation by calling them directly with mocked filesystem +and mocked API. No real APScheduler scheduling occurs in these tests. 
+""" +from __future__ import annotations + +from datetime import date +from pathlib import Path +from unittest.mock import patch + +import pytest + +from memsync.config import Config, DaemonConfig +from memsync.daemon.scheduler import ( + build_scheduler, + job_backup_mirror, + job_drift_check, + job_nightly_refresh, + job_weekly_digest, +) + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +@pytest.fixture +def daemon_config(tmp_path: Path) -> Config: + """Config with daemon enabled, sync_root pointing to tmp_path.""" + sync_root = tmp_path / "sync" + memory_root = sync_root / ".claude-memory" + (memory_root / "sessions").mkdir(parents=True) + (memory_root / "backups").mkdir(parents=True) + (memory_root / "GLOBAL_MEMORY.md").write_text( + "# Global Memory\n\n## Identity\n- Test user\n\n## Hard constraints\n- Always test\n", + encoding="utf-8", + ) + claude_md = tmp_path / "claude" / "CLAUDE.md" + claude_md.parent.mkdir(parents=True) + claude_md.write_text("# Global Memory\n", encoding="utf-8") + + return Config( + provider="custom", + sync_root=sync_root, + claude_md_target=claude_md, + daemon=DaemonConfig( + enabled=True, + refresh_enabled=True, + backup_mirror_path="", + drift_check_enabled=True, + digest_enabled=False, + ), + ) + + +# --------------------------------------------------------------------------- +# build_scheduler +# --------------------------------------------------------------------------- + +class TestBuildScheduler: + def test_returns_background_scheduler_by_default(self, daemon_config: Config) -> None: + from apscheduler.schedulers.background import BackgroundScheduler + + scheduler = build_scheduler(daemon_config, blocking=False) + assert isinstance(scheduler, BackgroundScheduler) + + def test_returns_blocking_scheduler_when_requested(self, daemon_config: Config) -> None: + from apscheduler.schedulers.blocking import 
BlockingScheduler + + scheduler = build_scheduler(daemon_config, blocking=True) + assert isinstance(scheduler, BlockingScheduler) + + def test_refresh_job_added_when_enabled(self, daemon_config: Config) -> None: + scheduler = build_scheduler(daemon_config) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "nightly_refresh" in job_ids + + def test_refresh_job_not_added_when_disabled(self, daemon_config: Config) -> None: + import dataclasses + + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, refresh_enabled=False), + ) + scheduler = build_scheduler(cfg) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "nightly_refresh" not in job_ids + + def test_backup_mirror_job_added_when_path_set( + self, daemon_config: Config, tmp_path: Path + ) -> None: + import dataclasses + + mirror = tmp_path / "mirror" + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, backup_mirror_path=str(mirror)), + ) + scheduler = build_scheduler(cfg) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "backup_mirror" in job_ids + + def test_backup_mirror_job_not_added_when_path_empty(self, daemon_config: Config) -> None: + scheduler = build_scheduler(daemon_config) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "backup_mirror" not in job_ids + + def test_drift_check_job_added_when_enabled(self, daemon_config: Config) -> None: + scheduler = build_scheduler(daemon_config) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "drift_check" in job_ids + + def test_digest_job_not_added_when_disabled(self, daemon_config: Config) -> None: + scheduler = build_scheduler(daemon_config) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "weekly_digest" not in job_ids + + def test_digest_job_added_when_enabled(self, daemon_config: Config) -> None: + import dataclasses + + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, 
digest_enabled=True), + ) + scheduler = build_scheduler(cfg) + job_ids = [j.id for j in scheduler.get_jobs()] + assert "weekly_digest" in job_ids + + +# --------------------------------------------------------------------------- +# job_nightly_refresh +# --------------------------------------------------------------------------- + +class TestJobNightlyRefresh: + def test_skips_when_no_session_log(self, daemon_config: Config) -> None: + """No session log for today → early return, no API call.""" + with patch("memsync.sync.refresh_memory_content") as mock_refresh: + job_nightly_refresh(daemon_config) + mock_refresh.assert_not_called() + + def test_skips_when_session_log_empty(self, daemon_config: Config) -> None: + memory_root = daemon_config.sync_root / ".claude-memory" + today = date.today().strftime("%Y-%m-%d") + (memory_root / "sessions" / f"{today}.md").write_text(" \n", encoding="utf-8") + + with patch("memsync.sync.refresh_memory_content") as mock_refresh: + job_nightly_refresh(daemon_config) + mock_refresh.assert_not_called() + + def test_calls_api_when_notes_exist(self, daemon_config: Config) -> None: + memory_root = daemon_config.sync_root / ".claude-memory" + today = date.today().strftime("%Y-%m-%d") + (memory_root / "sessions" / f"{today}.md").write_text( + "Today I worked on testing.", encoding="utf-8" + ) + + mock_result = {"changed": False, "updated_content": "# Global Memory\n", "truncated": False} + + with patch("memsync.sync.refresh_memory_content", return_value=mock_result) as mock_refresh: + job_nightly_refresh(daemon_config) + mock_refresh.assert_called_once() + + def test_writes_updated_memory_when_changed(self, daemon_config: Config) -> None: + memory_root = daemon_config.sync_root / ".claude-memory" + today = date.today().strftime("%Y-%m-%d") + (memory_root / "sessions" / f"{today}.md").write_text( + "Worked on something new.", encoding="utf-8" + ) + + new_content = "# Global Memory\n\n## Identity\n- Updated user\n" + mock_result = {"changed": 
True, "updated_content": new_content, "truncated": False} + + with patch("memsync.sync.refresh_memory_content", return_value=mock_result): + with patch("memsync.claude_md.sync"): + job_nightly_refresh(daemon_config) + + written = (memory_root / "GLOBAL_MEMORY.md").read_text(encoding="utf-8") + assert written == new_content + + def test_does_not_raise_on_exception(self, daemon_config: Config) -> None: + """Job must never propagate exceptions — daemon would crash.""" + memory_root = daemon_config.sync_root / ".claude-memory" + today = date.today().strftime("%Y-%m-%d") + (memory_root / "sessions" / f"{today}.md").write_text("notes", encoding="utf-8") + + with patch( + "memsync.sync.refresh_memory_content", + side_effect=RuntimeError("boom"), + ): + job_nightly_refresh(daemon_config) # must not raise + + def test_skips_when_sync_root_missing(self, tmp_path: Path) -> None: + """No sync root → early return, no crash.""" + config = Config(provider="custom", sync_root=None) + job_nightly_refresh(config) # must not raise + + +# --------------------------------------------------------------------------- +# job_backup_mirror +# --------------------------------------------------------------------------- + +class TestJobBackupMirror: + def test_copies_files_to_mirror(self, daemon_config: Config, tmp_path: Path) -> None: + import dataclasses + + mirror = tmp_path / "mirror" + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, backup_mirror_path=str(mirror)), + ) + job_backup_mirror(cfg) + + assert (mirror / "GLOBAL_MEMORY.md").exists() + + def test_creates_mirror_directory(self, daemon_config: Config, tmp_path: Path) -> None: + import dataclasses + + mirror = tmp_path / "deep" / "mirror" + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, backup_mirror_path=str(mirror)), + ) + assert not mirror.exists() + job_backup_mirror(cfg) + assert mirror.exists() + + def 
test_does_not_raise_on_exception(self, daemon_config: Config) -> None: + import dataclasses + + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace( + daemon_config.daemon, backup_mirror_path="/nonexistent/\x00bad" + ), + ) + # Should log the error, not raise + job_backup_mirror(cfg) # must not raise + + +# --------------------------------------------------------------------------- +# job_drift_check +# --------------------------------------------------------------------------- + +class TestJobDriftCheck: + def test_sends_notification_when_out_of_sync(self, daemon_config: Config) -> None: + with patch("memsync.claude_md.is_synced", return_value=False): + with patch("memsync.daemon.notify.notify") as mock_notify: + job_drift_check(daemon_config) + mock_notify.assert_called_once() + call_kwargs = mock_notify.call_args + assert "out of sync" in call_kwargs[1]["subject"].lower() + + def test_no_notification_when_in_sync(self, daemon_config: Config) -> None: + with patch("memsync.claude_md.is_synced", return_value=True): + with patch("memsync.daemon.notify.notify") as mock_notify: + job_drift_check(daemon_config) + mock_notify.assert_not_called() + + def test_skips_when_memory_missing(self, daemon_config: Config) -> None: + memory_path = daemon_config.sync_root / ".claude-memory" / "GLOBAL_MEMORY.md" + memory_path.unlink() + + with patch("memsync.daemon.notify.notify") as mock_notify: + job_drift_check(daemon_config) + mock_notify.assert_not_called() + + def test_does_not_raise_on_exception(self, daemon_config: Config) -> None: + with patch("memsync.claude_md.is_synced", side_effect=RuntimeError("boom")): + job_drift_check(daemon_config) # must not raise + + +# --------------------------------------------------------------------------- +# job_weekly_digest +# --------------------------------------------------------------------------- + +class TestJobWeeklyDigest: + def test_calls_generate_and_send(self, daemon_config: Config) -> None: + with 
patch("memsync.daemon.digest.generate_and_send") as mock_send: + import dataclasses + + cfg = dataclasses.replace( + daemon_config, + daemon=dataclasses.replace(daemon_config.daemon, digest_enabled=True), + ) + job_weekly_digest(cfg) + mock_send.assert_called_once_with(cfg) + + def test_does_not_raise_on_exception(self, daemon_config: Config) -> None: + with patch( + "memsync.daemon.digest.generate_and_send", + side_effect=RuntimeError("smtp error"), + ): + job_weekly_digest(daemon_config) # must not raise diff --git a/tests/test_daemon_watchdog.py b/tests/test_daemon_watchdog.py new file mode 100644 index 0000000..5557a51 --- /dev/null +++ b/tests/test_daemon_watchdog.py @@ -0,0 +1,19 @@ +"""Tests for memsync.daemon.watchdog""" +from __future__ import annotations + +from unittest.mock import patch + +from memsync.config import Config +from memsync.daemon.watchdog import run_drift_check + + +class TestRunDriftCheck: + def test_delegates_to_job_drift_check(self) -> None: + config = Config(provider="custom", sync_root=None) + with patch("memsync.daemon.watchdog.job_drift_check") as mock_job: + run_drift_check(config) + mock_job.assert_called_once_with(config) + + def test_does_not_raise_on_missing_sync_root(self) -> None: + config = Config(provider="custom", sync_root=None) + run_drift_check(config) # must not raise diff --git a/tests/test_daemon_web.py b/tests/test_daemon_web.py new file mode 100644 index 0000000..ee15ee2 --- /dev/null +++ b/tests/test_daemon_web.py @@ -0,0 +1,99 @@ +""" +Tests for memsync.daemon.web + +Uses Flask's built-in test client. No real filesystem needed for route tests +except for paths inside tmp_path. 
+""" +from __future__ import annotations + +from pathlib import Path + +import pytest + +from memsync.config import Config, DaemonConfig +from memsync.daemon.web import create_app + + +@pytest.fixture +def web_config(tmp_path: Path) -> tuple[Config, Path]: + sync_root = tmp_path / "sync" + memory_root = sync_root / ".claude-memory" + (memory_root / "backups").mkdir(parents=True) + memory_file = memory_root / "GLOBAL_MEMORY.md" + memory_file.write_text("# Global Memory\n\n## Identity\n- Test\n", encoding="utf-8") + + claude_md = tmp_path / "claude" / "CLAUDE.md" + claude_md.parent.mkdir(parents=True) + + config = Config( + provider="custom", + sync_root=sync_root, + claude_md_target=claude_md, + daemon=DaemonConfig(web_ui_enabled=True, web_ui_port=5000, web_ui_host="127.0.0.1"), + ) + return config, memory_file + + +class TestWebIndex: + def test_get_index_returns_200(self, web_config: tuple) -> None: + config, _ = web_config + app = create_app(config) + with app.test_client() as client: + resp = client.get("/") + assert resp.status_code == 200 + + def test_index_contains_memory_content(self, web_config: tuple) -> None: + config, _ = web_config + app = create_app(config) + with app.test_client() as client: + resp = client.get("/") + assert b"Global Memory" in resp.data + + def test_index_shows_never_when_file_missing(self, web_config: tuple) -> None: + config, memory_file = web_config + memory_file.unlink() + app = create_app(config) + with app.test_client() as client: + resp = client.get("/") + assert resp.status_code == 200 + assert b"never" in resp.data + + +class TestWebSave: + def test_save_writes_content(self, web_config: tuple) -> None: + config, memory_file = web_config + app = create_app(config) + new_content = "# Updated Memory\n\n- new item\n" + with app.test_client() as client: + resp = client.post("/save", data={"content": new_content}) + assert resp.status_code == 302 # redirect after save + assert memory_file.read_text(encoding="utf-8") == new_content 
+ + def test_save_creates_backup(self, web_config: tuple) -> None: + config, memory_file = web_config + backup_dir = memory_file.parent / "backups" + assert len(list(backup_dir.glob("*.md"))) == 0 + + app = create_app(config) + with app.test_client() as client: + client.post("/save", data={"content": "# New Content\n"}) + + assert len(list(backup_dir.glob("*.md"))) == 1 + + def test_save_redirect_contains_success_message(self, web_config: tuple) -> None: + config, _ = web_config + app = create_app(config) + with app.test_client() as client: + resp = client.post("/save", data={"content": "# Content\n"}) + assert resp.status_code == 302 + location = resp.headers.get("Location", "") + assert "Saved" in location or "saved" in location.lower() + + def test_save_syncs_to_claude_md(self, web_config: tuple) -> None: + config, _ = web_config + app = create_app(config) + new_content = "# Synced Memory\n" + with app.test_client() as client: + client.post("/save", data={"content": new_content}) + # CLAUDE.md should have been written (copy on first run) + assert config.claude_md_target.exists() diff --git a/tests/test_providers.py b/tests/test_providers.py new file mode 100644 index 0000000..92a1113 --- /dev/null +++ b/tests/test_providers.py @@ -0,0 +1,178 @@ +from __future__ import annotations + +import platform +from pathlib import Path + +import pytest + +from memsync.providers import all_providers, auto_detect, get_provider +from memsync.providers.custom import CustomProvider +from memsync.providers.gdrive import GoogleDriveProvider +from memsync.providers.icloud import ICloudProvider +from memsync.providers.onedrive import OneDriveProvider + + +def _raise_boom(self): + raise Exception("boom") + + +@pytest.mark.smoke +class TestRegistry: + def test_all_four_providers_registered(self): + names = {p.name for p in all_providers()} + assert names == {"onedrive", "icloud", "gdrive", "custom"} + + def test_get_provider_by_name(self): + p = get_provider("onedrive") + assert 
isinstance(p, OneDriveProvider) + + def test_get_provider_raises_for_unknown(self): + with pytest.raises(KeyError, match="dropbox"): + get_provider("dropbox") + + +class TestOneDriveProvider: + def test_detects_home_onedrive(self, tmp_path, monkeypatch): + onedrive_dir = tmp_path / "OneDrive" + onedrive_dir.mkdir() + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = OneDriveProvider() + result = provider.detect() + assert result == onedrive_dir + + def test_detects_cloudstore_personal(self, tmp_path, monkeypatch): + cloud = tmp_path / "Library" / "CloudStorage" + personal = cloud / "OneDrive-Personal" + personal.mkdir(parents=True) + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = OneDriveProvider() + result = provider.detect() + assert result == personal + + def test_returns_none_when_not_found(self, tmp_path, monkeypatch): + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = OneDriveProvider() + result = provider.detect() + assert result is None + + def test_never_raises(self, monkeypatch): + # detect() must never raise — patch _find to throw internally + monkeypatch.setattr(OneDriveProvider, "_find", _raise_boom) + provider = OneDriveProvider() + result = provider.detect() + assert result is None + + def test_memory_root_uses_dot_prefix(self, tmp_path): + provider = OneDriveProvider() + root = provider.get_memory_root(tmp_path) + assert root.name == ".claude-memory" + + def test_is_available_true_when_detected(self, tmp_path, monkeypatch): + onedrive_dir = tmp_path / "OneDrive" + onedrive_dir.mkdir() + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + provider = OneDriveProvider() + assert 
provider.is_available() is True + + +class TestICloudProvider: + def test_memory_root_has_no_dot(self, tmp_path): + """iCloud hides dot-folders — memory root must not start with '.'""" + provider = ICloudProvider() + root = provider.get_memory_root(tmp_path) + assert not root.name.startswith(".") + assert root.name == "claude-memory" + + def test_detects_mac_icloud(self, tmp_path, monkeypatch): + icloud_path = tmp_path / "Library" / "Mobile Documents" / "com~apple~CloudDocs" + icloud_path.mkdir(parents=True) + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = ICloudProvider() + result = provider.detect() + assert result == icloud_path + + def test_returns_none_on_linux(self, monkeypatch): + monkeypatch.setattr(platform, "system", lambda: "Linux") + provider = ICloudProvider() + result = provider.detect() + assert result is None + + def test_never_raises(self, monkeypatch): + monkeypatch.setattr(ICloudProvider, "_find", _raise_boom) + provider = ICloudProvider() + result = provider.detect() + assert result is None + + +class TestGoogleDriveProvider: + def test_detects_legacy_path(self, tmp_path, monkeypatch): + gdrive = tmp_path / "Google Drive" + gdrive.mkdir() + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = GoogleDriveProvider() + result = provider.detect() + assert result == gdrive + + def test_detects_cloudstore_my_drive(self, tmp_path, monkeypatch): + cloud = tmp_path / "Library" / "CloudStorage" + gdrive_dir = cloud / "GoogleDrive-test@gmail.com" + my_drive = gdrive_dir / "My Drive" + my_drive.mkdir(parents=True) + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + provider = GoogleDriveProvider() + result = provider.detect() + assert result == my_drive + + def 
test_returns_none_when_not_found(self, tmp_path, monkeypatch): + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + provider = GoogleDriveProvider() + result = provider.detect() + assert result is None + + def test_never_raises(self, monkeypatch): + monkeypatch.setattr(GoogleDriveProvider, "_find", _raise_boom) + provider = GoogleDriveProvider() + result = provider.detect() + assert result is None + + +class TestCustomProvider: + def test_detects_when_path_set_and_exists(self, tmp_path): + provider = CustomProvider(path=tmp_path) + assert provider.detect() == tmp_path + + def test_returns_none_when_path_not_set(self): + provider = CustomProvider() + assert provider.detect() is None + + def test_returns_none_when_path_missing(self, tmp_path): + provider = CustomProvider(path=tmp_path / "nonexistent") + assert provider.detect() is None + + +class TestAutoDetect: + def test_returns_only_detected_providers(self, tmp_path, monkeypatch): + # Only OneDrive folder exists + onedrive = tmp_path / "OneDrive" + onedrive.mkdir() + monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path)) + monkeypatch.setattr(platform, "system", lambda: "Darwin") + + detected = auto_detect() + names = [p.name for p in detected] + assert "onedrive" in names + assert "gdrive" not in names diff --git a/tests/test_sync.py b/tests/test_sync.py new file mode 100644 index 0000000..bf3bfd7 --- /dev/null +++ b/tests/test_sync.py @@ -0,0 +1,208 @@ +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import pytest + +from memsync.config import Config +from memsync.sync import ( + _extract_constraints, + enforce_hard_constraints, + load_or_init_memory, + log_session_notes, + refresh_memory_content, +) + +SAMPLE_MEMORY = """\ + +# Global Memory + +## Identity & context +- Test user, product leader + +## Current priorities +- Finish memsync + +## Hard constraints +- Never rewrite from 
scratch +- Always backup before writing + +## Standing preferences +- Concise output +""" + + +@pytest.mark.smoke +class TestExtractConstraints: + def test_extracts_bullet_lines(self): + constraints = _extract_constraints(SAMPLE_MEMORY) + assert "- Never rewrite from scratch" in constraints + assert "- Always backup before writing" in constraints + + def test_excludes_other_sections(self): + constraints = _extract_constraints(SAMPLE_MEMORY) + assert "- Test user, product leader" not in constraints + assert "- Finish memsync" not in constraints + + def test_empty_when_no_section(self): + text = "# Memory\n\n## Identity\n- Some user\n" + assert _extract_constraints(text) == [] + + def test_handles_constraints_heading_variant(self): + text = "# Memory\n\n## Constraints\n- Rule one\n- Rule two\n" + constraints = _extract_constraints(text) + assert "- Rule one" in constraints + assert "- Rule two" in constraints + + +@pytest.mark.smoke +class TestEnforceHardConstraints: + def test_no_op_when_nothing_dropped(self): + result = enforce_hard_constraints(SAMPLE_MEMORY, SAMPLE_MEMORY) + assert result == SAMPLE_MEMORY + + def test_reappends_dropped_constraint(self): + # Simulate model removing one constraint + dropped = SAMPLE_MEMORY.replace("- Never rewrite from scratch\n", "") + result = enforce_hard_constraints(SAMPLE_MEMORY, dropped) + assert "Never rewrite from scratch" in result + + def test_preserves_remaining_content(self): + dropped = SAMPLE_MEMORY.replace("- Never rewrite from scratch\n", "") + result = enforce_hard_constraints(SAMPLE_MEMORY, dropped) + assert "Always backup before writing" in result + assert "Test user, product leader" in result + + def test_handles_all_constraints_dropped(self): + # Remove entire section from new content + lines = [ln for ln in SAMPLE_MEMORY.splitlines() + if "Never rewrite" not in ln and "Always backup" not in ln] + stripped = "\n".join(lines) + result = enforce_hard_constraints(SAMPLE_MEMORY, stripped) + assert "Never rewrite 
from scratch" in result + assert "Always backup before writing" in result + + def test_handles_no_section_in_new(self): + old = "# Memory\n\n## Hard constraints\n- Keep this\n" + new = "# Memory\n\n## Identity\n- User\n" + result = enforce_hard_constraints(old, new) + assert "Keep this" in result + + +@pytest.mark.smoke +class TestLoadOrInitMemory: + def test_reads_existing_file(self, tmp_path): + p = tmp_path / "GLOBAL_MEMORY.md" + p.write_text("# existing", encoding="utf-8") + assert load_or_init_memory(p) == "# existing" + + def test_returns_template_when_missing(self, tmp_path): + p = tmp_path / "nonexistent.md" + result = load_or_init_memory(p) + assert result.startswith("") + assert "## Hard constraints" in result + + def test_template_has_version_comment(self, tmp_path): + p = tmp_path / "nonexistent.md" + result = load_or_init_memory(p) + assert "" in result + + +class TestLogSessionNotes: + def test_creates_dated_file(self, tmp_path): + sessions = tmp_path / "sessions" + sessions.mkdir() + log_session_notes("Worked on tests", sessions) + files = list(sessions.glob("*.md")) + assert len(files) == 1 + + def test_appends_on_same_day(self, tmp_path): + sessions = tmp_path / "sessions" + sessions.mkdir() + log_session_notes("First note", sessions) + log_session_notes("Second note", sessions) + files = list(sessions.glob("*.md")) + assert len(files) == 1 + content = files[0].read_text(encoding="utf-8") + assert "First note" in content + assert "Second note" in content + + def test_content_includes_notes(self, tmp_path): + sessions = tmp_path / "sessions" + sessions.mkdir() + log_session_notes("my session notes here", sessions) + content = list(sessions.glob("*.md"))[0].read_text(encoding="utf-8") + assert "my session notes here" in content + + +class TestRefreshMemoryContent: + def _make_mock_response(self, text: str, stop_reason: str = "end_turn") -> MagicMock: + mock_response = MagicMock() + mock_response.content = [MagicMock(text=text)] + 
mock_response.stop_reason = stop_reason + return mock_response + + def test_returns_changed_true_when_content_differs(self): + config = Config() + updated = SAMPLE_MEMORY.replace("- Finish memsync", "- Finish memsync\n- New priority") + mock_response = self._make_mock_response(updated) + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content("Added new priority", SAMPLE_MEMORY, config) + + assert result["changed"] is True + assert "New priority" in result["updated_content"] + + def test_returns_changed_false_when_content_same(self): + config = Config() + mock_response = self._make_mock_response(SAMPLE_MEMORY) + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content("Notes", SAMPLE_MEMORY, config) + + assert result["changed"] is False + + def test_uses_model_from_config(self): + config = Config(model="claude-haiku-4-5-20251001") + mock_response = self._make_mock_response(SAMPLE_MEMORY) + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + refresh_memory_content("Notes", SAMPLE_MEMORY, config) + + call_kwargs = mock_client.return_value.messages.create.call_args.kwargs + assert call_kwargs["model"] == "claude-haiku-4-5-20251001" + + def test_detects_truncation_via_stop_reason(self): + config = Config() + mock_response = self._make_mock_response(SAMPLE_MEMORY, stop_reason="max_tokens") + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content("Notes", SAMPLE_MEMORY, config) + + assert result["truncated"] is True + + def test_no_truncation_on_end_turn(self): + config = Config() + mock_response = self._make_mock_response(SAMPLE_MEMORY, stop_reason="end_turn") + + with patch("anthropic.Anthropic") 
as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content("Notes", SAMPLE_MEMORY, config) + + assert result["truncated"] is False + + def test_hard_constraints_enforced_even_if_model_drops_them(self): + config = Config() + # Model drops one constraint + without_constraint = SAMPLE_MEMORY.replace("- Never rewrite from scratch\n", "") + mock_response = self._make_mock_response(without_constraint) + + with patch("anthropic.Anthropic") as mock_client: + mock_client.return_value.messages.create.return_value = mock_response + result = refresh_memory_content("Notes", SAMPLE_MEMORY, config) + + assert "Never rewrite from scratch" in result["updated_content"] From fb017c146ccffb3280d4fa33cc990cf784d094d9 Mon Sep 17 00:00:00 2001 From: Edgecaser Date: Sat, 21 Mar 2026 13:08:21 -0700 Subject: [PATCH 2/3] docs: add beginner getting-started guide Step-by-step installation and usage guide for users with minimal code experience. Covers Python installation, API key setup, memsync init, filling in the memory file, the daily refresh workflow, multi-machine setup, and a troubleshooting section for the most common error cases. Co-Authored-By: Claude Sonnet 4.6 --- docs/getting-started.md | 590 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 590 insertions(+) create mode 100644 docs/getting-started.md diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..6ca850d --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,590 @@ +# Getting started with memsync + +This guide walks you through installing and using memsync from scratch. It assumes you +have used a terminal before — you know how to open one and type commands — but no +Python experience or coding background is required. + +--- + +## What memsync does + +Claude Code has no memory between sessions. 
Every time you start a new session, Claude +starts fresh — it doesn't remember your name, your projects, your preferences, or what +you decided last time. + +The standard fix is a file at `~/.claude/CLAUDE.md`. Claude Code reads that file at the +start of every session, so anything you put there gets loaded automatically. But keeping +that file up to date by hand is tedious, it gets bloated, and if you work on more than +one computer the files drift apart. + +memsync solves this by: + +1. Storing one canonical memory file in your cloud sync folder (OneDrive, iCloud, Google Drive) +2. Keeping `~/.claude/CLAUDE.md` in sync with that file automatically +3. Using the Claude API to intelligently merge your session notes into the memory file + +After a good session you run one command and the memory updates itself. + +--- + +## Before you start: what you'll need + +- A computer running macOS, Windows, or Linux +- Python 3.11 or newer +- An Anthropic API key +- One of: OneDrive, iCloud Drive, or Google Drive (or any folder you can specify manually) + +The sections below walk you through each prerequisite. + +--- + +## Step 1 — Check if Python is installed + +Open a terminal: +- **Mac:** press `Cmd + Space`, type `Terminal`, press Enter +- **Windows:** press `Win + R`, type `cmd`, press Enter (or search for "PowerShell") +- **Linux:** you already know how to do this + +Type this and press Enter: + +``` +python --version +``` + +You should see something like `Python 3.12.1`. If you see `Python 3.11` or higher, you +are good. Skip to Step 2. + +If you see `Python 3.10` or lower, or `command not found`, you need to install or update +Python. + +### Installing Python + +Go to [python.org/downloads](https://www.python.org/downloads/) and download the +latest version (3.12 or 3.13). + +**Windows:** Run the installer. On the first screen, check **"Add Python to PATH"** +before clicking Install. This is easy to miss and important. + +**Mac:** The python.org installer works fine.
Alternatively, if you use Homebrew: +`brew install python` + +**Linux:** `sudo apt install python3 python3-pip` (Debian/Ubuntu) or +`sudo dnf install python3` (Fedora). + +After installing, close and reopen your terminal, then run `python --version` again to +confirm. + +--- + +## Step 2 — Get an Anthropic API key + +memsync uses the Claude API to update your memory file. You need an API key to +authenticate with Anthropic's servers. + +1. Go to [console.anthropic.com](https://console.anthropic.com) and sign in or + create an account. +2. In the left sidebar, click **API Keys**. +3. Click **Create Key**, give it a name like "memsync", and copy the key. It starts + with `sk-ant-...`. + +Keep this key somewhere safe. You won't be able to see it again on the Anthropic website. + +### Setting the API key in your terminal + +memsync looks for the key in an environment variable called `ANTHROPIC_API_KEY`. +You need to set this so it's available every time you open a terminal. + +**Mac / Linux:** + +Open your shell config file in a text editor. If you use zsh (default on modern Macs): + +```bash +open -e ~/.zshrc +``` + +If you use bash: + +```bash +open -e ~/.bashrc +``` + +Add this line at the bottom, replacing `your-key-here` with your actual key: + +``` +export ANTHROPIC_API_KEY="sk-ant-your-key-here" +``` + +Save the file, then close and reopen your terminal. To verify it worked: + +```bash +echo $ANTHROPIC_API_KEY +``` + +You should see your key printed back. + +**Windows (PowerShell):** + +To set it permanently for your user account, run this in PowerShell (replacing the key): + +```powershell +[System.Environment]::SetEnvironmentVariable("ANTHROPIC_API_KEY", "sk-ant-your-key-here", "User") +``` + +Close and reopen PowerShell to pick up the change. 
To verify: + +```powershell +echo $env:ANTHROPIC_API_KEY +``` + +--- + +## Step 3 — Install memsync + +With Python installed and your API key set, installing memsync is one command: + +```bash +pip install memsync +``` + +This downloads memsync and its dependencies from the internet. + +> **If `pip` is not found on Mac/Linux**, try `pip3 install memsync` instead. +> +> **If you see a permissions error on Mac/Linux**, try: +> `pip install memsync --user` +> +> **If you see a permissions error on Windows**, right-click PowerShell and choose +> "Run as administrator", then try again. + +To confirm the installation worked: + +```bash +memsync --version +``` + +You should see something like `memsync 0.2.0`. + +--- + +## Step 4 — Set up your memory file + +Run the setup command: + +```bash +memsync init +``` + +memsync will look for your cloud sync folder automatically. What happens next depends +on what it finds: + +- **One provider detected:** It confirms the path and sets up immediately. +- **Multiple providers detected:** It lists them and asks you to choose one. +- **Nothing detected:** It asks you to specify a path manually (see below). + +### If auto-detection fails + +If memsync can't find your cloud folder, you can tell it where to look: + +```bash +memsync init --sync-root /path/to/your/cloud/folder +``` + +Some common paths: + +| Service | Mac | Windows | +|---|---|---| +| OneDrive | `~/OneDrive` | `C:\Users\YourName\OneDrive` | +| iCloud | `~/Library/Mobile Documents/com~apple~CloudDocs` | `C:\Users\YourName\iCloudDrive` | +| Google Drive | `~/Google Drive` | `G:\My Drive` | +| Dropbox | `~/Dropbox` | `C:\Users\YourName\Dropbox` | + +Example: + +```bash +memsync init --sync-root ~/OneDrive +``` + +### What init creates + +After a successful `init`, you'll see output like: + +``` +memsync initialized. 
+ + Provider: OneDrive + Sync root: /Users/ian/OneDrive + Memory: /Users/ian/OneDrive/.claude-memory/GLOBAL_MEMORY.md + CLAUDE.md: /Users/ian/.claude/CLAUDE.md → (symlink) +``` + +Two important things were created: + +1. **`GLOBAL_MEMORY.md`** — your memory file, living in your cloud folder so it syncs + across all your machines. +2. **`~/.claude/CLAUDE.md`** — a link (or copy on Windows) that points to your memory + file. Claude Code reads this at the start of every session. + +--- + +## Step 5 — Fill in your memory file + +Your memory file starts empty with placeholder prompts. You need to fill it in. + +Find the file at the path shown by `memsync init` — it ends in +`/.claude-memory/GLOBAL_MEMORY.md`. Open it in any text editor: + +**Mac:** `open -a TextEdit ~/OneDrive/.claude-memory/GLOBAL_MEMORY.md` +**Windows:** Navigate to the file in File Explorer and open it in Notepad. + +You'll see this starter template: + +```markdown +# Global Memory + +> Loaded by Claude Code at session start on all machines and projects. +> Edit directly or run: memsync refresh --notes "..." + +## Identity & context +- (Fill this in — who you are, your roles, active projects) + +## Current priorities +- (What you're working on right now) + +## Standing preferences +- (How you like to work — communication style, output format, etc.) + +## Hard constraints +- (Rules that must never be lost or softened through compaction) +``` + +Replace the placeholders with real content. Here's an example of a filled-in file: + +```markdown +# Global Memory + +## Identity & context +- Jamie, product manager at a fintech startup +- Side project: building a personal budgeting app in Python +- Work machine: Windows laptop. Home machine: MacBook Pro. 
+- Comfortable reading code, less comfortable writing it from scratch + +## Current priorities +- Finish MVP of budgeting app by end of month +- Q2 roadmap presentation to leadership next Tuesday +- Onboarding new engineer starting Monday + +## Standing preferences +- Keep explanations concise — I can ask for more if needed +- When writing code, explain what each part does +- Prefer simple solutions over clever ones +- Always ask before deleting or overwriting anything + +## Hard constraints +- Never commit API keys or passwords to code +- Always confirm before making changes that can't be undone +``` + +A few tips: +- **Be specific.** "I'm a product manager" is less useful than "Jamie, PM at a fintech startup, working on the mobile app." +- **Keep it short.** This file has a soft limit of 400 lines. If it's getting long, you're putting too much in it. +- **Hard constraints are enforced.** Items in the `## Hard constraints` section are + never removed during automatic updates — memsync checks for this in code. + +For more guidance on what to include, see `docs/global-memory-guide.md`. + +--- + +## Step 6 — Verify everything is connected + +Run: + +```bash +memsync status +``` + +You should see something like: + +``` +Platform: macOS (Darwin) +Config: /Users/jamie/.config/memsync/config.toml ✓ +Provider: onedrive +Model: claude-sonnet-4-20250514 +Sync root: /Users/jamie/OneDrive ✓ +Memory: /Users/jamie/OneDrive/.claude-memory/GLOBAL_MEMORY.md ✓ +CLAUDE.md: /Users/jamie/.claude/CLAUDE.md → symlink ✓ +Backups: 0 file(s) +Session logs: 0 day(s) +``` + +Every line should show a `✓`. If anything shows `✗`, see the Troubleshooting section +at the end of this guide. + +You can also run the built-in health check: + +```bash +memsync doctor +``` + +This checks each component and tells you exactly what's wrong if something isn't set up. + +--- + +## Your daily workflow + +Once set up, using memsync takes about 30 seconds at the end of a session. 
+ +### After a productive session + +At the end of a Claude Code session where something important happened — a decision was made, a problem was solved, a preference was discovered — run: + +```bash +memsync refresh --notes "What happened in this session" +``` + +Your notes can be as brief or as detailed as you want. Examples: + +```bash +memsync refresh --notes "Finished the auth module. Decided to use JWT tokens instead of sessions — simpler for our use case." + +memsync refresh --notes "Discovered that the CSV parser breaks on files with Windows line endings. Fixed it with universal newlines mode." + +memsync refresh --notes "Switched from Flask to FastAPI for the API. Flask felt too verbose." +``` + +The Claude API reads your notes and your current memory file, decides what to update, +and writes a new version of the file. The old version is backed up automatically. + +You'll see output like: + +``` +Refreshing global memory... done. + Backup: /Users/jamie/OneDrive/.claude-memory/backups/GLOBAL_MEMORY_20260321_143022.md + Memory: /Users/jamie/OneDrive/.claude-memory/GLOBAL_MEMORY.md + CLAUDE.md synced ✓ +``` + +### When nothing important changed + +If you had a routine session with no decisions or changes worth remembering, you don't +need to run refresh. It's a deliberate action for meaningful updates, not a mandatory +end-of-session ritual. + +### Preview before writing + +Not sure what the refresh will do? Use `--dry-run`: + +```bash +memsync refresh --notes "your notes" --dry-run +``` + +This shows you what the updated file would look like (as a diff) without writing +anything. Nothing is changed until you run the command without `--dry-run`. + +--- + +## Useful commands to know + +### See your current memory + +```bash +memsync show +``` + +Prints the full contents of your memory file to the terminal. 
+ +### See what changed in the last refresh + +```bash +memsync diff +``` + +Shows a line-by-line comparison of the current memory file vs the most recent backup. +Lines starting with `+` were added; lines starting with `-` were removed. + +### Read notes from a file + +If you've been writing session notes in a text file as you go: + +```bash +memsync refresh --file my-notes.txt +``` + +### Pipe notes from another command + +```bash +echo "Switched to the new deploy pipeline, everything works" | memsync refresh +``` + +--- + +## Setting up on a second computer + +One of the main benefits of memsync is that your memory syncs across machines +through your cloud folder. + +On each new machine, you just need to: + +1. Install Python (Step 1) +2. Set your `ANTHROPIC_API_KEY` (Step 2) +3. Install memsync: `pip install memsync` +4. Run `memsync init` — it will find the same cloud folder, which already has + `GLOBAL_MEMORY.md` in it + +That's it. The memory file already exists; init just wires up the local link. + +--- + +## Troubleshooting + +### "memsync: command not found" + +Python installed the memsync command somewhere your terminal can't find it. + +**Mac/Linux fix:** +```bash +pip install memsync --user +``` +Then add the user bin directory to your PATH. On Mac, add this to `~/.zshrc`: +``` +export PATH="$HOME/.local/bin:$PATH" +``` + +**Windows fix:** Close and reopen PowerShell. If it still doesn't work, try running: +``` +python -m memsync --version +``` +If that works, Python's Scripts directory isn't in your PATH. Search online for +"add Python scripts to PATH Windows". + +--- + +### "Error: provider 'onedrive' could not find its sync folder" + +memsync can't find your cloud sync folder. Tell it where to look: + +```bash +memsync config set sync_root /full/path/to/your/cloud/folder +``` + +Then run `memsync status` to confirm it's found. + +--- + +### "ANTHROPIC_API_KEY" shows ✗ in memsync doctor + +The API key isn't set in your environment. 
Go back to Step 2 and follow the instructions +for your operating system. Remember to close and reopen the terminal after adding the key. + +To test it immediately without reopening: + +```bash +# Mac/Linux +export ANTHROPIC_API_KEY="sk-ant-your-key-here" +memsync doctor +``` + +--- + +### "Error: API request failed" + +Usually means the API key is wrong or has been revoked. Check it at +[console.anthropic.com](https://console.anthropic.com) under API Keys. + +If the key is correct, check your internet connection. + +--- + +### "CLAUDE.md: ✗ (not synced)" + +The link between your cloud memory file and Claude Code's config file is broken. +Re-run init to fix it: + +```bash +memsync init --force +``` + +--- + +### "Error: API response was truncated" + +Your memory file or session notes are very long, and the Claude API hit its response +limit before finishing. The file was NOT updated. + +Fix: edit your memory file (`memsync show`, then open the file in a text editor) and +remove anything that isn't pulling its weight. Aim for under 300 lines if you're +hitting this regularly. + +--- + +### On Windows: CLAUDE.md is a copy, not a link + +This is expected, not a bug. Windows requires administrator rights to create symlinks, +which memsync doesn't ask for. Instead it copies the file. The copy is updated every +time you run `memsync refresh`, so it stays current. + +--- + +### Something else went wrong + +Run the health check: + +```bash +memsync doctor +``` + +It checks every component and prints exactly what's wrong with a ✗. Fix the flagged +items and run it again. 
+ +If you're still stuck, you can see more detail by running memsync with verbose Python +error output: + +```bash +python -m memsync status +``` + +--- + +## Keeping your memory file healthy + +A few habits that make the memory file more useful over time: + +**Update it after real changes, not routine sessions.** If you spent two hours debugging +but nothing changed about your goals or preferences, you don't need to refresh. + +**Keep the Hard constraints section intentional.** Only put things there that are +genuinely non-negotiable — rules you've been burned by before or preferences so +strong that "sometimes" isn't acceptable. This section is enforced in code; everything +in it persists forever. + +**Edit the file directly when needed.** The refresh command is for session notes, but +you can open the file in any text editor and change it by hand. If you do, sync the +copy afterward: + +```bash +memsync refresh --notes "Edited memory file directly — removed outdated project." +``` + +**Clean up old backups occasionally:** + +```bash +memsync prune --keep-days 30 +``` + +This removes backups older than 30 days. The default is already 30 days, so you can +also just run `memsync prune`. + +--- + +## What the memory file looks like from Claude's perspective + +Every time you open Claude Code in any project, it reads `~/.claude/CLAUDE.md` first. +Your memory file is loaded before any project-specific instructions. Claude sees +your identity, current priorities, and preferences before it reads a single line of +your project. + +This is why specific, personal content in the memory file works better than generic +descriptions. Claude doesn't need "I prefer clear code" — that's assumed. It does +benefit from "I'm building a budgeting app in Python, currently debugging the CSV +import, and I prefer not to have tests suggested unless I ask." + +The shorter and more specific the file, the more it helps. 
From 255264784f99156fd4c2cd2ac134094b9dafcf52 Mon Sep 17 00:00:00 2001 From: Edgecaser Date: Sat, 21 Mar 2026 13:39:50 -0700 Subject: [PATCH 3/3] memsync ready --- .claude/settings.local.json | 34 +++ .coverage | Bin 0 -> 53248 bytes memsync.egg-info/PKG-INFO | 213 ++++++++++++++++++ memsync.egg-info/SOURCES.txt | 26 +++ memsync.egg-info/dependency_links.txt | 1 + memsync.egg-info/entry_points.txt | 2 + memsync.egg-info/requires.txt | 8 + memsync.egg-info/top_level.txt | 1 + memsync/__pycache__/__init__.cpython-313.pyc | Bin 0 -> 191 bytes memsync/__pycache__/backups.cpython-313.pyc | Bin 0 -> 2346 bytes memsync/__pycache__/claude_md.cpython-313.pyc | Bin 0 -> 2361 bytes memsync/__pycache__/cli.cpython-313.pyc | Bin 0 -> 41785 bytes memsync/__pycache__/config.cpython-313.pyc | Bin 0 -> 10613 bytes memsync/__pycache__/sync.cpython-313.pyc | Bin 0 -> 6700 bytes .../__pycache__/__init__.cpython-313.pyc | Bin 0 -> 487 bytes .../__pycache__/capture.cpython-313.pyc | Bin 0 -> 4366 bytes .../daemon/__pycache__/digest.cpython-313.pyc | Bin 0 -> 3414 bytes .../daemon/__pycache__/notify.cpython-313.pyc | Bin 0 -> 3755 bytes .../__pycache__/scheduler.cpython-313.pyc | Bin 0 -> 9542 bytes .../__pycache__/service.cpython-313.pyc | Bin 0 -> 6175 bytes .../__pycache__/watchdog.cpython-313.pyc | Bin 0 -> 891 bytes .../daemon/__pycache__/web.cpython-313.pyc | Bin 0 -> 5105 bytes .../__pycache__/__init__.cpython-313.pyc | Bin 0 -> 3721 bytes .../__pycache__/custom.cpython-313.pyc | Bin 0 -> 1506 bytes .../__pycache__/gdrive.cpython-313.pyc | Bin 0 -> 3170 bytes .../__pycache__/icloud.cpython-313.pyc | Bin 0 -> 2249 bytes .../__pycache__/onedrive.cpython-313.pyc | Bin 0 -> 3135 bytes .../conftest.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 2373 bytes .../test_backups.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 18308 bytes ...est_claude_md.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 20191 bytes .../test_cli.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 88027 bytes 
.../test_config.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 20587 bytes ...aemon_capture.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 19541 bytes ...daemon_digest.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 10272 bytes ...daemon_notify.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 8402 bytes ...mon_scheduler.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 24584 bytes ...emon_watchdog.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 1603 bytes ...st_daemon_web.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 16023 bytes ...est_providers.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 27676 bytes .../test_sync.cpython-313-pytest-9.0.2.pyc | Bin 0 -> 28429 bytes 40 files changed, 285 insertions(+) create mode 100644 .claude/settings.local.json create mode 100644 .coverage create mode 100644 memsync.egg-info/PKG-INFO create mode 100644 memsync.egg-info/SOURCES.txt create mode 100644 memsync.egg-info/dependency_links.txt create mode 100644 memsync.egg-info/entry_points.txt create mode 100644 memsync.egg-info/requires.txt create mode 100644 memsync.egg-info/top_level.txt create mode 100644 memsync/__pycache__/__init__.cpython-313.pyc create mode 100644 memsync/__pycache__/backups.cpython-313.pyc create mode 100644 memsync/__pycache__/claude_md.cpython-313.pyc create mode 100644 memsync/__pycache__/cli.cpython-313.pyc create mode 100644 memsync/__pycache__/config.cpython-313.pyc create mode 100644 memsync/__pycache__/sync.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/__init__.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/capture.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/digest.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/notify.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/scheduler.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/service.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/watchdog.cpython-313.pyc create mode 100644 memsync/daemon/__pycache__/web.cpython-313.pyc create mode 100644 
memsync/providers/__pycache__/__init__.cpython-313.pyc create mode 100644 memsync/providers/__pycache__/custom.cpython-313.pyc create mode 100644 memsync/providers/__pycache__/gdrive.cpython-313.pyc create mode 100644 memsync/providers/__pycache__/icloud.cpython-313.pyc create mode 100644 memsync/providers/__pycache__/onedrive.cpython-313.pyc create mode 100644 tests/__pycache__/conftest.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_backups.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_claude_md.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_cli.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_config.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_capture.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_digest.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_notify.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_scheduler.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_watchdog.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_daemon_web.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_providers.cpython-313-pytest-9.0.2.pyc create mode 100644 tests/__pycache__/test_sync.cpython-313-pytest-9.0.2.pyc diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..3b809d7 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,34 @@ +{ + "permissions": { + "allow": [ + "Bash(pip install:*)", + "Bash(python -c \":*)", + "Bash(python -m pytest tests/ -v)", + "Bash(python -m pytest tests/ --cov=memsync --cov-report=term-missing -q)", + "Bash(python -m pytest tests/ -q)", + "Bash(memsync doctor:*)", + "Bash(python -m pytest tests/test_cli.py -v)", + "Bash(python -m pytest tests/test_cli.py::TestCmdDoctor::test_all_checks_pass_returns_0 -v -s)", 
+ "Bash(python -m pytest tests/test_cli.py -q)", + "Bash(python -m pytest -m smoke -v)", + "Bash(ruff check:*)", + "Bash(bandit -r memsync/ -c pyproject.toml)", + "Bash(echo \"=== SMOKE \\(25 tests, <2s\\) ===\")", + "Bash(python -m pytest -m smoke -q --no-cov)", + "Bash(bandit -r memsync/ -c pyproject.toml -q)", + "Bash(python -m pytest tests/ -q --no-cov)", + "Bash(python -c \"from memsync.daemon import DAEMON_VERSION; print\\(''daemon __init__ ok, version:'', DAEMON_VERSION\\)\")", + "Bash(python -m pytest tests/ -x -q)", + "Bash(python -c \"import apscheduler; import flask; print\\(''OK''\\)\")", + "Bash(python -m pytest tests/test_daemon_notify.py::TestNotifyEmail::test_uses_env_var_password_over_config -v)", + "Bash(python -m pytest tests/test_daemon_notify.py::TestNotifyEmail::test_uses_env_var_password_over_config -v -s)", + "Bash(python -m pytest tests/test_daemon_scheduler.py::TestJobNightlyRefresh::test_skips_when_no_session_log -v -s)", + "Bash(python -m pytest tests/ -q --cov-report=term-missing)", + "Bash(python -m pytest tests/test_cli.py::TestDaemonCLIGuard::test_schedule_shows_jobs -v -s)", + "Bash(python:*)", + "Bash(grep -v \"^$\")", + "Bash(memsync refresh:*)", + "Bash(memsync config:*)" + ] + } +} diff --git a/.coverage b/.coverage new file mode 100644 index 0000000000000000000000000000000000000000..fdd7c439462b9d22a6f5da2280ff998aa57ec30d GIT binary patch literal 53248 zcmeI4du$ZP9mjX~_8xn;w{v$6JsAhQG;)j^W3Wg?p;ZHkDdtgZ0!brkdhFfp*&FU| z&F-F!qw=`minfsoZPiF24^xRI?LVbyX(L61rtKe*`l2tjO;Jeu4?y{T&5K*$coKq#dfX=;7aQ1YoE)$Ej&at%{D*tyGcYHR4Z#g;B-wY*); zys255o|n~KYN}wV>04H?lR|ShfLC1`(ArKbwB0FLx>gmPO5t0SEM+eh&VX@w-Njld zNG}bAZJYCjToRB!UdRp8#(@&fg)M!ik~5XH^ipNtciY^eEi3h{X-*e8ZBeIGA+PN$ zsO84*lq)4W?P}!pPMKB$JfJP)i!X0t8SqMVm$@?ww@wMAVCi-|0ga#NhMhPb4fx}& zt?YrFwxduyiKJ$k_ax$wYYkkf37*mA~0%L)jEmO#+Xjd_59a8#;YAAzhSLv)X zEPT+81g_I9_~NTuXL6mg3naSBhrwdA&mV7TVV@LiH!8JODe%|0klT{C7)ISwIaM(W zhC7hF*C6SZsEm?sA1R%I>MZj6;;UL_s!l0iqPuhuDEfJSe9j!U==2CWF42#-rV=!z 
z(gkjq2;RT>!-_Q|-)uWg2E*IEzWB%I%w#wNKK*!VOr|Ln zlh>_WSTLpKxQSW7cARrUbp$GLmp$a_u(Lz= z?$=FIgK3pcTjmNkRc;?rOgajg?Q&Arv#OGJ<_0LS)`V+nrfl1A^Q`nhMZ+AU8iOP4 zwoUHjLWlV+(COU%1qZpDl24cBD>@B3sxbD`&PlK13?fiaZih~3LKnS3(-ke9r6ZQB z-!%+OsA^70JKdc&N|oa1kb0AxGaL0NmTGA^Ri;^NV{FG7RZKZ$sBp40m)eE_v7sBS za%6hpj5~%pwdrX0yqE9LCQfwIJzXUS=&f9_mG#H(yN@jvN{p-K^OQexyHaBZb$$LAOR$R z1dsp{Kmter2_OL^fCP}hokzgKa;%@O|2gtkhP(?q+>ihgKmter2_OL^fCP{L53;y0b#*W8`e+bp#gTIiIY+MCmK8*!kpL1v0!RP} zAOR$R1dsp{Kmter2_S(o0nsy$b-xDSf}R$k^eq7W{XZ7m&yXLHJZY2uB7H+z6`l+~ zA6_p`i7$)W#YpHUAtNLQPeCT!kN^@u0!RP}AOR$R1du@E1lGlPrr7+@ih)N=cuIIc zQ}P*g0G@K~F|-kNphr&?a%$c(2i9m-Zy`C5Q*-86K2;9Wikj2&1EXpZo*C|og4R6^ zYbjQ0D6J2IV1EPz+Zq+5PvfQwS=9i=PZLmVZB#L@TUus}%1N^!m(~VV(*n7TVUSzU zsC6l2*n)QrfL@;ndJQ*7BBA9qE0F-fo)8Fz>o$1m>iiWm+3G-QSr1b-78mmUmX48IWGApTqYe&a(uDj)$QfCP{L z5-b{45s}LJpYP?1P4!Dv*8iS0d@)+T zP~a#NG1rh5Y+ zNQ*_e!`y-(9Q8!FBioiQnZADM)CB+9+i$g8JA7pR)G^`R(`UXd?3M(`GM{imZP)vL zKXts71RyR;+}7~%R}Oyf$5)z&pC*JkX6tu$zcxC0KIZp9LPX?(tzI}-$z8oWefGIa zr+$6(^yIVC(|b=o8oz%2jp^y>wafPKUREf6-h1`R{v{0Ke|g!VO9#JFWWmrSr@l4O zCo#hr-{Z{12ev&w_0F05k4;{9^YY2&eV=_|@vrtwoP6h-`m_CK?`Qt`%L9{7@7a3t zfB*W$t%UCkU0DKe*(|{Un6gm%j5z%OI{*J$TQ??ZO8JrY0yNB{{S0VIF~kN^@u0!RP}AOR#$4*~jp0vn3iK`?3u zLc|UN#18zD9r(g_5EJbn8nS~(&<==T2U5Te!uEF*tmw02oYxM#ydCf!JMeHkd;wtp z{?A0}u^P)D0VIF~kN^@u0!RP}AOR$R1dsp{KmxZ50s8wtuK#bB)=3.11 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: anthropic>=0.40.0 +Provides-Extra: dev +Requires-Dist: pytest>=8.0; extra == "dev" +Requires-Dist: pytest-cov>=5.0; extra == "dev" +Requires-Dist: pytest-mock>=3.12; extra == "dev" +Requires-Dist: ruff>=0.4; extra == "dev" +Requires-Dist: bandit[toml]>=1.7; extra == "dev" +Dynamic: license-file + +# memsync + +Cross-platform global memory manager for Claude Code. + +Claude Code has no memory between sessions. 
memsync fixes that: it maintains one canonical `GLOBAL_MEMORY.md` in your cloud sync folder, linked to `~/.claude/CLAUDE.md` so Claude Code reads it at every session start. + +After a meaningful session, run `memsync refresh --notes "..."` and the Claude API merges your notes into the memory file automatically. + +--- + +## How it works + +``` +OneDrive/.claude-memory/ + GLOBAL_MEMORY.md ← source of truth, synced across all machines + backups/ ← automatic backups before every refresh + sessions/ ← raw session notes, append-only audit trail + +~/.claude/CLAUDE.md ← symlink → GLOBAL_MEMORY.md (Mac/Linux) + copy of GLOBAL_MEMORY.md (Windows) +``` + +Every Claude Code session starts by reading `~/.claude/CLAUDE.md`. memsync keeps it current. + +--- + +## Requirements + +- Python 3.11+ +- An Anthropic API key (`ANTHROPIC_API_KEY` env var) +- One of: OneDrive, iCloud Drive, Google Drive — or any folder you specify + +--- + +## Installation + +```bash +pip install memsync +``` + +--- + +## Quick start + +```bash +# 1. Initialize (auto-detects your cloud provider) +memsync init + +# 2. Edit your memory file — fill in who you are, active projects, preferences +# File is at: OneDrive/.claude-memory/GLOBAL_MEMORY.md + +# 3. After a Claude Code session, merge in your notes +memsync refresh --notes "Finished the auth module. Decided to use JWT tokens, not sessions." + +# 4. 
Check everything is wired up +memsync status +``` + +--- + +## Commands + +| Command | Description | +|---|---| +| `memsync init` | First-time setup: create directory structure, sync to CLAUDE.md | +| `memsync refresh --notes "..."` | Merge session notes into memory via Claude API | +| `memsync show` | Print current GLOBAL_MEMORY.md | +| `memsync diff` | Diff current memory vs last backup | +| `memsync status` | Show paths, provider, sync state | +| `memsync providers` | List all providers and detection status | +| `memsync config show` | Print current config | +| `memsync config set <key> <value>` | Update a config value | +| `memsync prune` | Remove old backups | + +### `memsync refresh` options + +```bash +memsync refresh --notes "inline notes" +memsync refresh --file notes.txt +echo "notes" | memsync refresh +memsync refresh --notes "..." --dry-run # preview changes, no write +memsync refresh --notes "..." --model claude-opus-4-20250514 # one-off model override +``` + +### `memsync init` options + +```bash +memsync init # auto-detect provider +memsync init --provider icloud # use a specific provider +memsync init --sync-root /path/to/folder # use a custom path +memsync init --force # reinitialize even if already set up +``` + +### `memsync config set` keys + +```bash +memsync config set provider icloud +memsync config set model claude-opus-4-20250514 +memsync config set sync_root /path/to/custom/folder +memsync config set keep_days 60 +memsync config set max_memory_lines 300 +memsync config set claude_md_target ~/.claude/CLAUDE.md +``` + +--- + +## Cloud providers + +| Provider | macOS | Windows | Linux | +|---|---|---|---| +| OneDrive | ✓ | ✓ | ✓ (rclone) | +| iCloud Drive | ✓ | ✓ | ✗ | +| Google Drive | ✓ | ✓ | ✓ (rclone) | +| Custom path | ✓ | ✓ | ✓ | + +Detection is automatic. If multiple providers are found during `memsync init`, you'll be prompted to choose. + +**Windows note:** Symlinks require admin rights or Developer Mode on Windows. 
memsync copies `GLOBAL_MEMORY.md` to `~/.claude/CLAUDE.md` instead. The copy is refreshed on every `memsync refresh`. + +**iCloud note:** iCloud Drive doesn't sync dot-folders on Mac. memsync stores data in `claude-memory/` (no leading dot) when using the iCloud provider. + +--- + +## Configuration + +Config file location: +- macOS/Linux: `~/.config/memsync/config.toml` +- Windows: `%APPDATA%\memsync\config.toml` + +Config is machine-specific — two machines can use different providers pointing to the same cloud storage location. + +Example config: + +```toml +[core] +provider = "onedrive" +model = "claude-sonnet-4-20250514" +max_memory_lines = 400 + +[paths] +claude_md_target = "/Users/ian/.claude/CLAUDE.md" + +[backups] +keep_days = 30 +``` + +To update the model when Anthropic releases new ones: + +```bash +memsync config set model claude-sonnet-4-20250514 +``` + +--- + +## What belongs in GLOBAL_MEMORY.md + +The memory file is your **identity layer** — not a knowledge base, not project docs. + +Good things to include: +- Who you are, your roles, active projects +- Current priorities and focus +- Standing preferences (communication style, output format) +- Hard constraints (rules that must never be softened through compaction) + +See `docs/global-memory-guide.md` for a complete guide. + +--- + +## Known limitations + +- **Concurrent writes:** Running `memsync refresh` on two machines simultaneously will result in the last write winning. The losing write's backup is in `backups/`. Risk is low since refresh is a deliberate manual action. +- **Max memory size:** The memory file is kept under ~400 lines. Very dense files may hit the 4096 token response limit — reduce the file size if you see truncation errors. + +--- + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md). To add a new cloud provider, see [docs/adding-a-provider.md](docs/adding-a-provider.md). 
+ +--- + +## License + +MIT diff --git a/memsync.egg-info/SOURCES.txt b/memsync.egg-info/SOURCES.txt new file mode 100644 index 0000000..9979cb0 --- /dev/null +++ b/memsync.egg-info/SOURCES.txt @@ -0,0 +1,26 @@ +LICENSE +README.md +pyproject.toml +memsync/__init__.py +memsync/backups.py +memsync/claude_md.py +memsync/cli.py +memsync/config.py +memsync/sync.py +memsync.egg-info/PKG-INFO +memsync.egg-info/SOURCES.txt +memsync.egg-info/dependency_links.txt +memsync.egg-info/entry_points.txt +memsync.egg-info/requires.txt +memsync.egg-info/top_level.txt +memsync/providers/__init__.py +memsync/providers/custom.py +memsync/providers/gdrive.py +memsync/providers/icloud.py +memsync/providers/onedrive.py +tests/test_backups.py +tests/test_claude_md.py +tests/test_cli.py +tests/test_config.py +tests/test_providers.py +tests/test_sync.py \ No newline at end of file diff --git a/memsync.egg-info/dependency_links.txt b/memsync.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/memsync.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/memsync.egg-info/entry_points.txt b/memsync.egg-info/entry_points.txt new file mode 100644 index 0000000..64b942e --- /dev/null +++ b/memsync.egg-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +memsync = memsync.cli:main diff --git a/memsync.egg-info/requires.txt b/memsync.egg-info/requires.txt new file mode 100644 index 0000000..d6ddded --- /dev/null +++ b/memsync.egg-info/requires.txt @@ -0,0 +1,8 @@ +anthropic>=0.40.0 + +[dev] +pytest>=8.0 +pytest-cov>=5.0 +pytest-mock>=3.12 +ruff>=0.4 +bandit[toml]>=1.7 diff --git a/memsync.egg-info/top_level.txt b/memsync.egg-info/top_level.txt new file mode 100644 index 0000000..0a34709 --- /dev/null +++ b/memsync.egg-info/top_level.txt @@ -0,0 +1 @@ +memsync diff --git a/memsync/__pycache__/__init__.cpython-313.pyc b/memsync/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..862e518c279243fa4462f012d21c40ebc33acaca GIT binary patch literal 191 zcmey&%ge<81QzG_Wr_jm#~=<2FhUuhIe?6*48aUV4C#!TOjWD~dPaH%ewvK8xZ~r? zQj3Z+^Yh~4S2BDCslVmoY!wq)3>1yYOw3D5jq%S*bt%d$OO0{KPcF?(%_}L6anCIA zC{2pVP0cN?%u9yT@$s2?nI-Y@dIgoYIBatBQ%ZAE?TXldhJsvD407lPW=2NF8+@XT J+(j%vE&$;5GrIr) literal 0 HcmV?d00001 diff --git a/memsync/__pycache__/backups.cpython-313.pyc b/memsync/__pycache__/backups.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a892224cc554ae84893b31918a648d5583e4eaac GIT binary patch literal 2346 zcma)7TW=Fb6rR2KwsstwU~-{_#b6RcV_I&3a1%)&G&d@5U4de1ZR|1GV((frYapQ# zR$rQ4q=r@~JX9)=;jM3dt1A9L;|7sv71~NvmHGxsDph&vnO)lOy`Gr!oyn_7KQ{O42eXv z=fz>ykej$c_bILp^pQwBiln%0&8zqokFEKXfa0|^={g^8`ipH9=PbXf>xQYCnxWIN zIB$7#s)aDeC$^w!XqHWt$qp0eKgMIY)QWARhx^VV|6R! zyj2wL80XCCe2P=}6d!~vj|km)OUUimnL9YCFm(CG10Sar&CnZTCKU{zL7dKQKXT9Z(? zeJ=2I;G5u5sIwaCoNrzXov6IK?2kN1%(u_`zdy3j)>HNOR0dW=>C=%0!HU7KN`Prv+WgfUmd< zrNsZjNfNNMRTM!H>)6pR!b5Yku9oj(3L1WcYP^ zN~}jNqfv-_R7J>-90cz{Ly0CshXCgeJ*3mP>UqJhxxQ_e;>O-PK|%yE6&W)L3$3=-s zV3w~Drsd9-O(UPTJhkvFe~k}}ahBVp8K98Go0KvTFfM=)RwT<+B4r(u{h%{M)9+va zG<#8S$5-jk(_eh}tu}wPx~r$!)Kj^*-0?yg_k4p zQY2Q5#Fir6)kyb3;_OfSVx)iO;!3!EDIEPJ9DR6v{@h~t)J)%UbNGup%b}f1p=dP} zeG=NY9BQ2l-U~js^E7nur3;02zVxBCGcUHIKucxppEVB(b*v)J7q$YS7XrN3DGU6E z^X+|Y{NpyMZ@=qtjDs-?RB1?qKtnJE1ZvoT2tcf@ve828u+q6rWa1U!ByTszP6raQ zS|~KILk+?}0j&=DO?O@+)J%|8*yMcx2%PRs=s7qpSR#WFX$J%ATYPe@;wK2=MfwfUj}q#!s8owHVo zw`7CJV}_1pEidc!i-XGeHe`|@EV;2bXQx6A!pEj`n+kAuHWDU>z|L}}Z^FRx?Re0; z7~EeO{Iy-4>6v}!v)%>qz{4C2cBXMR34$YG*PUHcTeIB6zL)F|aAC}GTx8HJLDNn0 zHfZY-tMxd$rmw}tSKOG99ERbw*=axXmOr-hjr%PrlgXFi4#b%ZVU;GV4LY>98Si1Y zVnLg5wr6CsS?3gkCHn~3i8#FUo-P`>asi(u$HBzlOJgtq132y(iatk8zoD*YsBO*d XxZVh>T$+6uKL5JE`^YH-4@Bm~8YN|i`v?H$M4*t_oR zZou>a>ZMXI?V(5Vtl;;=^*~bkow-YLo~-D)VJVho=;k#T^Vb!tRA_F-Fi3!Z5Fu_i z0JHbe3b*7>@2{2s(OI$_`~u?vj8^%UZ3&LmS%%S>=2SK~aRVXcC_<}J3$^5jq%~@^ z!{`QgrpXbbbsji%??>dC14D&W3|eb~F3m4qn4Zrq 
z%q}c0ea^6{aCku}q~|r=zCD2plqeR#3M{XbHGLz6uM}aV8~P{U#`js&$^a+>P_1Ed zTQe;aD;Bm&B;25L66ck|2G%TWS8-9JriBx!DkUussl2k0#HN95lTfTGmVz}C>tqu~ zWy7?9(Px^j8n?^|*yol~GjYMF*6?rlerVA$F(qHx8U-S1MbpzWu()hg!NG!|k`!Jp zk;WR$3{z61#4HgN8#-p2)bP#LS(>(+Cm9`;YDJ4tQ3?f8wUj)Z69QSKO2N_!Bo$sx z20XEPk!d%l7y-3y;buvS>O|mv)6zmE+?Aj&Gx@_my znqk{(3uVPtNv@)%sx^;i*+HY=lV!H~461u*=LixGx_rOG_q+U{!w)`O{&8lD^~T?d zk#FvPeYY+idvK*Lj=q$7zU%t7>-*K`Qfgc3*$E-(uqzHZ;*cxG9WlNw_q%e;kz;ik zKb&(%&N?G!>m%cJd3?L8cN?(MfFlh&mj+*py{>rN5s$lK)DfevTc!t-ZuFEBJ+*au zwjRCsT)MQw^FrER8FQrAgRfplV}HGrdv{SlNWX2d`B_Ts2r%)t*$W4Hdf-qdik?Ps z=6vw!6bJP_sb(?@Ld@;{107$gn}Pwuz0xU*Dqz+*kP%qfSvs-G3170jKB)f@8Cox%1OgWLMEoo{e6cqZl8Di9ljMgI`KbmyMrk%0r`dG$^WVWPCBb_** z@0ZAtoglQ0EPB#|r_<<3Iy`+Y_+*lUIvJwJfD>g2MA_%W@3Cjj0_gGA44wQphQ8%oTo2brM*;9JKT8Po literal 0 HcmV?d00001 diff --git a/memsync/__pycache__/cli.cpython-313.pyc b/memsync/__pycache__/cli.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d5e0bb10abf2d1c267c7d186c8c7491483a5712 GIT binary patch literal 41785 zcmd7533Oc7c_vuq0$l$DN^bdq^RK(2C$cGxlF)1EoqLq+T!hnaJF zzW=_Z78Mt#C+GAdc9lEZ9r?RIobLImM)PyJQ7*mY(SG&| ztw!?=4X@#~Lz+RYpykM~8`23n_Ny24?AIU|*soDAvR{*6!mobFJZKRtavZB*#WTZ@ zZO|^**)!viW6&u$2VH_|&@H$JJ%WclHw}3QeS)9)%|n5~BB5xoSSV)qmZ6ftQlWIP zOeh;H7s}Z)>rlmDrBFFoB~%So3)Ohmz}w0-FB*gzzLB>xAICTG4(6+c&&hms@VS_; z9zHkoHNfX#zDD@G%-6&>^FHQl#xp%(n%;V&-e*ON6%PEp-~cbQfBX zuCa0HH{0DM-v}Li3tz@!b|Q^(=GzKi1@mo#uafz;!&k+8JK(EkzMb&ZFyAitIOf~U z2l-m&+rw|+>zHrv^Id#>_i2qLY|&_*-&?0qa_2PXbmw%z)?ZR-g4(n_9F5M-hv%nf zqp=a(8%~7h&(rgXiAxb7hQ|{VK||VfCOmmzVJ_{Q3C~Aj^Aqx|V`f^tiKUHm!a_7c ziNr2PCxg1QsdqMdcKTe}eJ(OTG0FTBbCkY5?F`S%Ow0+hm!_wX4T`ZaKRYoMnU74) zryUerd55t6nc48v#H=te9i5(^xEQ%OD_l-{XJ*e$#3C_PL{uRXOBV}~vqB_xUJgPz z&PSs2lX9o9_BX-BfY*L@hYj|SW{f?~bz0=xuv4Qc93rY6ovhZ3%byMlWyno~?v&vW z)rE9p?gH=SM^SA^JK3z!j9Zm3yaD+ZNYSO?jXU(?>|>^|V-6)o=9kxX>CnykkS=Nn z8OFSsH;ON$RZ7;N`88|tnAB=DG!Pmtmc5eMsrr1mj?zorwM6!Q@dTGiRo%I4H_k6&>a7|Z7edsAVj(O^AWCNawfbm 
z6=|1-5GTye&T~`K0>&Ih6nA!3K&a^$cQHJ9emWZIu!S(bIBDE=(2$;BF{aOQvB-QI zH;W{Nm#1S9E*!fMV;M+c7lgLSBd?RP4XCfmHyz&z@ zcys*A&B$|MW{Sh8=FUz>r?~0)7)SY|h%@+HI&5PLQLY)aU=Qi5Xy#COl!^;;lMAu= z*^3-~XWYxv^XI#59H#`8%Etm`>eERW0zxTQ*0i^6w)U7gJ$2q)aa}UgT}N;DvD5w^n@}>R2xZqm2~l?$L7P) z$w4yvs*ddRBHUx4%32 z-iz?-}v(@9#Hg907zgw%u`;iWRL1ck3-nE8ZE~9>jvE@oLfGpyr4A z#@-6e4}-SeQvDBiRP_4wANq~(e^}w}ZP$O;riGg`i&C$!S@f&$d_yy?&ak1(oJVsS zo}a_-m^x1)xO^!Kk}}s$VGxW{y6ob0nR#RkUwHn->)9M8yKc zwA1&@KGrZkpULx)$qV?1xX3G5u3{aWG$q8i78?0&+}0p{JuM@%(MTJGp|xr%66~-A zbtAu|NjSb={z~PPGjj&}L}wSGQ%Lml-{je8AvWJ36d?&pN{3&8oS=~nyR=CvkWfJ{ zD#@v$w>p8tPuiaK#l$EY#|icHf(G(S3z)ZZ2J#lX+Kd0Oqi_~A_bnQK^XjDyU&rE5 z%Ir&+%Tw;+l)X4&XnQ{!?_6Dw(Un~Dxquy%6sP>c~)QWcLplB%j z3`uDwDR1SXm525G|B7D8?s=s@pQNwDsAl2yO&twb*T&6Cro6gd5&43c1xADtGrWYD zSP7)7a9Cya8^ijZe<}4~Yxa|jTl8C2NwMm(70#ukJLXY*nP1+(Mz0a2^JShYz5`mN zE>mcE5V)qO9;-BsB6fxJO8MFPPLpTe71F68j7PPje^WRgC71B5hNyr{`UOj+rbsje z4cNRZyWJQqlj%Kc;rv0%*g(rXNlV$0=VR(wIFG0v^^c6xXT>hHl;)6GsXv=}GNpAq zS&7DLlr%$TrEPd4Be{2Abenk{O=hfz9^G1xubE+tQ}W9EM)jkAmo@I3N*OrKI874l zLMe>9_4Gly|B9We?fMTJv7wR(Y@VWjL$>g*@J_gbdl0c8Tc@ z=(@YX5P^azR2KH`utqbg<20j(Lgqqg@Wyt`fB)$H>db2YBBlIj*Y`D=(;>c?=-iNz zFQMO^di1Lw#2$$i-Lil~l@cr9(9#eMrxn*!P4n8`6zxc?)0FrHi^l z?y)`jDP%?h(&$p&p8l&b^mzWId=Utl`Rc2cGUhz3!Msy&-0x6l*qrd3H2W7!t2X4M zl_B#E^(aufs3+tZJCI-IO#ReW$GVbL{<@F{UsHYNYr-gY<-gC)J@Za!@|>D9WXhQd z$v0-6p3{L3D^|)0UJv60DPWXI7jII+Mh&!r)o70DM@}o8Szg>mj7L*eKIV|=Nmo(u ziI%9T$8^pp{bu;YU{k!62TQatr%;5kd0}A^++alJ_3%p+8aF?EG13w5lKBQMJR?NH zQ=)A0z%JFp&Hdpmf<$q2D@LL~FHgAsKPDiv0aIIpQ$u#Q9>G7_(wV?^LD zwP+i#K8!;X0=O9wi1&z(KAz<}DO~PW`8diOr6IPgosqeDJe<%(ss2;BK+#!la%Og6 zDw~Itc~aDjF_Xk7<|K$J#^xfE(`PSpVTqT#7>+Igw7HzkPd>aHR5XR5tQ}sMnV+7U ziAV)dn7o);hVBPCj*II$@Skf*YlA1_w%+rzvuN@0ZS8UW3*B7YJrKQwt{`H5c6u_> zkv2;ZR4i?gL7>?Cy0m#NED$G{wyG{aSJla6OTBQM^7`_xqyBH23fjAQvr_vtPJ24*?fEuQ4)3FJq4ZUdt9>xR? 
z39v3qJC&iwN+Z>Whz_Ba@~}u~MPw?iola}#(gxxV@wKu62TWt(49b^wo|~CH6P{6E zNHcH>VLl?HZ5To_#+f!rrHK((hW^dHBp%K(d2x#2nZh#&bR7RU7K*%0wfRvKp58TqHq5n@Ay&o2i>2%)11;CV|yLQ*oE3Y3|>DjPXrAlg6wbx!m8R^)43+BH>?-0za$oS{n*p}sZmqbiC*!S+%BtLE&q1! zy5(ELYlm*si!}!~$_^#V`VwV*8)ZkAO(}ob^@GInuyCs->w84kqIR88BuWus>Awcw4ZPmOw?X>kzg8{Mys{^sbW zp$Nf@{^bM7x&w*21Dl3}cMVq2R<~)WziqHCjjkL^R&GmFZWAlEi?$t`hMm&Wft7ut zt$EYXBHa$J91?A==BI%4q>e{6mEmy3s#JT$vF>y6UA8lV1Qase*5X!>Z&Q2=g(O za8{H094j^j8YBz)r_vWF!4rrB6sS*MNQA;0c~ckKW^_DDs89fM83U7)e<_5o(h5^p z(H@hEO5SGDQc^(~%ChQf&Pu6}kVdJMZj30vyh}+_Eg8s=th6mpn6_q&mh`+!Ni&S4 zdz=<>c8wAIn0M7_PJ0Taz+1tFX|tGU6`o`8LSM*0BROPj(1Z-UoiUrHkm-rWOm00L zPgGCmI4$4o3T2oMb5?t~csK9iy#y)ZP53UL8}JUA%{|af&tW$gPqH2wQQEys^k*N0-DZoS)pr`6amq4s6}l~o8PVp(} zA!Lv8>~sU)n4K5iWy;r-DPPvp=8!8F2FQBXBC)*%;{`KM&v9U%Rajz-c)h#}?Dz=A zw3YXb6Mf6BkX1?HQ9v3sRJb*#o&6Bt6qrXO@a{=^5-hqcYVEO}D~iazv)Zikx5)*} z`jJJ))~vQE4q2act;pmj<(mZwY>6Kjj(}4DPsB6_h(!mZ4*p}7%V!k1OVeQ{)$L_c zW3K1KKu1~=FOj)w;>08oEy;gm4qu=tL?AJY&;=*HSHT1%7E$5N+Tf8mIS8KSrsr5f zCU!CVU3%gX&$JjsZ!SnIWhlG&cpNP!db4U`CcuT1sp``)Qw^I`~VPMB5}N)G(oEd9nE?d|PQ7&v~?3KT{l=zyUL zc>>L}mGCsi%@ga7;hi>3$HMdTm(vD%BOIoPG_2BgR4xpzK8EizZ5ASc4kjZ4kwuYng=Y`q1>B+SFaCmA=8WC(fLjPc1NE>HnUj|EV!FZ9Lnx}-z6KL+V4PeCd zJRlZIo_ca-`b^rf5S>0d9hsV-2Wi8Lv(r(5v{;0Ga*mNRKn}6!!+pnw1)Mg;47@+Ewoml!`N8BLz4-gkFc=`{z*Cm=)F(XkYtM+DdeO7% zit(=7clGj>%Q?tHpyc|%wSns+*G4v=Nzr!~DiWT-+cN)e)Pw&AhhKEZ?zy_}%D$@y zt{h-6NAtHQzw_ezFW$0rsbB}ne%o7_^fuh`Hmside)o5~6W)VMCUBEw?Q5qtN_Qp< zMHx}If60C)Q1cLu7FiUKdwcH|wX9Wb6zy3YVJvC$8h^uZ%W&YH!D;l~_BO8>Z(Q2& z_Ai<4RTB8}#n-;birTqp*_yI@mxD=rWva9(S=yE;ZQCgANS1abO1p0E6id6r(qjN1 zN~^`1t%=fY$Cn=^ZGU;v-+apt?T(w5#b?hX{F6&ZQl;f@o8L08 z8h*#QH1IQbDOmgE=%%NUey2A*96HtGy{~7`Ma!lmh-SKDb}n52?~791(`lWjwRdH~ zzS#i5z765;$i+m6rHh-EN<>2Q`O8+S)~6FKhcze z(*3|~gO8Z@u0&nerlI?`p=dd>)}5%@wQ1Om=2~k0>Ikvs!z-;TRT)lwc%^-%MzjSt z4O92nu%Q~_C`#HJ6847AG!~;PW%s=~xl;Sa+3PdcX5NTy*c-rFr+js*yWiXY z&i>U7;6~ee8G3Xi2R*va&?AfU?{C`!l4=8KyLhQ*Jnd`YHC}XW`+@$CY`<^2W$C@` 
z_7G=o^gf7z#L@goljFEW^XHbpNUi?QD|__AjrzZ6(30C&f=7R`zhb0P|Cg0U_;Xlu znj7$CX+Qfzc(QdMGJ1845vmXA7$c+yBc$WCj1eN~IW4+*S3!o47C&}9sWwa&3vU>+ zE3q@b60@w3fgse)K}$S%f)ux)HpUjf&r?Ap37DUnU4SAB5gZTNuIWo_5 zU~+{NOza^%k9ZlPh)GL=CTUhD-Xm|h5N7ZUDl(aQmgZIAOXSeBn$Zc%f>LAWXI~bU z@U{Xnme>S5i<-NoZNL7+Ai5NQ)G~tagNAl{(pHF? zDfGxHUgpvAaaBzxWK;%k6s4IYelYWqSd@1d*j9XIv{>d{yp6YaX$z^u@($h!{u_eF ze^5NjxL?U?oaB1!8Y8MG?*e!J8c!T)VUEhZL(h;3gHo5MHDp!$ry{4mzCubI87@yr zfm$Q0mHgSygMm2DxIGh@e>nmHlQJd9`iBkmo>D&MO5Bx*k6J(4Z6u-(>TU@Ctls1L?U7 zm+4`g9D)agSK$P051ppcHmQuDDh1)I6oE=fy0ei>)3XaP;kR%bqt7uz(Pd3&RDR)q z!P_k~p&f_w|IdVW$6L8_IpJ+t9J~X9@YUB|O%-$R?R#e*ggg+txXUGhQPN$XaMv@j zQTy7|cP_ktVeJLcx9^YZf3)R;Ew>!Qp#Sa}HJ%2DXG$7Y2RDj45!qgxuvb$YU*o2u z2|@0-Dw3|ogsXAwWzp3ry7nYphZC;D9~}{0heg-Zi$__04GDL{+BwnPAi8%a-8~6+ z&qq5&caP{kv3TsR-TkIH>1nv-X-HW-DSySvP_nWuQQ0O|w68radOIQL`qXUjnm*GQ z9OlI%|IGo?jL4faSI=EJw>aVuSg zQ0vToiYOL#UdQWsLkHB=fsCv3krE$sYMp`|MGaX>4#tq-VMvfd4n&P1W1)pyd9Th4 zSaEn0RAkIKY7~}`5h#t7p-SqE!ey(C*nsU|7R|r%%0+F%F5ZstF6A!sn-`uKC3a;9 zBFwC2rVi{t3dF_bU208$!ezCQvv6outAgqLTCGJ$Tvoe`p2|^!EZ8#2>dPucb*AJYpArIXUz1%fh99evA7HIjSc9^^I?uyq1t@x|>YAJe zDqY%$c|4(7hb`RFO&32xQdDk%W&fE6WDnMU6qTn63u@lN)!;(LIeA~&b6p`C->Kj zZ<2D13NVr$Ofc^5WgF1s_|T<)K}t7Ud54s9h6!9%LvfNSl{-hJhvVezug6_1Uqy2y z7_Z9<*s>745S@KFsx}9pQeAV9u~H0K`|EF_>;+1jg}@PgfZ}JX3_)p#gq%!iNY-~D zKyHD?H+i_0$=SKf!3EIbbT2m#bpj!Gh1!Rqf^pz!w9hZ5)IV-v++9m7C;^6sEu=x^ z=4JF8d+;b|+{{gdFH<1QV47e6M2_5BP@B07J*$gpJ+yNqK980g2_>W{k->g!H-tbG zvB1=eAeqz%5TnV8MhtudZ=w;fIA{b5IbVa5HV_{g^k2Xa%@lC0`XRCOk+wk4{z zUF#F~ys$Vd1s+;Cu%=yWUjr5+p*NI5|J$Q)jfxd*qOE;ovfBX1b$5#%Dwk?~6 z*1Nu@yd5M2GD*9mKxC16MUvZ^;I@i2+t$4|`jh*QC-xst?&lNx`Q-jkVt?qnyTtJ? zh}*-W>&(ZN$$JK~^|&^7A62%F&d-`U-|StnzBwZ9>RamjS#|TZ&{}AzpOh>{SNm4? 
ztktgduk96mJ2xG>ay3@Uo0H`oiSmwQ`PM}F)@w!L?vceosYXy0LA=4W{i1E}rlISO z)1P!UB%BQoR~QkS_lT~&9|L=|y02DUsk&Nog|^Db+9a=vt{op+c4o9mnukT#$fjj9 zLs_TH-W*L7kN?f%tK}Pkwhd4Fl2J;^xbpJqdC^7a;T>Pqsx9H$!qCg4xh!EWOPVVZ z<_hfNTCpcfTN0%$YsDL-TQ~gMmMkp#yZY5*|H`@Y70}JD-J6y@cSvhx2h&Q?QWKND z*Y5R8-~Y;Yzas8FdQ%X6gGtBGJw3gAz=r3C2YQMIwrc)#YyDt}{?CfFaC6WMl^*^R zjLcsf%_ykraR#4$VJ(nRqAO|c$h$!QK3ets_dxf8`X15`>Z7JXL(~j;yy;PEZOB|l z%JYUUEvfQHEm_!tN(uTkr>rquCpMEpxgLaR)H<$W2^s25p;&2?4k_0c3KOz)gVtps zFcXKYCv?MGBsKgfB%&dkl1BIt;%BI4o3e6)4y37sUA&PfK;S7v)0=mwNC;|dLpgyB zL>*b7EuacjBtl6;qsgVzhq3b0R*iGIte94o1GeTUrC{$>EtS)Kz)*BKdIOl2v*J6l zYNBcjs3o`Ml-yaM2AeY4US{Y}MTmKHvNrJqR}8tssfo=s~Z zOM96b20Qsl)i|w<3Zn=QQ5(CTiV^RqE{Dt`t%+7z6Ip8t-$9TiS`vv}d<5p9;uxvA zruRDD=}6S=Up8EK{kH37Q}Xaw;_%qw03jR@)BF_#G~Tl7mDeiYsD3B7UY4lam+)>Wd=Vy9-sV!a7xmQYeS8MNitBbu*NxhZmi;#_C0hFaA9|7~>AVLR zq3E6l5&oM+LsfEH>`DF>qtw&9kCwc+)cSiMNzxzdQupRMR`qcocmtKXwF>gRcxZDA_Oq|UL&}IS z3!)j+M75+)Rd`A-1cVSwqCz1$O!>6Pegpyu8t~9W2RSReL|zLp!8tZp*iMCK79tGW4C;jwcqs)o%|ji1UWlRGH0;?} z7v87WHD>mabpjwikV-(Ha9kar?2Z8v_rRba<(Kx8~p3di44abXvGMOFDhktQQ-QarTm) zy}veN(?S6dFhxV2!uG=!0JoG-1b(T@nTeKV$<{;EFj@rb7kH9?@p`tq3nk(W2!(zJ zJf@C1EOdoqncdj&gLmq<$fV0#**k*OR7F7Sd<#p9Tvplydtv#{zj7DHbM~Mk-gO+v zFLngYVDy}$EhE{s9b$~N1tvZ%$a`5}FNJZr3$c!PQIDzyqwG6~b=w%LGjid1 zPUu8YRCX{rEA6J`AxFB-0COdxw5;g&Z`&%dvheMV`)oav^RSAb@|#dhVoRGz z;=V|fB5m4W81_?iOMF<4yp0~Dok~8EZo|LCvwX>$Q<}A9BQC-X1o_`+OcQL4nOoKC z4%GhhQCKm^XyZG7W-m>ZG_N^Vzakdxxzc~fU2^qHSH84r*>Ja{%C=oI-6?BbHqoS~ zTi$#9z_kObet^oWPp`g`Y~G(}-hYD^n|f|~Z|=U?BD#itY&rg^UsKd@zgT0mi4NIv z0)*|NZQG_{dqKEY{T6O9xL2Cdq9FQUuY|rMd$)Vs~Q@@ut$Qfwvdh29VanfhO`DGlGP6?X+7%Z z&4qIVscPAw2f$mn{fJd5P)5XZ?9iWnCA)3MiK1dx77&h|%xdfY9$MG_$d+_Gq!b{M zvudU8a?Z{@WanKBesn%04cd+SWTRIl<8#t~2t6~ex1 z&uF|r`@)DjlwmT(v|K*$f~ z6yuuM!DtC;;g1$)QqSz<52>o}s6iE9U1-;Sc9?87GOO&=liCIV(~QPbh2fy_AT;M7 z?ueRsTBQqrX=eI98T`DVP=YZXr->P&#Z+>zdR%baJ#Tw?G&PT#C^XD%!tyHMFKoRo-sZ@remr3Q9o5bQD>#+5~E=-Ku%E9^*Q3EX5 z7}n_+pzdw#DZ)#ZW=J&^G1$6061PjZGs98S-ky<@$Hqoa4D?PQ?TMkjXXC~e?9U7? 
zfD_jcgiS`A7(grOW}*hz|A&ge=2Zhl9oU4NZ2w& z$?y^e@mLo@8d` zKto9udZ>bDr(aQt6x?7R_UBlR9mH|~FwxXwNPGi~20k<_+%$~me6zkwnr|3E-eH1g z7=~t#{;W2r9eG~^34*R;Yg#*z*3DkXA->br{pTXs{5vNcOji;0GBr!azdH8M%%Y!T z2Rqc)|6L10hDP5nLCT1p*5B=a%d(=(w8(=+D1D4=)lHkibELQ| z!RGTwH*GjGJ3Es$N1~Ueh1sa^&nd$UfXOqSdOgPP@2X6|A+!%D=Z8t#e{!BH}>${^u&e8W!T`GPrH;G(2O)x?Fzw*9oT8N zdLzBfK<$#ktckGo*$Yg!OJLwS6-oF*mZhAa3FV_O-=sGmlJhUg`5_AFV4<@VN z#W=4tL*Av15`K)(KcYVV7N(*_&1W`^(RF7_+hRXyMAk`wy219^@NW(;A6RWoVryjG z&W*a=KhWP8+Sq$c^bKqp1}SLCvT!|qEq?vW*S;(^b#0V%Z}|2t4*iU4z4nZ_?Zo0p zN=iiZH?3jw?U7AGAJh>P6JvLZE0e`diQ=YYaeJb;ovBgWF0WcSo2>3gRCjEYcV2Pd zGgz(tj{wb6<@IYj#q!RjpsG2+M6idyX2(ZrE;hP$<{-O)INEbji;vhR+ZXYq5JmT}a<Jyf- zCF7E?y!&V6mDh?^Q7wtb*uC7cQnwOX-LW#X`qEnQngv>;KelYgrrv5&aWUGy3J}JL zu%fH(V@v&iY}@{^_rSmDhoq!$b^Gc|Kd8Ij_&yYaZ+S_3`Az@wsW&Q?onlqzy5alQ z?^ zy*Ias)qSsz(x%gX?f-kfU#syQjFI^LLrsr;@Q~(DJKaNdmOs;($URg!RHOe7)mpeY zTt9K%m@u`Uy$eq^*RRkNPi*N5vr3jCnf5Uj+GHz~nHb=4FjePYyn#1%sT&9ZHJgIw zczv!;s>BF)z^ow_UdgB@`0Gw!pwb(*wmx>Vi=>MNLal%&tNelR05dQF+ARw+WY413c2?^z zpw_B9C1cKQjF#=ZOKB6*pBhuy$xI;Sy}}esVIOZ+Uphi&)a7voSLI%LY+XutC-3Ug z!}#rIsEeD`r5*V_rVT>cU$?frhuUoPCUWtzT!z7*=*Oxu^;UeJtAFk!<$JR6_yjdj zwVpEl#rs()FQSxz|DjSA{iBw$n3b{|r7U5k1X26A`eM|=Xc(C#9;;PaI`gZvIhQ@s z%Gj(?&Ol#vX25h_!5A}L)Ci`mGTtQb01RVaTJ#*Yr^ypIZ2K(0jxH$*!O%;`L0YsV z)euI?zDKu=yk%6ZhxW>WYJ`r1Le?u**dIAr(5KM1aW{7mKo8?VxQ_u4{S!*A zh@1{`C<|3HSxG@5S`puc_$rp$%|U9w3dPEU;jdxnEo=p`2kaG<62a(4NK@*qa{2NL z&Rv8pDB3g$aUphLM9#swFCtPLCS;2@$Prb6V4)bozozixlsyABj1nZGHR6N0W@uOAf7U!cjOuOV--> z2|c|HCv8Jk(^JGB#3Tuzl?Zr}#Kf3*BxXS=&!IuEo$!A zTvuDJv@G_f9GB`B|yE4Ws_2JxJ>vm#Yd^LE!;UGJV+ z8{BBzvr(~EEboGx#JTVDyQ0jtr59mw=XT@vcPC&y=XWPowbxI*dFq{sC94?Nm9l#_ z>|{YF-_rT{Y+hpxoOL+Hx&2-1= zShjCiD{i~}%YCoEnshfN+>IOV<|RYQ?Z5i!l~)M}XTrp->w$Gs-ln^()NYIYLdcm2VIzaPKl7|f0Oz1WRizZXw<4@*(auUBWKoA7omneKoq z8v4zluQK(dBg_6}<1)Wev|<)*+@_(H$tH)inxfq?ng~88g51==MO2~)~?Jyo7!_i>0v7we3zoy;Q%6mm2a9m2{g?QzcJ3FMAqEqIJJEd10 zbJJ6k$XUIp7>wSE!t)IXJ!OG`^s9|(&*>qm1T7|AfU&S%V16_!PcmZ6$ISK2G%Q4% 
zm@g%B=P+&DFWWFbDPWQW03|m9S;xXYV%i(7wm=)#4;&-HwJ?ir5i&O(q$p@6N#;3n zlFfYmK{K)NF|hR1lGrnQ78@ikr0oM2=P)U-S%=LOur_?82e#8jCI&|MlRd|ePaGRS z8Xz-NYsG&}{-49ivr;S-b7GpMIB{+vEKCW%K(ODSubPl3>`gV8j76VV%0ICLF#I9Y zET-*yMfjFCU3K5~f2aKY@>`ajKXGHX0Ukey5uN^97Q5#4rk(oV)9%dWbf~deCmjcR zRfyA3i6qP@uuEM~Nl6J4{=a}wl4_AI`Z!zE;9z(YZ|*cg2C6G0S1kOLw{*Zl|F|mm zDWq7R*=WF9+2RdQ2dRPZ&A;$eCBriB+R-K;j!*KoW;`4Nc;cLW3aDv0n87U)J&dA^%_?YT-1H`&mKk5Z0J?jkV=Hz^yt) zVr+~A=$MkIjU%%!fGqQ`)9-X@(I!QFF$}Vh9b1Kd$_l7FLK@aFuw(gG#*Qr}l}t;N zJfzHf^W&4^7hn32(A@b}%3i2+sZJuXsQ$AEP$VD9wZJQ5R+f@7`v`yth zYI``Zz_>-*ROQGiaF|O-%UAO?AcDzGuF^eOQI$vNk3QuwLLOE8nO`NOGNPldtRCc; zjH@;~cPuV6?=(c+(5-Qg4P{bNeA)UM!+wnhOV4rT-S~*&+o{7cgf9heT9kj@OLD zb?q19WXGN$a$wHWI1+*_;A95^=|SO+&WeE4g;~C5`u@<)hEWI-&R|747dx5uvY-=W zBSBWQ7Iq@>%$~q_;D|&Ouz=8&YdJA+gnOV1cEqds`PsQVdn0oBaZ1JP>#4CCw4~rv5+1kPWb;pgcOFAFvN0}^VAdVm*RNTQU3pAp{ zo46Ne&ycVHiAV9e1ExoziO==U3p1^~WR{jH1qLE*csV>hANSbQhM9!`VX7k-WD77m zm;=29a~KO}!02F)WQ?uoWcV`zWj?aqk~SadJ2etIeq4Bys{9Q&Y0r_qlRdr1`i@MT z7#r>F~hre*vqQk5aE+{%YCJ$TAY`v{9gEs(s3xEDK68ZOk3#mBJ@)% zZD6`##@Gy$In&;u$mKJ$Ab|$J<_W^W9E_FnY;Ocf7NJXaegPlC%h8}Gi=p~ER{Gc^ ztfkJUO&4A!d!}jA1p6>xpA-u?4zQc3aDB+B1{wAb{GyYO?!=G91^glypQw~OW9pt(e;jN9jgs% z&h=MdH^Msr#J;Xg5}w?&C7&8g;ID)S>~8m8J@mCh8}`ao>xMm;Dym5qZA}zyUGGlr zIF-O((W#~WR8!mXp<9mnJ6k%I`XN9`IGXRc%V1sPmK!=g3HP?OCOAum+ZJ!qQkt-o zCN0$oOZDpf`jkk%ee6YL!czH7el_r3={u#r^TPTwq6JTwl@U(^Ok19Q=k)5}+8F@; zo0jbi;qTalRKo4~3}z~e{>HHPPE*U`QEYp7?MuJ;rKGtYe?PT!+=1~5M`glMxq5WH z<>t7Q1Y4(!(a>FzCq_4+@n)56UWT!3_y$ob#K zkOMi7Tj_ctD+It{vT)ER6jqQhV#o-BU&%^S=!g&^_F+Xp#jVxAs>%pEu*0i7&me|T z1C*Uqvb(T-0EJwT3H~~eQB*1i+Y4Nwq?!38n+BN!E;5Q{yra{=@LC;A!Pl@f++;PO z5TbYb@EG#RzZ69rkO=ClF_PxzUC71F2nn!V(ghM7N|?Fi_&Ec}6qQ8*X@VS)awd)? 
zlMBPFiRWflK~+yCUIL%2SBMm(-lW7BSEx`8BZ5%oe?T34tPcKc(hT_-l>A^jp@vZx zVU_k#7;||^l|)qXTCKceCfOtkh1usK`~oHcVz8zF(y0hF$fBPzG|mH`VgQ|J8;S`W z^D088E%V^pFF;EqZs^3jxURGRBq#tQ6V`*6u#o{aEU6HmXAw$_9*~M5{AVN+x15mZ z04~0jVJdPt6v77eLvL&=D*W{yUYM8eKvcRdfL4-SoLA5}BcQhsilY z&iBYs=?a0Zb9%ItwCnFjc$+p*oziB0;ONP|vEj67Aqopw7t%htoX2Npu~CuX6lwD) z&pbhM*4kN%6$={?fkHAf;#%CYd<058Mw5%Y=1EH)lj2Fl29~_pIU$G$@97Y<86H~k zU|*;rVXu&^46!LqfzFup?&k*YU9!4emvGmu9TnYmqI+l3-JNiEZ@Bj_9wYT`?`@ZN zx#jhXOZs~@yYYlJZx#H+QI&EOUmdwJvNE`4`ksEh_xCI}8pMi2H|Li|ZaI$s+)OZ%BU=^Svf3QR{SeQv5Gdn zeQX((XY~`iE2i*e%TG*1X5KRnm&;FmKzH_gTj)>7N+BF);XM zCuVbiMMD;LFc4U`{EIi^m=mj{I^U_s+)fs2^igBxV2#Xb0^1NO#F9y8AG}f1Xc^1L z5jBH}EGRj|@ha*@3y$n^Vr@{{45VRZCpixVQq1PA?3r-1BkR5U^Y61-T!kw#Wkjt! zv|^0fvr@^jOq2CA@DMisQLqepn=gXWWid+qFo=XCKY15Imu7<=S*0j@NGZ1FC4pPD z4WFbG+dm)w5v3@9NGUL?@~?uWsCY>H-TAR4xG1|V&Ka@3(zMR5alnw$#nxA>)EGgI zsEMg$HYh73Vl*h>)b+krdBS@$V}z~u>gqkF6kZ96U*1O`Al6^?7*I$PE110jY0_4B zVrBbdqBP2|v;C@uAk~PX<*+iwvTzixU zN2F#$gWnYWdz1cy3ID+j|5Hnrd%98_df_WtKL1AhQtw^tSozA= zzOsQWSpysH_D?*usj_-nl)=ib7Ogd}*RL0?xy8WVj{@S!)6l;wZ^1#J<-TPySXoiM zGWk0#t7qT4_|C;d`HpMmpL@zcA9x#5f#T~&uN__KUG=^>3K`?_QS37Gl+d{jrJd{g zwT1P=V)34jJ$vuclS4Q4Hx_Q<$mYv;pbm8~?r-Tqd4vV3o%e6Lv6b;C=s_EW3_Z2x*t zQ`Y+JqhiTU>}x|?RM)?8lw_(6?7W3?kRxb++SuOoF7`2szU@Eo|55qxm)~+6`3YI0 z!rKS20~ik)H|;?aoXxw^?$IpdTKF%>pkMf}a2NXEK-G{%&7{=A_$!60 zll4tVubnbvVP$jJ%CrqM+QZ9eu2hh2+eqBo_-uRQ#rDQ2uJKrR<8XH)zfg|+T1ed+ zvmDzBzz{e&r=X-(?xIO=;E04hW3FfSbYv8*BAD!L(qym4xRn4?2l)s+NIR+0WDJ>Y z|4zGl=5ZL)nT2_2`zqVidX|7o26Cr$(^EN9GDBZWkr>%Y2F9s}o3~6+_g*Gjx0M@a+hEQCH$ za929LwK7#wo-Aoel(Z}z!z8-r(9*G_qblL3605guI<|{PzCc^Ky_k)yF6=VgaeeQ# zy~|scW;ZR>NO;+`5_|j7TbEYOi>~HP3-~O0^vcSMqAR#*p`)1onfcQajVExg6#J`x zb>FY?RAbhnTr0kI46~NCQo^`C@Hu*ox(|(x-hlB#zZP!J3_}OQt z^SwXrtP?rSEZWTyKR71ur_Girkv0RikI)utY=jXm;iNaNB@E*N?d%(f(S{dnd?2KR z=y$fm23Ut%R)2teh{X{C^s)%fNW7aN1Kr$QL|{M&QkaTFVXcx`Q19kq3sbX-Vu)Zq!lx#S+(_S*Z-S}9-ICH zTO+(5Y}3LmJfPo2od1~zw7O83;~lb55ZW9jGZT4ZP}=*A1MMJ{%pH1OSWcBxjtUGu 
z;88Sygr}&B`^f1bho<2SwG8PZ6D)G{lm)7VFK;jevIc{iqks(ong$vUgNTto9QNYS zdKwO8o0bY012StKGaMKM^nh)9lKb1O^tZ9co!iqYada1%KWR@xC0$$SEZIW#0f=nk z4FqR|iIv8#=8&Gw5@oO~?cFutRo+zyDh5Ms*@*_B9J*;X?W zJjTtk2w3MoRO22_$o6EA;wp+<=jA0R#?-{jG+MpNE?`B z(;>&4V0ZcjVTLKx(IlfxGfeGL8ucXe7Dfd?s~3Y8wR84YS5gQrk_S*K1tB!rLmOR7>2 zc^`n%5L2ZpJUlaNGSp|Zs7K*)mhh#loDZ>1%a`%xI4{b{TB7ilcbp{!WsygPsKE(a zoOHt0LpEjC@^x}u^o2gPaS}$e>ydS-=NnjE$c%o04>luTdSoh%d=pEBtm_u)xn{ma zE~~L%E<|2DzN|rh3(JLMZiULKVz-YhYb)QzQlb4Hh05AKt}5p79lVC`$a2M#Zn=&4uw)tQN~B6vYX$-Qi1V@ zg5U98`8y`nyF%Y_J>SLRlO5ecrQqZSfdyOkl@(5HmdcuLwKXd7@g(ld~6*{1hEIW$nX{gc1>&LQAaOWbIK-V$T4y1r~nOR|2n8Ti) zlSFMj3ny-CZ%6GeO~brjyotYnZ3T=mh2CR?)u4^TaoL~-AtO-3*|W}Q$6@r3j>L|Q zXC@hDE>qdh;s`N11B(Ir@ur8KTbFYf3ssiX2xCyajK<3pL%r+~C$ZOnd!Sipl1`vQ zu(R>z6O=d92gA}%TA*3j#?Q!5S>uva0+1#;6DMi413Ze?K=ts_bmV2_STWUn1|7Xd z8K5BS9JhGKXf)Ce^MJHzBQnFm{GuSB6}gtF*+?wfjH74R!DRAQuplg~;Y5mfMeY{h zoJz8z6XJD8a704>Ar+T!dRpe7hq$G^U9tohZ)Tyx=TI*)=%t)mF)5pFWEE#;ltUqv zO!LMY7<~8`W7YyqvT@(%txCqx^|WWJos2k2Te%Jl;a;&i$w8e}u@fnq@PPdg3XFD< zxQTAblAQ!C27?9&qb>u*IK@5`IgB(k%bAg~m(Pc>>4n*)#D+52;$qsW48~Zzo&{6y z*>*d1_{wrKsD?5_78v56x1J(%x{M8$&87$(H9*lR8VS$>mQJUWyE5&P$_|}F_EpEr za{E+z%eHmn+_uPkyhz<0D81sO4QroJ7J*)7zMHo2(MXjtVw=t4))8bb1^rJHv~HNq zP>eI1g@L|VD4-~eJ!((du4$Ld^2kMFs-eJV^OT%v{78z;@ha&Z`-Esn$(qkfdN2z( zI~OnI;mRDLqz*0T8!vyjs1lH?jE17o=`a@MHVdxK1*+3#xhLY=RTS-sp|iAA1rXwO z^1yur+>zFUD~uO2#y7+C%6w(8ENy{KD)f3|5g^IJxdlSOm_bG%?FdgzP2eqiCJ;3N zJ0W;PI>!`2h^1|GFWsaa7y+;w${6XG@C<^pgY4L$PV68QW-MD$ANmfxy+O{8$w`rO zhn#Ynu1Rx;3H<~@mlVJy%k zB3)Qa4Qcb7WUCZ*PT-Nv-_d*N+aaGcfMIcjjasU6+NnI1l67QmVL^o=OPcXEd1O)) zm-1B7!lfjXw-{)!tdhB91@mSe$U`daQ6I@;Chb$7r~@I5!pI4uk7Kkqok0YjcO-1m zAt}vEJtr)om5+i`k6i;Iv#7c4EP{pYTh4}*vp5wP!U^y$(_%j$D5E`TC`lM#(|T>- zX6c`|epDqMdPby2r&HdNq_-*IZNeLa=xq|ceQal^t2F7XOE_^1oVfLQ(OD-tUx3|i zgH2Azy4EHhIKz^d%sh37tvq{nGV{zSwmr+9J*PhV=4tkz2^&bg6-iHH!qd1qE$;pe z(bFh;CYX9`(omi-l&^HHPKn!&i*z##rCIsu-qn-hRwRn2NK|>cZ|#fX;Ys#%D)Y2U zY#nFMo>!l}YiAGIkbQP7hgmI0vTB)C&g_6ES-?Ig 
z&ga;1JL8MTQU*uTP?j*1L4B8gx1c!Cv3AubT=lC*#qIr~t6p>+#Zf&5^j>MgP`Xm} zZGQdLkB+3OUw{O~SoSI8w-ees>g$TN=f$U<7b)xuYNT&Yvj;6m-CvdTH79({Fo)AU zA^Ms{-xuiALAP}3;JfYYw85^w}}2> z;9CYy(omIv>6VDN%E&(5t4u}6E=NADhF54PvkB4VG6m7wI zb6rDX(H61jIGGXhB@HzRLru!!TiGvm3{%^4p90%AA$g-;db!!SOQcY{bL#mttLIpu zdKPoZ;?_iQ>)J7K-!o!yt5|$`aWrK>a$Lf|Z8q%@$+P!PX_+s%Hi8j z81AmK?DgNkyl(g6NZEwFa>HJ|c;vRze|7w8x~EuFmj?3HJi=Qo@U z9~&CkFi%@1a7xK^bYcQhB`UpCNt`=OmapS>#^-$z$KypK*#U&#Mup<_^9vATj#HR6 z2sY+l;J(O7k0`wKNES52eG1jrF+w&S=IE4WH9wXdBh=7EC#V#l3#hv&8{tng{&A<_OXZ#mAw}EK31uF&g0n zg=OTYVyv%2HvKQ1@=TgJ0)0V@nG^m4`A9}C9D|c~DcirK5y%=;GRmjg(o;!5fxaJs zB<1WJo3ulTOWPj>Lb%x((gNwmNJjYxTY5OH%gEgKDM%q&_iyOMesY-hJ(Dy(p8jRu z0UN7 zA&>2+n$n+ZJb$C9`72HThNl0gnw@{8*$#g~)BjhRXA+uc?mMcrZTAmro!avIE$6ga z=lzrVGxRg4cfhS~gmXf>1MZBLgWIn)YXkQKZg?B|wA$+XCv;}`1G}}(dtcN-GY#v5 zw&+uv#_7cgfE$jjNyol~W8a43z@qiO$)YW~U%U_Dzo@N%yFUo$wB+@{yM<-xgs&y6 i)%xxq)7!L7_sVg6;8J`;U-lD&<)8E;eM32GkN*#0b4{%P literal 0 HcmV?d00001 diff --git a/memsync/__pycache__/config.cpython-313.pyc b/memsync/__pycache__/config.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4e8f7a9741939a462c6941dcf6d66d174c39ce3 GIT binary patch literal 10613 zcmcIKTWlNIbwhIa5J`~~^`s<9Ba(WI9<=1gTC!}dWy$v1mgSAc$~r4$S(+S4jQP;c zP`0IH9U&iyoEEUuB#_h;unYvS0tBeh1gQH!>;44$IV_cmoow0#5)|D(Zk=M$?N86S zGeeWK*xeTBm3?^cIp^MU=FZGLuX$NrZKa@%{O8}shB_$fw>Z$6QR(PjejhqNqBx4v zU8An*1RaBY{WZOyC%*>4Kz?a~hF`-q<5iPjx@s28gpa;vxmqPuG1R=NiSkjLv6>YNozH3-(1lYx@Leu{zlnP9{_7a5|PsiqouKu8M}! 
z;Yd6zin4Jb#>b<;L%$tPFGW-_O32n=O81eVCP&guZE1=LN(#6RXH7m zvyP1F1t;NbAS0{bBAksXXAR+OA|sFBCY;S=)FRXp&Q>z=3U!3Djf~obdO~B!$SO3D zQHMHeBqN_XY9b>)83hCnko%$*710pS>VIz68B2$!uIiFVzDzji-Ln*m1(Q%Kz z9a@QL!amukr6}1DOQvO;N?A?`>CerBLGo9?E~Uh@?2Lq$aU%<}AL4AghQ*n3wKhtp z?(j*uHY&sx(jnNp$eqG86Fe1ib1?-J;03r3p{3M{Aj*b#YEiZ;4Dg1>7FI9Gj%aL= z7t`7zc(Ehoa))H6N-A8XhTs$7SUi+Y$u-(!f>;nz3E8DhN~D)TteF1f8XQWcXmUj* zjQG^ab;VPc;kdb%5~4q)rdh9S35Am31Rn~?)=)@!nxStGh2CEY$5o0m6k3Q0Vmcm6 z^2rpC&7n{<6$ym|8*U7ci_SH`+5sy#5!4`nJ*R{^1dRwh2$~VJBJd(;L%<;LA@C#U zM9_r*-&dgrK`(+n1pNTy`cMd7n{+G^3a8UT?Dk5U7eW32&|boSO1?00W_}iSK%9?- zlMDR(jU+!Q#P0I*lc~r`0^U_|{&Fn+*2?X9^?fY-sxR>Js$hYI2*9&J{4W6CqdujL zMoZpK)qB=2Z`{n%O?eaL?O4CD34JEl%1U(IW+Y3qfNt-Q=!P8w;(Y@W-MllAr3V1t z%B)XsPG;%$Tw}}nwawZr-I8l$lpa&ETvO}%)TTd6x8^)8>(@6=04LBpIwiVsr!Gqe za$VgL-L?lJx^qolh1ZJ`Iur>^uB8i9%a(hL$BDg`yqJJQo?EoUnX&S&4%Wl z`4%$`%ga@05<)^Ml@`d6<5HFmC8D8pSXkuKpWurkSKZM>3Gpci?QSevW+F2@I zKrd2@6sw>Ck>n)|Fqc*-ZwmU%i22zZAO6Vp!%tT zR&YHFdWLGJ82FoRKWAvC7Ids(ZE`9Vj;ekNg9e6)#>6}Q;0u7oOvaLnnm-|G4$MLf ze2-{~7n9v-hQA*Z(?MA;+@5AlvJ;-okaA=a%PyM;U6kq6G8k1e`a$v7ZJCaPP_Q&8 zFBFYM(kv|-B5~kDe?ZnP$Oa*NPsHPdfkqw$;VUqbZR!Jn3tR()coV=n^?MWLYR@{E z4Lawnee}(T-+c7J!w>$cw&4e-Hl{wRuGtLi&xr^ zNHTnL>2PB%ztw=1cyz@snXF;w6&6!Flrh9hOs+HFr7D{vSK9Ht6#Oht0cMKAyH zL5efanvawVr?z&DAsts$feV6-A}(tME-Pm%;<8uZvUAl%T#gD{4$fJ`<*L91AxaTf zO$DwR&RxV+TY;;Vt1IHFufPT2Oc7T@1+E6Jv52dw0#{SXPIxMCc}gTUSKw;qT8bpL zR^V#oyhU7X6}Z~C_98B(0vE$|6mj_~aQRBE(O-efU&0lrz!fOr>a4)k$#oU!)m?$B zyJW4N3S2$KTyN--7BmdA`Xk>Ymh08zR^aI4`oDlULdnA@{WA9Z!QIA6sRWxDmRcO(I!?O4S+#5B> z=)j&Iz$GQr2;jI20XmpP@dk^-J_Hza2%`wjAUKEMEP^)?j3JmnZ~*{oRyYY0p`S<6 zI0C{%E*C=;w1HqhiQ`6mBLN*7)+%tw`&9&AL+~zwIRxhc$aUq;A;dz0Y*ie;5SUQ` zCCC<%T1ZBC5#txMs!CIAE4qMkjpkex%Zy*0fzvL8LDJCQd4>f_d~$J4vh zQs3F<(Sz2Byb*A)KwHn__jcct`o~^WA9$zpX2e=3ruXrK-3L4pHDVo<$N$)|>)89?S@fW3H19;D z>lHF~&~!dugGe{!?Jm))7O{0ybKr5?Zri@&Iep+cm#;@;1J%;`IJg_!Z-4GPXg-&3 zL}U}y(f8Ex#33DXjDM7O?L)_C9<%X<;o zR&)gy4_Yqd+Y!qY>2>Fz^=iHYvA!bg?St0wydSXvs-1nD+)YXY=U?~^+Aiih5!+R? 
zxB3I`M7|rbJw;f`BDS{(OL+Sb+fVuWpSC?|lLjw+s6XhqmLCA@hUwGeR9$1P z-m`sc>(M4^^W~Bd;Evdosl1= zp40z2dgF(*RL^Ezy@#$-8P}-;*J)|?Yacgwx9@D-+3VXEvJJ;`wVqFFt##H73+%b8 zX~X%tqYoaN;=64?r=p`@?Ck*`wxdEH4jg(;KcWYBTzAw-2~K*IGP@XR=O<_2Xq>xf zWKC;l-w|SIUh~8u^``jY5lHx@(G6dQ3?C#FqmZD(051k;y{c|OWVgEt|IY*si# z*+QJ|Q0mT_aV5PlFbr{(g-=FOQCMc0H44{37ulq6LvVTrmXxjcPD4l|T zi8!kl-U40$eQXhTM0Ld>Obg%T3%)oGzKblU0jyKMbG9j-xW)d+_Rw}{@nkHXy}+|N z343qk0PTF}+@T*=?^gfV`K)`x`45&euiqy;zYQ3sz6*B@viPviM~A8oNOhlpodoI; zcTJ@lfl9-QWz9jI&~qjZyjY+Yg0WXj|I^)5Rz7Biz>u23L$$0GEGr!D@N}RBD{J5{ zA35`jrE1n0pF7YWJZD%56B1=jxxQoc07xf#qV+6mGiXQ4nx^|@UHCVkJvDR0Jd9~L z@W$uR@RA08%Moj7y3SzQudHcr0@2d}^U-vdHT4cUOXT3wQP#e+92dyJoMTy2<IvuN_0SnsH}Zy9Xuc>ST-4kM2BXe4IV`!Z+uIM4#!{T`_ekJf}E4D&sVS2 z3sg)-l{J;x2Grp)5)4&pJ5bR{ENd#20V+C3kU_;7R} zg_W)zF&%|`4jW`RsFWnaNEKs=Wk?V~g;Qc4ToO~s-jF<@xk z!cc8W#ABg*kZ*!qQ4|aoNL@3{VYKk|HM^E)WzI1^S?61;B1u&ot<{#NQvoaBfwg9y zBT>fJ*Isjy$;9eHo|O!%=G0&W#1&GjKv}cQDc3$b$%F4VrXSv49HDRhx}4aFs`qPgB; zd}*l*<+~KJQs^QJ70OjgjS!h?wVIP!h9rC%Qmb!QDriX|a~j*gjZB-I017Zt%%0`E-ClrAC7sYP?DGxtY;6h8zHX`O-~ zr@B7pXx=&b_}uQfon!kolD#kIXat|kx@+D1*4~h0@60(m_5z2#(;44ssbg5OkL2u* zoE@h5_D&x9hco`+L;rZjKc4kZNWRGzx(JZG=TxteqC)<5O;YMy$Ycz*oep?4_b9m;x7NUbL& z7lasHd+#51k7T+>q^>j1J0#cHoICInZolS`J(FS2WIIO>JI6DfPRXD!XJ}Tv5;vjp+V%b0?)gEhMA>F@jiW{ zgkuQM;Sh!pkQC1e>{ZO(U^J;@Y%t3rTmc~4AR{A+2|m4)ijt~@faw(>h5%np0bLDJ zohFx{WMEKJqAq$G;wFIaQIO=QZ(o0FBa)@tay5PH7dN2j+gDVa#Vm+#{mRBSvvdbU z&RDaJLKm|uO@M}~ZsVs|0Djh*X&GC;v@w;X$9_-ijn{RMDKXW&)SHd|mj>wNu?O!h z;)2E|6$icF|ACG+j(%o8ZDbT>QO^kcFEq>#!s(-Xxdt^5h^GsLqoT1_Wid{7<`zYT zA-6SGh|9ig1jgy-@Bk)l4i8<@j*5SZCfRvw@^Wb6 z#`L8tmqTyexPEcX98}1xMYd?tWs|rnrul@dPk{(Nc{e7ck^VF7Ui03;jC&hLHuJKxTln|7{b&7B{)>$ZEhdbfjH z!9CBpo)9eV=^FlHuahHN>je5o@gd>?S|`cWe0it`4YPWKzi_gkv!*Oc?ul;dBh wp5IXTZHnIZnZ8Ho&r<;49oO6PWcXDbp-ky@OrAoh(y;p<@6l%6St9hm0q*Dcu>b%7 literal 0 HcmV?d00001 diff --git a/memsync/__pycache__/sync.cpython-313.pyc b/memsync/__pycache__/sync.cpython-313.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..ab81109ba017b2b381179b78ebc535d014ede3ab GIT binary patch literal 6700 zcmb7JTWlLwdY&PNw;?4`mPEyl9UonMp)FC_*iLNa(kilSNw!3nV`gpGyTphbN)wYa z?3tk@D(i%cEwEDhprqSHR1_%O0&TtRL$^hN?o-hK0k-IqxC$cf)aj;8_F?;^)GikF zQ@=CAp>D<|I|2_6&*eY=`RD)sn;x~bi5#B4{P>^Mzlw9*=j@03c$^OZXc3(sauO%; zSGXyj@`Uw0S3J~%Z|@cFlt6{%n2-7}CS37P1!w?cJ{puFlK&9*ra(hdn-s8nqSP)0 z?Os@lN+G)!kz$f)_u8ZmDQx%J=RAo6zhZ18cq=Sxnr_Obs%yp!ehb%Ra~|LRFBT%4(5XD9Bt`LNDw8Htsnh@X6B5uf}vO@yc zcSTXk_Pi|-Q=OYP$;pbA2Onpjf1VVa9k(u!~#?yp? zOAx|gIM^W0MP;o@`X*$W2Ll@RFcks=(2hQ6*!F-+mc1ZdD^`hIE-M2yfGcz_ZI&*paq9~@vuV>OTBP7M>+4r*rohO&Crd}Vtnw=h> zPNi=qOEzyzk&|RbC(2^EC~Gz%m}Wq(id>p4D#;w6(0YRC;Wz(iif1ss)tX2*0p1!w zr;j_={Hb{*KEt;I=ozk^J0oa9#+w#)u66q9!2Yp}C(TaX^GM!Xwy|Nul8_N3U(z$o zOa2Rj=F9lfkv+@Vs(=q|x_$plx?}%Xi*x_2PIvZ3{9N;vn*WCUn|6)Nahw(aA6@R= z?QbbKGJ)A(#-9mff>J2y9pDmT<*%_q9FvPh`;|3q!05F}rqm?|iQDMQIm(}PfR_~0 zwl)~h{=5nuUQo?>a!08eBc$TI`z+MZwpk~kdx=3;kI2bcT`wk*;zb>*JCnLj7JwyS z61L2+tws(U0l2_qo()Y7r;_4Q&sZj%9-q11)F|R;)QGs$A&SztBu%Df7_o6_L{zyL z$5{e1O9_u9P`#*FfhIEv!Sa=0<%*VAk{7e4en-&^%WqU+UQ1T61ltFzWKbq_8b*`w zTG2fh0x`#4SRo)&%fX`L6JaX^!#hv)vYN9(qi&bBfrjPJL9VgYiM!$NJr|k=q$8GH$D~R#0xI zH02^y?<%)1>bXh@RE^u0Rdb>;dmH*-RJB~|oBc?Zt5%#U1)MeCR2WwqD8V_0*@6Fx zrp7&vaP2*tZRBTdWTUOGc4>WviWw#aG{1 zd1o_vtR6kqXzyArt`r-cJrARY8_)JWxKlqcvflCXR)~uo-{O2Re{BL=dX8?!kJsbJ z@Be;1p4f~h>+$4f{A@jbb|d~`E%n&X^$b6Fdn2Bzr9Nvvve|pC-g|DN_oaIKOSQ{5 zzT@C#Y@i++co3<_&ekR|a`<$k^H8Jvxi14k#J|M}KK~=!h0E=dkmMxi|PV>RC z`A#;evlSVBfV z{A7~YK?@>_CJ5kxHFB+L&a;Sw#S@_Hs4|g@x`qG(81KpgJ`@yop`{y0QJJRi8cz5v zF9HzDizvg2qPJ^L6r;Na1Qt73f#%H(KZNFhgdRng_PEU$I>rZExyJQV8h;0lCW zEoFV0Q|x1IbLSd%-k#@NQ^j1>li^$?ujZM40V?QmM{SjTnd83lJUCQWQQf_A@@wq- zt@+G@Z^C8Z0y9FIS-m|^OKNcHE+_Ua{tWl>rkPXN^JIK3cnLTC_YN!>{e*An55#dt zGs$cPUd?Pm%8n;loU{)}!wAA#2H%Yyx4mr<;V%o@HC-E0V9%>8I#d}bo0OR!24WcP zW}uf8FFiT=?f(AT#_5xLkNeJ!!xFcRQv=tZh~Msi>l=eF-otW0MKsDq)wJVh%R?17 zPua}PTjJ#9nN)gwY*ZSjNW0+S9PlUnw443(IXgsE%Qm^N!aE!pO|V5lm-T(9>c=!? 
zjf9W!H)v|ymdLertfp2{8<8Wm@kU3yHq|(Cw07n5SogB%{cNf>%?wgy@!FqJ?AJhMI;~#D`VlV&X z`bRJP^wkev{k7NgV*8gI8fN$7I5N_+F2vzZo;m!%()&w4_|EdgFQPrmi;dW`o3Xxn ztndE$Phx{#9%PFTJBw*A*xls{T3G$JuY*+&uB27Ee-%jls`Gpq|2_m`PsXQt36~0Ge3E|% zhil&Hyo-_9=P}Q0(m{*|5OzNWT{m8GsS@B1G!RmO#dR2&`y}hlYn~Ugm#{Wg>Dj$tSu?dT%v=x{dLM@`zz)JEM8oSi(-HOKI8(wD#-RPM^YB{clg|E?z%u`}!2NJ9xIa7c z+-ccKKo>(y@AVqSP^kFF6 zSxY&FfV~j+5zvA2;U8T^=c}By3qKhlI@~+${??Jxc?qTggSBSolgKxz7dvsXe=RHT-Oi6O0O z5%nRxVwNjrvrIHd6f>7}$nW*)jC8ufa%a25wLkD?vNZGpvU9`@`-AWVi>dv#)iJCP z(#?F936a$-%cxlzOs9#BX{(c!6|*|cqE=}(Q$0F?`Bi+3V`zZ!AQy~$|E=%7_5FL_ zz1N6#uD-qU_WgzBx7VY?>%rkI;Xp*#?=+dava!;I(@?yHuA_L%X&yKM&(qN0hHYt< zdF^>5uj@b&0{`~Ah1PE2MkgvtzB8U7I4aF|t5rK<&P@tP!3$mwr};Dfrv;W$C?jCE z$m&s&cxFe?D23VIwws#^_i}@rYXS0*KmqqY$`Sm{g;VXIEE}#941=Xh^2)RICh4A)~Vk9(ch)w#8IS%2M#y&7otZp<{XU zCPt1;j~tVhyr|$=yi8w*eCZgPrI0u@Gz56`6M6x^XA(Zjk|1PP`hp$c6&Nu~&{1Nv ze0HS=J_z2PLovfCG-rWuXOqw78&wr-fwd}?tsr>MGM%&dg29B{EKoSg*TH68D)!}R z25bHnAHzgb<9;dhHX>cj$p7KYW~9F!>Ayd<5gA7O7<=Y}$or8Wv~R{v)MF1?UP5p@vQ2}HJ_)gu;1-B~s~LPddu^9}lWp6%_c9w` zZfM7--{#NRb#2$YCCnhhPDCv+n=MpW1v;Ch03~M!4OW1?n4&uC5M%{StG($RlCZn@ zpN4%w$_kQp*~iJ2``OO{)-XF^zp)>2wgGV7YVZ{l8(_U&qgl*D_c`MgG+SPt=Rf88 zKIcN8a?gCu#eT{4ZUx&qB)-=6m<;g#$1{8zKm3^V@I8;uKF4=GzQ`Zsh5tPt`ZGGD!NzzBV0#zve%S?^ip8I6!aBRq2Y_}ozr zAkR^yX{k|;-)z_oS*R1?!-J?BQDMCh%5fa9rt9{E zHZ%xe$<3?<5s!cuo&tHBBkx?dEfs@JBf%C-0S_aojE0GlA($E05K(_lrk5e&W-cYcP;tS?=q#uV&810>Ma~9uw z1?}(7$M*=~f*V4bQIb}V^0|&xwmrKyq<8J{Rl*p5I~831*+v*vJNu7yRkeX%TCJM8 literal 0 HcmV?d00001 diff --git a/memsync/daemon/__pycache__/capture.cpython-313.pyc b/memsync/daemon/__pycache__/capture.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13d1a39e9ebd4951ed0f9e36f4fae4344419974b GIT binary patch literal 4366 zcmbtX&2JmW6`$qql1qxz%BCVY{;0>a1c|jpyOsr8w5%kSW4W@e7+SGTYQ@Z&JCrse zciGvc9kDT@00knU2RliF=%}8mTv{J}>LvdG8MctHaDo_3Q1qtM3Ebw?H^VPU7INtb zntikL_RX93@q3R?Bask-@zQ_)YAyry54LCwUyTr+z6Hc5NJA89g66v-O!%k|Gg`bN 
zPKZ?Gv~)$9kg3dR`HFucKm!vBRVIQo$lv*|geKZ(8_*#wuphlG(XiI0DV&PT2IFD< z3>+AmdR3do#4-xDX}LISQ=GSNn>m6l+a)-o7u+HxiBRa3OokL(2VZ{;_)qZ7yQVvb zbv&WHHi>WB#sdCr%q8!*u_TTq5{cM7tXl@w3k8twV0W%2GUVFWwGDlt&%q9H9MiUN z&Yn%+i^MT!En;vT%l-<}b17 z;EZi$%~>$05eHcpn9cf``QphfwrgA(|I#WH-PpZQh@XJUZ=k;b!L79k7Ud*QWCWwv}7JMTeZjhRxiM z$vXwxA-DiefQw-W5JG`Do(TPmg=$>z+H?yTb*7L5OU2M#;+lB^v~*o}=i;I#U&`su zycc-eu`M&Z;05YR0gE4e0`|xVOj>=2@PvQi>7RkPg>Ff=TUp z`S+tHx0n)oP&`-~8KVSDz$3ruY##gdOlqYXHw#OToSESUOk^5|uqU%al-c1&AVvj% z0|o_XLa-1Ug)pCG&&HIBuoXL=W5=IS)uLu{gkhH zj(9WN*q>@|P_1XIX``trLquS2}Q5Ci*m=AevTfXpQj%%L3=6Hu$pP>t0^d`oV zGbsmfW5(32ESY)DA{VK7hs<2GGsQeZrI~Tlyt zJ9{dfJ?ovrYsz2L&(v!F@KSK2>u2j-iAq;uZK2vVywvu%W8bIYkHR0fuXprSI{MZ^ z)sDd>c|+a5^6QG)x0YHPe>k`{P*Km6l`~J_D71TA{|pM2y6A zN!$zQ3R8`18^YS;bumfu@fV-+=4K6<*U z4n2CIqK=l8QF;KrE6KdfZF3M)n*&c6^c(eogXC5cvlH1cAO%{W%%JI{(0~L@Jnid2 zEuh{Frea8wGEKtz-@xP&ZDuI4r+1k(;-0n>@lc5J4LC3cxR~GJb7zzYNlLya> zhf!9DM<+d*P-;^zT(nwAVSWf=r&&37pl`HXPhwqQsWWdvcW2GwKDO+`S(D@p^8xUd zLcB%c9VVnipa9(oK%l3PcTC4k+w%bKmy^BMlD(QI=q0)6W(UrKe1%vU+W?WJU}#`q zAawNTQCte+{(9O$Pvr?A5a_e#y&%JD$JO(NcpGJPyq1m%!$a1Icrt*xS@8U|!#z1a zZl{(^aNW9SsG(sy$F-Tj$^ow z7wgHb=!TJIEwk72Q<>f1ViKKaFhN>BQTN}!w|wt$bpOiu>aFsD)79wEL#G@~mcz+! z)UI{)@E7XgRkxxZUy{C3qYv(^PFK6*oW0z8u6$sm8a@9A_%D>h7ruu5OUv)Bt4Av8 zk*a!>e^HY>Q$BF28XbJ7m7{0N;j>Sov1+sjDuU8+UtLz$l|vQf(5mrpy3DBLae=xrqUp=sP?NQI8 zu1B=odEsy2v1cM`AN<~rI=WYkhc8yT&sEitvNG~40I&ZZIrXE%s`cmo7srOg&xb-6 z#Xu==0$?=f&TRuKhW#9<4g#^YN8Aoa&_jrmGyyyiqdXKoLmd2S!0WF`_F_NfnHtZl zAj4r@%pwBQuShG#gLkWHPg&{V@*wj3lot}pfGO^yFTsNajh%~2l(hq%l1^KCo}|;B z1g*78F=O=ea^Q(jxalywpy%1tiYNBvTM$k!zg$y!@AInCi@?JW-MInNkDy>k{e$xR z6N_Wdq=0<(*Jg?6h9yA^WHr-{QN zMzc~}E{$le&VG+TjjktttEYw*>kgp_d;!{@c?TCQ*UZ&^w$Nh1h5B6zoJ{b)OYqA? 
zngbUwM^M+34RX6UI|+Vbmh?ngwCEv5g&Rekv4Kxn<)&jW)yg-nmv7*2G+^`!>|SLY zCt+f)IJokwYN%&%Y(olsF#i7dMx<*a621TC@|)|CzDlHTqdmGAKyvusNRs{B$V?A@ z5bMo(81e9V?yVk literal 0 HcmV?d00001 diff --git a/memsync/daemon/__pycache__/digest.cpython-313.pyc b/memsync/daemon/__pycache__/digest.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3fc2bb87f11bb0916756c1988a5cb64f995d6057 GIT binary patch literal 3414 zcma)8O>7&-6`m!Rpr*N94ZEgyj6zHvQhFsEdQ?v_^ z^JeDFn>X+0d+$Xo7D4dUKKY%lgc16bO}x+7C4?6nK)jC>M3EvWz7=8FM}3&l{uTeS zNX6v<4e+_R5?l_^5T^qx5|w}_xDsBD&o^oB)RbpzJ1}S3Q#@A?-aJk*L!Akc; zm6BpM@;S3|MsQ`#G%bf~VP|}>GwtEYV7-FQ901Os@X~U{Ak4Jj3uar*tU23E zNr%a?33!$Xa<)Ms!?1SPRZR{F22Nbqen10zv;$ z)yNjW7X2>-lpi7C#Vo8sN4dXy6M?u?^pUp}aW=KL5m+!8ZV{mo0nBoqR% z7g7)hz072;WU0mTVi3-~6roIE7Sk%@QF+|MG`d9`wL+~sI(TuldJLGn%w_>Qkp7p( zd;Jiu2PQZ1WD!y#WLGzDf!kei(<;_7QTIUaw3m7ifqJP%0mqO#V=?gkN#%7J-6DNTYb9In!CO?dt-0-W@l*pMMw-s_Wejo zzW^0bp%>GPM$9ujUpHX&eYDOO*(K8|2?rH+uy5bEm) z6jfrmKEe#NehGoX|1YYTk$faKGLYd>xx*ec81q7oMd2%-5?>$pmXt(3tR%C(GlDWS zCz^6z&K>dg2BVTH51~^YFiV2VPyg?tpNhW4*|reaJ>~ z#Hms!beajkS8 z!0ZtEZ8jOcxO8bjxuz;Nm1_%E)T_C*D_5`4*V&fH#RZsEE14)|<%_aN<0`olGoq?( zS#GpvG?f|H6?E6n@fl`n1$rho3{%z6HsRS}%`jBn0Rd(kI+(d5Jb=O3JJ%i}G1ymM&m!{`Um-FI4t&fZ_@L=Uy2uQj8u?N03KyX#MUPiEWG z3(e_;r#pMo3wzVo_uja%hj0Ed`py?JijVv=j-rP;iJ|Aok)OyvmLFYiB{Pkszs5$| zG5m)ZZpDr@-tHt0JiPbdUOO?_OiXs-!w-!IMkjsnSz@9ycJ#5{OwH~M&FzO#^7VZb zNCq2=`yh0iZ+w8?-vB&3tvbO rze*xu;_vAA$LP?fDES#Wx*zlh76hnh!htW7DIxOJ$!`g#xx{}1uFX~S literal 0 HcmV?d00001 diff --git a/memsync/daemon/__pycache__/notify.cpython-313.pyc b/memsync/daemon/__pycache__/notify.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8de42127363f29a49f608b599aca619e0ad8bda2 GIT binary patch literal 3755 zcma)9U2Gf25#Hk+kH?evBTANKN0rWU<%*3&Rkr2ELR7$(96MG_xjfO4VH(7VJW>~v zcf#J$Hd(-+`cOx30ogzUt8ac((HG|_DNwXX+t&hOZI{AUDbgBw=vxXqNRhsDc8{cJ zM-IA#W@mTjW@dM1z8yUa1Y`v5mtXzPi1-nDN(arxyA1aX1M>k=5k@Mfdgi$E9_&#l 
z&(HCgXT5+0=!H4&c@c{WO7V$cQ_XHagjDY&=Z>+Y`c?5DdQZT9;CzgeRauo7*OBL= z9i>YkEXOU!$Qc>UF)UNjmTU)W8OG-@A?3g(Y)~ndY)63= zC4)74xy|vI{4aV7QI2%Lp;!((TRRS8afTgFCD<_8IGIc%7TDd1Kw->18-?rn01cod zO0b0&Ny78un2x47m77{bNQ3B5&=qe}Vv$RFCL1;sl=rG0Ce?}16<GSGN`JiFIE@xahLK@AN>}4iiHL4| zlg{>pARTEM!f97yIL#6wO(Ql7Q)@siH4rzjuM#4)&}U*NXMvNG&+>_pZL3@M{}y zZ7w_sAKmHNzY|s-?0*s--|mX+gnRD??*%t*ZoA4 zkpDx_3;f;*wgdqWz)L|F!j%CnfUhY)>d+))EzwGY<_to6CWO$0-f3>6A)4)_X$WE7 zN{u!hdjPoUO$tK@a)&ijTF5gaH;agw3jLW}5^58cmn?U42rZ4GBd}j}d#uRwvOALN<{S~&9JtVzN zYfRVSJtV~4Gf+MBxBErja|n&1)_ucSLS1JkP_9?K>^-MnsK00};?JD5f@>B$;--FDLS)AWjF+wh=fNmpZ*=}G6P ziO>sEjzv0b-AMz+IDivGpze`yyRv1=c3zxWv9LoV+tIM&6l~%J6f?{hMM{81q?3VP z!~3X&X>j8%N@&pHeoEM$q?W^c(asZrZo;P9qUc5&?xC`@?qcK!F!rB;RMAr*@~QCs z=OPM6)+=k3^;>JVwgx62g-+B%sgIBRe(X16^{G^CDpfuA54rbFtU9y3um64jYJ5A= zcYo~OSUoaPi%e8!-WONrwgI&Q_X73sSS>sTQ^M-o+k)@*{ObJO*~h|x&-~%_{w&ev&qI`v*W|k=?oZ#Fu6Iw= zx+fmK@mP8j=Ihddnsi{}@MCG`Gt1oir*ruv)dWqKFS1d%Eq^DtXtQcUboHzX0rbR;>ToyP<|oQxV{W zi%>IEe2wyw_!5#0iFA?hK%6%Et3mPq{InAVF z&P}J{k!8Zk6>rJOjlDsQ>1M{tLJ?5uK1IuLRy{*b(z2G87&86=?3x;JmH`{^1RV*> zR#7M3;0?C%!^~MxUt0!Jjjkez6(v-Q2R7hul_)li zC6Ag$D<>UK_22N2`!~fx6IF z6Z$sJZk&8DyV!o)N11T{g1Rz_@}gopER z;bsQrh=_HkgiRvEEmMz*?pe^4T^T$}hi8C9JBcry&RUstn#k#Nt^}Xz=``l4CLgWc z;JWYt9kq&ow^+l}0pbUAeuw?P!!Cf;R4$0o^ZOOTr=ereMx-~3R<=~oe}vzGIY@A1 zp9BI05y$=8Qu^LmcRmB30Saa z0ntRN8TF;1+RjKylSpq-Us`$k5I^dn$=Kth$s=J>2D~F>)Fyh!8y(4=)YFIl|Je(! 
zl%1@EvuDr#=Rg1X_nhy)96oPvcQbGe{_iiOpTEX1zrzRPvT7B}^It&a#|+O9hPUu- z3zqpd(#B!kx?m+%xZ4(N^LApVZT1DnypuR--Lb&VyNHX{oeS=H5Ao1CyWpK~C+!?_ z*&2P-7_NFBWO&zMi}p->dl?>n#y{TAyZe|cwksCm=L5V4>m&d*FRgV#t)14opw>Za z!L&8pS+KM*jC&aLNis+0xh4Uhvy10vXBOk-D* zL4v6XIx(jdvRQ$wyWKOH3{@FuCSM>jCkh}YmnK3&OcgRv1(9SfC1pfqUCE1CPRRpP z6e}x%^5irJgP_Ry6+w0rLBh5OcX0&;S3nU$a(o$>A|+}~5k>CQ3`oBezwqwGGc${e zJWg}yP{jQQTvTC;>Jel)2czJ}UV>-a1sDPDU9<2V(j-@qljcH!=D;jy`zr9SZS>5M zoP3c;>9k0I#xa+ZQ&PIvp|6>Uwq_Er?99srHP}^_=YNFOA2Tt_nEpzDU3f`f@eFfV zZ$0n21k~DPw8?ETCTcTUtM{m*S~qGY_ZfzXwMB9ChU*gD{n&WPYrKkf7`5tsklAHa zf0f3l89O(|ykSAS;H|uE!olsvq0{JBy?+C_g9i0qD0j%{U%l7KZRZ_dBX>`=r%^NI z9^X~peFiP>+(n};@Q`Yfc~0&s4`)#Sg?bz``d9CDdi3yKzI_5L zL#%C>33n7e0y_6>LCPdGG^Bf`Ktzg&wdgF#fC6$tP(8t2 zuG-O3D5M=5aI+I1wzk)Se6j1VDCToI=2LVJ+pAm}x`#UnA`r17D~F~*)u$^WF3Q4k zMog+sA(@PW`Bb~0h&QN_II*u6KX^O=*qRrXRlmVO(e1X{S*MveT0S*^-0IrZP~BtP z?A}$QD^n~V_OE%N_SHPZ4L!%H+SCqBBW7nhc%AN4pH5MUNZ1jbs5GE{aB)%0%`Wxg~%zZG1liZZ(6p;L0el>Cghd(#$t4r*i) zrp1N_T=>+se+ngd%jhXH^A_1cJzncV+q_k7qjl;n%T`*a-m+|?b?Pn4fR%fhn2lOo z)5-yF{W8#1c7Xk8Va29RpCSpi8F1GT?KS#V@1{55j5!S}kpym+O#2jtXrS@poyO=K zlV*W)e#**d(b>48xIG#!zMb!wutfJaG{d7YqU?!z{)|z54WqKWpU%r0 z^M0dwef>8I)O{oG?z58HWA;Jjy`RUJ!?0rUk~?A@d?zksV@ASR(0?0H{f6&CIjb52 zdBMwRyr6#&`%}HaYL8i=??=%{BV8@0nl*Q@nU3$S*Vxxkhw7;Zm`jHYo=!#ppN*?s z7j-eB_`am%^#Bw@NN!Bdkt{@dT4)K;7c?r|3dXK-IO1Ls zuR&ylK@o&uNF#6wB0w-oM_$Vnz|+bLS4A$FTa&4mwYHMWh+NfDxCw3p41$snc3~u= znNDm$N8P?IJ_$tX=lae&n2^m&d^LOH4+ww52Vx+;MPtvF|PI9?CDgeT22&@j! 
zxvzppwp3(CzB@jW9Un=mtR5&UMJohr#W$NcHRuT=^bw`Xt7tFeQ{%@#XjYUHxg@YJ zzSYE}HX0YRtNC>r>DTi4%HJ6N77sVEX(VYiIn|kfa5arikETyW^%Yi8#Yt0NqfMN3 z2T~{0r7I3L(SbHJ_S#YzQHldiGzD3Fe^pGtfSibFh43Dg#qrU>BnRM|t3KV0i0Bd$ z+|p{mXw}jV@obXd2ClXnD?uS|V!PL%$JIh`538&}0$vU@(iH$+p_m78R0rfjAZAvb zYXXtsJE=ZgUfRqxN;}P-z?#UD6m^Fk+O|;bNpZQ5R$b_b$MfR*dDRXy(yHoSBT`<} zn*65R@jSTNVqUdLaw?~~PQRZJS25;SU7}G9`v}?-st;}Y9aLhI2c09G6`4lD6F5Il zjkIKrw&+VM4wF;ubhn#S)JN?|6bOM$qEA$HK+33)$t&o4(~L^BAPBz%%{DEfP*Hvg zzQzsaOD|*J`^+}_#CGh7E%Lgy}}21>qxvTvy58$!;D zHy1a17k(4l_VUQvzenTpU~SVsvgtos_D`4m(_8+xH|)Rl1dCS>?a{h7d7 z8}sp@nNhfX(mM&|FN1GD`77JZ7~KAIJOt&hZT)9Qt-l_zKw0NSVhm-)4AFl8C>vg+ zjCm9*=3UW)1n&?mGI)P2GCqdwlCAKb1zrtTlXqy1f>m$446`WPVm8AQpx$9MM}(`k z;o@z^=-^Ma#jMdl0|7k7Od8pKsmpi|vbOMaTY0y}2ru<%9pIT_nA33acKGL*01fgM z*%`CH+{5i+-id=p#KzcGUap}bQ#ewylV`^vu9uzX1cvdTw^RLQt-N5@d)Ex_55F4tOU9HK(_gA`XyFI*Zewm7 z1O8H*=`VSTcinTj)pZUM-db{3OXajkW@9#73y&calx)syN8A?xEFsqcE)JRl$P8sy zl?XQnW&pF#*k4bCCn1lL$YTx$Tq`|&T<;`BG=FD_yaD{G<%=qMv`>;V@enk_h6~}8|a$sbVzk5D}}t2A;*EQ zW`IoFL*B-=lUU4PF^h!_3ycN{rVmv&7=B2}7eHsKJ$n_DqS{G119=yxq{K1Pu6l@= zf&BM1u$ej4p-oD)gUIWL$O1B2#Nr$jVUK1Z{~8<6;8*P+IS-GjL(@H`IN-P13qgk2yO5s37|1=XgHNkwmcjhSEKIxt5hubeF`cLn%{;JOcWt|Z*j~V_C^lBK9 zm!~4Ys6eO&Kx}Sy7*xNE4G!jjX>~ZzWK?Xepx=N@*&eeS7?T9~()O4wYMKILOn}lV zXab<~4hox)u84M5X^fh|yMeC-g3r~KW(43GFOwdyKr^a504U6|MPdb#5U!XLO7~lK z-ZNnZXf=-jTEe#1&>H}&y@nQ7eZ$4K*U|zAo5_3=gsWo>G<1eXzN6Y7K-g}Au&;R2 zJx9+00S>E)VK_Ad423Ar00Bg%#-JwRJ_BHi2YTSmN+~HpkIT4g5@y2d02?C_S! 
zISxc9#}rbY6vup0H7hf z0OJ4h_1xQ29-S_YPM3WrO1=}|-2<#vdX7I04c$Le9zI?gKE63LwZmAZ_gD5Gc`y#O zV@GzZZQV!y=wjPDUUV|;U0V1v`^Y+-LPee0mW2BAOMwx%@pJA;ZJ1tO#Eoa~)jwT%N8#m z8xA=08r{ER{g$O0kVd=j9i;cC_5)wqq5XwyIb=EU;)-R!GWFse%ZO$2%L^8dWr|Ak E|M*nbfdBvi literal 0 HcmV?d00001 diff --git a/memsync/daemon/__pycache__/service.cpython-313.pyc b/memsync/daemon/__pycache__/service.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ac57a2f8fa5be56d500ff8e95389c34ea5bb837 GIT binary patch literal 6175 zcmb7I+fy6o72lOsw*?7g;TW(JZ*Y9k#7K4=-w5CmgJT&Ggq3o`9?ha%V2jvQzg-za zE>$vGaJVI}i0!&-wNuVMywH8l64+ zUCwto=XcKe_CYktGf@2h{k!_FIK%vaFK+SK4Ex|cU_N6+hA<*4`bODHKH?LQ_mBFC zpFTOl!IK*eTndsPv;{~=43e-I6GPq1Z#fbHE=)OIY!f4tYXOd@Tr0G;P%a8wE9GKh zyBMWh+nhfcujFAlytr(b%A#N>X!1XVLksZ^3oRo8@Boe1W!kDU6t_i~T)xQV^l_d)mO*MsCwWJ7=DWny%n08v6XW9{%a%Kf7 z<5E$8f>KISMJvwBLb;?GCQ5iOJ*LhOi7cnbXw%S~qM1gzs4u2mLC(tP5Rmq|s>%A2 zLB(mhDH!&86iJ2dxTlPNw2C)zlnqOO?|$PpyiuvAp$ z^^y#0!lQ?&E>2BMX3iIeCNqW6kvCJq(5wlK&a_8)+{|J7Y>Q?|P-HlFXCH?JQx_yz zrV3n8mSK<}X@Y)LAw-oGTRlVq^`ed?jmN>NHb?t6mM6ouK=d*_$}NKpI90HjtBO#T ziVM;lY^p3vredV{f1{ZvS*u0T;Pk028CiJeCSksnpQiI*A6NKc#V8WBjFvXA_3(MR z%>hj@mvpj_0!_>*CeKfcwvk@pbIWCAz)%;LKpxr5Qk5|G8_6B>_}m;HGjP5>F{^hmY`tiEU7G|PSI z^wQE&O2U3AfCzRq(vw6lE5uwL14#A4kd$o7Jus1tu`LUNT3IccgCMXU>}GIGno&yq zX=Jg=FbOPna69<=(@v8+5aygC(&7-AtKhL34H*(45|!4Fw?%i&q8eRnjPKk=wcFg( zpY|r9yO^qIL*|$+$&FQ*q@tv~ZH_8Rd8h>D{Bs?^#xz+X@k!X^VNX$M9-gd9Oa|7bfpH&msp01Z7>a9AQss0{}%}akTL^qK9x-dtuXj(6MXfe6|`4p#S60#db%CSe9@P7th1=ld?h7Km|Km9$yq@`R=g6R_UP!+Oz% z+vQhO^I~P@vNPCw+E#fP#$v!vm6xqHZ+UhdX@@z^!QaRLsWLxCcV91jT(}c`rg~v> z*MU2`4pv9M3q^12xfAN#40o>Xy%#>X5ox<|=1yd<`~G9>+50{=`YQXt?{DMpGyVww z0E96++vgvk<=5D!XNzD<+ii7=C1MZHT3u_#9@CzEK4yofrXb6Ku(mz(dCUSK?9YKe zg&+#vJVk=A34&9tJnvc}#8Epm{JbG%lK~ROkyO>Y3f>UUSeok`b0JX}k!0x@1Mw%2 zHlS9#X|$uu-B3RPf8!L8t-9Rsbl80_ysx1_d;|T}dM56~CTrnI0%sN=gM(NaWy~( zoq(p-+xokW?wUe`!?m`GZ;H8*@!>)~o5@*0^sJI;@k>NCm4c~U1Kz02l!;zc48!t+ 
zyS4&lNJvcB6dsio0R)(ZG6ZO=m9m&$WA*{&3VzeZ3BeFV$y>k}=zXfpW_w3%ci)}% zm#Ue~#Qycfp?c!bt(R(vL$$=Y>evPsUFSOMT<7Y(TcH|qXEwRmN7)auH%7nMUrQXn z-Lb|EI#p|llecqg+{@nUi?@f@xS>rhQfuA+MP&8u8vp!V?kK+V=kIdElo`0k4Q@vo zVuK>>0~=}ZYL{OQ;Ue1$dlY??`jKiZAZ-U6iX8j`6g#eH=80qnsZ?Ly_sdfAfKA%d zKaCEOmMPEmx-|^*28#&RxV(P~wewiXb7Dl~=_(=pu*(!G{ILT5phaw*Mu$a9-b*pk z;7`OTm5^{-#aQ0|7>*llZHRHP{e&k0f(Da}XRVNP8@0GGea@ix$t8VOs4NjQGyv%} zhzbRL!D?;3uRiMBo-?Ot)&_B&+HT0nAW0`IkaPjDBDTYa8<@o*y2;bn)B|KCh_eGq z6FcOaN#2GNH?J=$R$y@f@=_~0Hk8kvzc^f=7YFhjN;-(-5R$_{EI&juas)UCO1LEQ z0@g<8DlxaUc$=m8irTyjFtjZImal}2gY{VI zR#!dNSAAuJYx!vW{qfc3Zgy11>s$)J_<7`$$fqr@-{Al9+iPhno zFIFe&+_8;#ckSuZ_4w)PrOl3K*E^2XJC59%u5}!#bxc*qH@Mh3*H!1bR(o$9t|8ag zKIc}y{Q_XzQ-N2kqNu-3htL8 zlY1g$LgZl~mOxy460AXQuBo?&TQT6q5sk?+J4H@6#X&7%ksz6ao;x7U?FrG#?h15g z7;#ravB78E(83v}UZ%*nKrPm-56P;5v4}R2Juo2+QDh%*xkv48 z5F#`IlT*oH!!~V$4pM9xRJ?o*adZ=q79A6K2*2A55!`%%7YI6DD#P{9fp5AUD zp6~u17VzZh?GC;hHR#y@2y=?7K~LAmA=zzsSub;P`{N}Ky?x^fmSCOe#f~h7gV%Qx`^&iQSqfk$dA%UeZ_f2|&Wt$JxA z6kiW@)e#e^}%3Vwe8Ru$&pD+7_vRd{ozRsX&J2AYH^uz6RzGA8+E3h=L7U!+_ z1^74%H_i(v(Y9Kdj-MFo01QKF?xYinQyuHcWt|`zw@VeRx@|n z4plGyvn8<^+rQB|^}z4LE1xgI-w!ga9n}d64G57zqL>w;SzK1vlq6?|7^?6LPH+hb z#6rqrZL-}86$-LmEEFuhP?)X2V5LwXc+&Qr3C#uruQ^|WFT@o#+rt2Ww3kcFGxBzI zcR(I4Y#ovr8w9R((V S^!873>nnM15qR`w^Y<@7N(gxX literal 0 HcmV?d00001 diff --git a/memsync/daemon/__pycache__/web.cpython-313.pyc b/memsync/daemon/__pycache__/web.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ded386d49b39ec18385a1569223f1bfd7d2c7c23 GIT binary patch literal 5105 zcmbUlOKcm*b(UOyMTxSdD6-v|wn$mA5{aT{TOTW?Y{?Lmo zm$D?+fQ%d>BL(aL36dO&xV=80MJkNT2+>gVh3k(y+H1~}{)sZG|=Acwsp^~rs7 zAHY7kKNU#%ThQwwZAjIoYBPh7$|jp70+=nHBV%MvMQ zYQ*oqO0_vfCOVdIhH5u-LfbQvPGmf%kQ-4VD+X3ngV3CmC7i?X<+1(?m(y2v z87ERaG1DiLIVDTl`O9fgm0~DT zzFh}@6#EI13T3_l20)+E@VP{4eQ36;&fB?5P#yTDaiMiwu-UT?pn9=0E(ABZjwGg)K))$I0^;NDCuR4k4 z)fHb3CE%rv zIS?q67+~okACF3v1FH9kvWq~_MlEB3?RXGpA@^2?2oOZN@Z@&@yoO#A-$j$v{mDwn 
zK&Y#-rV7{4gfPyic8!#90#;I<33L{U3Z#1P>taC!T+c24Dax=Orm2*NkhQhwZ&H;#AgJ4OOnITvORc(Ih3st8nlTFt-|vo z!j7X^1~HFjxv|q=%}AV=c0Ccp(`32;HC5Z&P* z%q-kNehp#59ORlagsO%E7a6aF4B~#1bPQd=4`tLbGII04Ei%w_#b&)L};=a zqaL@caM~4?}Fqm)iKHZijPTfcWX?*eI^`}%w_l( zZ?vJU=lIiGunnc$kU}cS>Hnbiq`WqD%GcwDknxXx>na(!_U;FQ=_P6w$`CY4XtA#C zdV5=;y-mh#FUH!gm@bvf5o&rW^~ZE(^SbHcH8t!qJ(aX?)@-YBX6=qTMqA+$W^ILm zRi?rL(@o|T-Ox=Rc(V*0+q_|VWk_O%QXpn+j%o!4>4sD&LaPSdpXp`Ak#7S;a}1&^ z*P7mILsza|zHn`bdfBT8O^O_u_l2kKjqzw#InH z>+E;-ObITSRpBs;Uk2_Pb_7);y&0mjS{7eH!roEK$voEgg-#E`dGruWi^rqqD$d`>Ive?vUgDoP zkB8IZzAd~V)|SRjE&BO3Z&Jhlig&XfhceAlmb*-M5!!auF#R{E0$)5lkm`5^gNCpq zlXO8g>#JZIntTWh%FJ*2hUT-R$UdvWF51afYE*I~ZO61g=u_+%9xcqutgbCt;UY4T z&E~zLh3&iq*!o*AZ3~u-rbF-L-_GCfDL1t**K9gGzH{rr20KDF>YMHvA5MNSxzbZU zboR6QuJu6u-LX4k?~gn@u{>4|#5Mvg@1MQjwAR{PZteao(6iC-%%g*Ys|`bs0z=2A!FP@=9*#Z~R`iD_A06ylZAknfkoeE06Slt94}&~Ey>RG4FZyk-zyGlFQ&(O8 z0q3U;0)r2@U=BM>Hzh`is+84|u!FK1YC0j^fQFa~Om8~Pn%{KV(#;vRCc;;*0SHW6 zrW5KiJqr*$&!(#KehAt-Zz<2er8YVYNZs($C7Axbg#O|WzTx-_tUG`uwMX7YCO zQSeAv#Qzdqt`~*PI^_1f`SR_Tms4*{{nHV2`JXtEd;cbbHbZRhmD{f@Pp`U~zIOOs z=h)`cn+)1)MuFf`lBa_m?J#o$sKCOLp8&AE6`t7E0z)IVN0o*MtyWwhu8h4Jzwg1^ zD+{p7@5BiHGZ7a%Q{b%z?_(?fnLs}U)hL4dq3@a1aY;3ly!8hIEfozSM=pk)rpW%* zU`C;a>5@sNG(8H&VTv}Lw4_2)$Nz0HYk+CGq@+2z{lcig?%eUAr$9POe;8Ya^8k4o zsOVK#%(2}VOw4GF_g-4{9|Wt3o;QbY53koY+B>RTo8=sN%=3b2C6b`s2-_Kpr;}=XTum92wL!F(@vgH;tY(*Tk)v#zX_sr|0pgdk z^yQ7i$7~7yy`VpUeby4{FTe!xC|rR-U>Mdutxv-q_bAr^8W7!3Yy*bAd9Z>x>7Nr?7j`eeewb^h@s4pS%oVU>BL-O zHp!E-Dp%!uaxOKS=4na}ljFotj}RkON}f)}Qh3)S;Ta=c>NdJwf-NI&WR8#(m1{I z6cSN#`ohI1ZbC^N@*i!q*1xkOQ(^qG zE)Z0|txZ$0;n{SpxlM0){!Kw^{!JSAwCZ>j8q^rIT|cZ)5%}C%W|aGWKz%Ph2{~t8 zKvylh=6FokuGOsIB|)7)0E@dpSNyV0ud-F2Z-4+E+Cj(}b)9u4s8#mY8L;YFl!UWL z^{iDE%-)yC6Fg}n5{FGf4i2Gz&DI~(*0jXUB;rseJqHPaoBJKN9l@}T4 zz?eFo2rSoSF<$6-iFws`A<#PTo6?M$AG#GWjssB^Q4fd|3>=`xEg%u3CI~4AN$b+@rDV( zXwxaQVg;72U1C)$bOS``EHaxIdgmr|+qJ?98wX~{1g({Z0u1*kWC7>+2M9cmu?iqL z#`I;KIu~W))htLTZ{3kmNGpCY9%CyCx>nC06fJ=&Bse-c>TYWw&`wHY*m{>)V8)tA 
z^`fH(kbC0vtMH{jahqf(NBTw@&z{`sJ=w^djKS#!0({TdFoUL(S z96n%aYa45XgO>wL+q2Zp!R;ZIY8yNOgP zx0@vCp&g`l@<8n-)b<`wd%1L~_<5g_Iaojfsu6`p78~7^92!wE?vSjR_eztF&jD;Br7%v&XyghEl`-+ zT|x(MoCJOuJW!mIwktZzOs4e>p8zhr1tPXdK0Pw}lbPH0TeY9nw$ww9v-!>0r;`F5 z59%ws?*MU7^_L-eENJP5vWNygaqXM9*c^*G8Q?Q{oo*(er(8#$$;(JA5t%s2B_&Tz zsVyF+?8{stDD*gK$pNU;`nf9%dQ}^kxU_pVqvZ{Xv8dCq^|gm7pXVchKMpPf#3>E;i$V^*AJAg92Tcm z+vBJ^-ZD_Z9bMUb-`wsx`grK@gPFTC?>N8qe&ua{Mc*EpxZV9p{+Y(eiTA$#aP0l@ zKXrfjX5-M@cK)?S=Cx08*VitjJGjLIpjNH4-r;;-r&geJl#t1Ha*%33GRSt|^DGaj<|%fNO4?q*Nf z)5zf$?%mqXPQQKY&avN*JskV(iMOWj-rCAeH`Hl>cv@g-{Kb&2zMIz7f*Pf5`Ohf5 zT){~&N;`4{k1`M!a%UcrNQZC!Gw^f~jl0ZwI0Yt5CfQ};UHnh8_tXbmD1-;=Z($I- zP&LFM9hw8gJ&BT5+2$AmvbRKCI3l}FIc{huFRq;hq_8&1tc;OD9HRc_fTXu#R`9dP zhGEFh;WCAb9Pl6j;zkr@&c?m)9KQ^E7+m5CEIUa>QT|Gf|C2oT7jk$neM~v8+&Q&J w@VPUqkfOfX-N=q@lkvTDQt92xrIgXVzJ&6%JuRhNP(B|Z%G4urPNw;P0S>F5?*IS* literal 0 HcmV?d00001 diff --git a/memsync/providers/__pycache__/custom.cpython-313.pyc b/memsync/providers/__pycache__/custom.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f6fc496e33931b2a00c7b245fc3dfa5805c1712 GIT binary patch literal 1506 zcmZ`(&2Jk;6rZto*IV0(+d5H#tD;pDp={G87kaS}hc*I~M4{@c5{Fe+>-E^3cD-xf ztgX5&QcfU*P~Z>&C%ERH;9np;HChRTgb?a2l`D`q@!ooELPh8BKIVPQ@4b05Qhi!A<7RiO3!gdjgnOxk_wsp|A4KOU`8p~{j zIa-4`x~pF=PO@B|Fb1WrVV9nhX7jY-M^T*kiHswKs&>my?%9R3dc#-Z7Q{o@7Lb}C zx>6+qf;>mY^1OU}1Jy(3GT;t_g2SQc6hL!|pgS5EPT8qAMuXkg?sF(PRi{KUn9iJI zX3{z59F)7dJ)h3s9H=DjP0xL9V&X@imEZHju;m92tWFG;AB`+EiULas7|K9c2;UM9 z`=JbE5~6L;@5hjc_D1;*x}aO4tmK}s`dFFO^P>S~8(Bl?H?5xNWvPNV>c}p+W~n`h zG3O=)Y$ow$Cl0X*<fJ_4_N|U3u16f7)1o*0}bxaqXnO^iA{Y=5h7%Nv#eP z@&65Z3oThg&bABpXiZCV*e+kqp>tS93oIcy3%Dwk=MQ}u`mIo;mtKK=*4UuPIug9B zTK(Yh=a0Ym84dCAuno#SBwHHSMC(Zzo3?PEbXv0>%i7eGx zaU9we8kWfK2^^NP=k?qt;ZC?gTYXfAnPI}T9L$Rn+`rRsB|2!)^AvqEf#)X)$krec%JYuc1Ug+F&}+{K32Ft&r|gAR)Xh_qRTuWR zPU_bF?w=ah8!rt_U-+xab$b#{0eiMd*gFHP5wQb}Tw0>%70MQ9q+Q6L&BpAxfR6h> z-|Xac8jgPvrRF=6R=f*W(Lgt?Y$T^e&iT)5<#)FH2YdU}V1@F_!bNU><&GJ$aS=CO 
I&_AIYJ3Z2q$md9>UdXV{go!>~%W3 z4%k=jqCQkXeINlv0V?3fm?5Jy=djE0bCk0O!JI>uQ`>bM0Eb{1?Y$2WA9Vu)MTw6Y-^ zYFX3gV9k9f8`o1d;`u<o~dC^4;0?Yw>yom{-1B?+;ZW6dyE*FZ*BvzMplzv0wMEF9UZhq^r zIN~Ntjh8;duhV(UszYPKC$G?D>goMpv+&Rb_2K$zIVc^RtuXT&1LF zhMv8s8dH_|Y)L8URV}vz7duhavzBk8i&ets6+_7xB>ILxj9N4wsMUvIT1G7!>kKyo zp^b^PiF;?BdSd^6<_otG;|%{T5Nfdiztug!`Agp+bk`MQf6K(!*J4KfH)D1LkoEr( zF%iu&Nrb(?nqqJO0QSRV#Q`4zlz3EKl9yz)D9;xa5`U8udpY{(aVlsTHGP3QpWOcB z*S9t&pZZ3g3L|aUMz0x5Z-r^>G^BbRU84-Kc?i(UE-YMd)mOOZS8S)P@diOEIva9Je?lY8Iw41(g8SX&7K%6q1Zi}NNoVB1E zF35r7$D7*h8T)~>flnEG5-x24LAD%bw*Ca!qE%&%bg+xO7Wlm3Yn<%ay!nl(Ae;hF|$LA)x2YS+gHtfabm11t;WKF*k@I(as4sNB~0Hk!d)+y6v8Vvp)u>kp+a}gtC(<_4BIZt(^ zho_fPY-M0mv?UdnHNqAYBjv*MCBaE5&YPp2OM1t3CQpP?RWc1Bd{HULxhip44NC$c zgjpm)1^S<<&Y6?7yqcE{MW;r`cZS&>a1T`w*%Qp}$70lMvSA?WU%+#>jGpyIHez>U zjozUZ{<*t%^`nOS@XLYtqe~Aj{buHIsy^`E=URPWa^-T9LA zij3AHqkoNjXZ2h&6y8``TiObZ)I%eW4{wD|HA1I;f9|WPKTLf!{pIxYL~^tCyR-L& zW`E+*`w!mV>OWTRKh{hfeR!oAi$4@zc~SJl_9666bmQvXtM{&Lo~`$tSou-Y*LUaE z?OS&~yZu=+ba>U~=47<}w)M83N+{SRN{ zSulJ*+6X3BnPwokaeD1EZQRZNfz_#(p~yyUt+w&$+NYZze?HU*jjv3-^anSbcb)g# z_qDqJ*fT!}-VATL9v^?&ce>&K9*|dm*6=4NA#=~&cJqThtIk#r3Lf4HBIG_Rgqqk#O}x?Y4sP?Px38n2RQDeRL9grUtghGnN7@d~l^wK^)LQ}-Ez$22D8D~ zg$g(a#$x!9AbgZg37Cju5xgzJD`MWw-p-$Ibmy;!X&1uu>tPDpXA=?5SW!5O&jSnX w{Q4M7@RwtluhGy8H3vbnZxXPMyrV{HVW&BDejx?wu`AB$Fq#sB~S literal 0 HcmV?d00001 diff --git a/memsync/providers/__pycache__/icloud.cpython-313.pyc b/memsync/providers/__pycache__/icloud.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c2f65c2ba6bdd9fd8ce327f27d72e3b4ca4de63e GIT binary patch literal 2249 zcma)7&2JM&6o30+d)Ibi2iuDIicLr=OhR%f5p6_UQ9?_TkRorb62;PLV{gJDdpDh3 z2VzQyI8>k>2tiRep&ZMt{RjFlG-(c`Q;?8As<;_BQmdZ&W^9L$OPRx)H}huZ{pL6C zy{Wgg1p!<3w_mMqd;l-mpw;{hMX2XdSp^L=VGKqEBBYrvj)_F%y+kDJr7?NbM||8S zkNHOfBrvKFB@KBc3EiOi4uR&+2Zp4^sY8tph&n4~0IP z-qnmq!=D?jd6_u#R?#F>A?B>*dL}6}58<8I7zp(dR8}DmM9=^s3J6amvL+Fq7S!ZJ za7`wDEyMt!7Qi__*Ay+R1-Pba3JK1tnKs(~)%lWBDYiVnW@J&nRbZ3Vu*Hq(5n|1o 
z1)<@dOBp5JvjwFKa**9?Z!T?d&;!R{stF$;00!<&LkebL20oEeFe5w>CXfZ6K4aNM zs$RHJFy|OYPmm$kEX}y=@H8DiKb+6GCUNtYVb7TPuWgfK%a1sPO4+nMH~*#OU93#! z%VybKunW8WT$5Vd$?rBWw?O?x(=!VmO}xv&Kw~tg_QITUF_{+Z*!`RLZvJ>{ePknYVnaDm z$7y&6Crb#9^A0*&g(2XEPCg0;r1f4fT) zPlW)}ic6VETg-`Bb?@qi-tu#zX)+K^PV9j$3?TIEXv(_#!R9>E*QIr)8VR74Dy4&kk2S4=|> zgdmM~%Y-JVf5afyEt>|$t!ZMAg=+MYGi{a3^lr#kV+E(YXw1!(%tfBBXmM%aTg)Wq zx=WRub|E`)?$QMcG}K6Fo-aE~FQP-U)Ucry+uQ@^`_b*N0(n^O6z zQ#PsJyn(gJrJ{pgOncrUj!mUm)64is6qk`S6TFZp6--v!z)fnwu!~mF@JyG{mS&B| zGHJshjqeL#UI&tBaeqOv1kXASuBBE|TOHkZ{Vzft%U8BSM_(llu1&5?KAc+rcr($z zF?en>ac*N|ax*b`_lxaF;{L6Bx9;D*cYC|@=yF?4f@rFC1fua;C$z_x7q;5cuR0H| zRUcH>79T9GU;DXxt8?h?#aGezn!2Js3_U7uM*E&capih)J@`}qM&is?^rN~@>S|wB zYYN1VK8+pUj2(X(>)nd=ZXfRXJ>K&sgw|RVosdpMUD%s5Oy`-!0uw6$<_n9V9>|Cj z)wV*(s1(gpShO6n@C4^LkyK?ROec=xxom%vF&aUx^~PFmq+R>i+h-rZy)L7930_2! zk3QRsoZL`Oa*X68PP5AEC#F{+b~VKQqxARb3CA`O2HQ&IX~!vLWYUKN9teDzYnc#L zbe+AhuG65dmz`pz#PqPPe^)U|O-n?_2PLjovTW0K&@A(y2p26EBW-~n(5Q5*Uh$A1 z{mgYj*Io8o7Iee&h&5fo_|bKax`CgSL!z_<3pzKA;sK6#WDc0?@hlYNsnCdNJtY{^bP?4w$nrW$|*cM_B9%sU}CXm>IJP=L> zgvcR05An)8@yU|xO~9g;2y%b{LduV8f=!9CAHj&}Anm@Xs~MuLbTod=oJ2)KS4(V} ziY`NubClY>7kjjhYh{qwg0ql=?6doAR@d?&I0v(B&;Wi=@N5c_umJP$3qA?+++%JA zZTA)CHN8Zosnw#o%slfL{bH)+d6NlG(V5AKe9lyfnb#D3Ud_AM@)@I8SyFY&%>PQW zu2kmoOX`xjrWZToa@(P1-nHz)@){LNs-+e!8gm7?e!t*sL{O_ajNJz8dw40{jK*pc zn-dQ&?Szv5-HXIqz)A5>qOpA*$N!ljRT!te^*<-MgUdC(Qf?393w`}JM-4(!acQ0Aza-7Ufyau3r2>58CaOEa| z?wLg?PRq8RnLB4=R(f{k&^ga|JD7An$B(obN~rzD&j4nF?ui`V;q&IaS>~-{&Bb;^ z#tnd+*Yu=d2!a@fKOCRT%ii&}FLd9m{jD&?*{);iaeC+<+4lk>KSVDJIbRpZ{)-(e z#+c6r&}TwUII%Sev_?3zW&!XPMEe%LL2W+>o9@r~Poh^r+jIV*lS`MF<#K@ax|kDB zq~kuK?{WQ!XIS_iYwqp3QGniyp#~`@WqXgY;#3YYuh>z9?;$50`s_#Ox%}OFvWVe$ zlYK76Baw(-3t~NeoEtiC>InCxqo@~7?tdrpP8tg#c27S|ET+RV)c&m`tR{J#l3<$hz6#uCnpno_1>PM*rnymDiTf-PWOnw%K5v%u)O zH0mCpLPjBXG@bgdYjZ>)Ycw=jHYz3AGKjLEuJ^c%FWkcXVdzSkFn^@OR9r4A*1SQM zs9>&Ps$HTU!=!#yU(tx6Q+`3U2>ZgQpsr#nHmUcvv7{oiK`gaI{TgQHl18XkS8zFC 
zS_-kuJDPQy3it=(PC7tXAd?{`gkMnz>&lc{reXz?nSD$uDtbvPDHdkjNGnM5ccWNe%HKC2D4LX9QVvW<7c`7dp)OX$p^_s&-r`87r~zD>y6;q zC$ZP7{9b>&wz#=i(>L{PxzV4l@}Echs~7j;1MkZ3OVwAJv3PBDbG5eqaJ`vKy(c~m zLi`QxpbO%O+WO{t?XAtXwiiD*--wOhyYgjsZ%uk2Jq*6PSnnR$>+Y?tZO69*kJ3AR zV~y_dmXAN%Rh9Nd=smL=9jr$Occa6N=y3D7p?~xaJq_aSeujhY-u)M$tGBw^2qzB& zh!51(9;`jQyZuHzHhS+$GZMT1_B(IyMpE@isu@dGBgk{++@G$#clG`0N9p>&#SirQ zK<3_!W@KPDGFXoc?nZ{|k>M{-C%419squPhypekGU#DMkOk3Gp**Q1%&)Aq_oDUK& zqr61wJ9u|vBlXzGqr`4(tPvagOV7vgzs5gK{4H^KdOv{%9E72#@2D%DnVrNBT_}aH z`+E$mG!lqvRfxVGWMyLN&D4yct7#8;nO(kj&M?a902K=b7EFZ#4HODXMyXO}bf{4H zO+_iUcOnH03}RYkO;>fJP#`~N5<(@-#AmQ(pLgnweq48AtFM3|h=ppgpjZ~s<|>wI z;=z{~0|V%Up~xuKu^2UR(1{%lM$}2iViD>MnGuIpSlitgNtw9m7P$OO6*VlMJ%3n!+5# s4$C!;`xMSU0qNfm`4r+`!Pve4p1^_U64zUOvjsS`cwBhO7HlK`4^RMrUjP6A literal 0 HcmV?d00001 diff --git a/tests/__pycache__/conftest.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/conftest.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c470a6900a8379dc4d93629dca65b66524815f7d GIT binary patch literal 2373 zcma)7O>7%Q6rTODyd~DkJU8dvD&%d-LA+ z-tM=z%Ls<>_jmgDQG^~crZpk~!R^1!A#?{RNZ}UI0!KN_Xnv8Wg1?Fj5sSxYAxdN6 zjzlGYCr;zA3XAeWf+jG^wGE=2wA|Y=aJVYs7$0yWp-=^_r|T}CJUrK;l!y{lV&mXN zXrsTibGZJeji=~7!$y1=AKu%d6sZ@Dg$(V=JzXdMm`-wyPsx9<6DiJ1sD@$Ms;!%b zbp!b5jA<10H7`Ml=2H$Ok0Op;OkeV1lvGNpMpC>d5koTzy0PX(38f~rv@njoBal%f zAjIu|3&gvS!xhM3I)RYEXL*J9DIqH;0;858%TEqPR3hUXMxYaYmd@K?DHuvD%Y!9} zolxTbERPF@nB|s3Otm!GswA>#lvCPz(fd*h1z=HJL8JUIO0_#*fRY@916NGlu;ICw zz(Pr>ift3yzzkV8GwyJ6y zPQ_Y*B*rQ#1xGDUvWR7|)Ou0Pv23%v9Se_b$7iljXJ@Zu%7v8RNfl~t>R{W8v&-|; zG;OalFq|(J^0rFXh`rqpKCt~v0F4aD@&W0`L3=#B3=c;b20DP4;tnwYvE^@UC&DYh zr_whJh*B$>7Q;i$yF4?I%UXn5IbAi1BzMywvsB+Cxmi2_vZR(*3+j~D$8HZzPL@nfEm>2U*6uJEv{*m-tA=)@yRZJxIkFENg1|(| z8;~a|0_Vjn0)=AJRD?dy_(a(>Hb@l^psmwnAn|h)gfAMHvl#T=!!YPErXwGELNN%s z;Ilr435++}P*>00m)*{R+I%Bv<-fa3d#o z#bH+*zQ0mu>xEz2k2Jb^;Bcd7;NHC3lYWqJdtTX#i5=0}Rk%OWxho!X#bXU=>VBpPjnF~0bo|1>~T*+L8B3<(HB+ElXc)q|&+OFak 
zFc`MUEn80G6#x)+0DUiE%PiVkP)<X@TiO>8j8fEI2b=+ES|)4$2U( zlA;NRnCH+oVEYEygwnP)^a}QYDpS@aQyi7p!Uq77nLf!gF92m2cIYWKF}zXszwp}E z{1@cmZO{#Ta4~F!;7SDIJZnOf!TJn*7E2U>D~gT}x?*?jY9o1kH#z7g2kWQD+~gPl ztou}baMbM{tt~agu3fRu75kceG=8etfs#GVE)+SrhvI;gZa~W2cj}2Vb@7akl++PV z&gY8`>t6XhWtsM*RYxzut1YkXwMsRA!1d%SRlhP-3nI(@I2lU(2Yc}Y5&OvmG8qmt z^}I?w0XnJ|4S263%xl<%!s1+4EmoXf)UpevV^aoOpMn3A7c1(wn2SM3Szzc?jD50jdtq1Xb;aI$a}Vb0jGFpUkpGt+A&5-#~k9xz4rs?}0D1@Y%s6!rdT1U^*K=(s%%ilVD&wcnkN(&9+@QvOun zAi5AKgFK}#?dwMgwTImuab=*j%ePZWgeIRj5;qDZHk6Bx@in9DCu#{_iHoh9_Hg#K&@DlyIn^7jt^?i(siZ zvZwc^YHB%ss4$$@&zfOTs*&)S(Tgl!%w>wXOGPs{b{_R4{HA8tV1`6{%&5qb!F7Y~ zB_5BP$DclQI&I{baXO#XM{=iM)N|iv`3t$z-!2T5#!$6!`k8$3+0vQQXpu3Y51pot zH@LLA-$;&6$g*?#1-1rdqjweKD8$>!$04P8-Ry=(%T15IeS9Ghoe!)n2iDH6+x_F7 zSqgq@Vbvou-zl%!_4bK{K=XWHO*ydUc3|B?!>ZYaEwh0w_fd*6M8ZC-N2cCyhvq}& zw8GSs!h9(e@4s)O!U8EjWH1$g45flBd^VH_o6%R$@}v9(y0;b?hpg9@m#RUcPt^OH z@vh(vQA@ZwzGG0n9RoY;S5*(B1O4v2yZ}Whq}$}@Q5e~6X@`G=_eo9p2Ca`*QTjvH zCD+Nx`7LONHz~>>wTK`691!f7IJYV3K)2O4yQb{As$2k%+QXgeMQW2%hj;3rT}R!N zAv1Zph~1m1P%_A$Y;*-rHfra#&*I7I!NaN^ z!V{jx6UKX6d+5PbxbP3@hC|QdyK=szf2?}LH`L}*$Rug$P?wsD3=DSpx~*^S|NV1s zuzNQZwZ6W_R70vU-MClfPpoMLPpru<%RW0#Z2bILCDGLP#gVK5lTnQC+C4tel`x{O zTY>mtuG(j-5U^Bx6Gk$330B&OUf!LEUN%E4XOu>ZH~pr6d}1$&eI&Zf<|=ELnH+J| z)oT}wfK~AFtJkw*x#;DjQOvTUaWP*!hn7&g4|NqL<@`MMrd_Xune~mDc@5v+IU>5(X)I5>1RKiWw4p zWH$c#_(bMRDL-1w>xLN`EevHx4KoTYr#)F_Zpawt3Z>EE49ktP%-PYxne1qWZqbZn zjO<8`)MoP${+z&~jxknX=IYEyfsJL0ncSuE(X7t-l^Ni)04E9Pf8>rFZ<~rSx{URc&?si|PiuAaX9dsw_a$h;% zj`SMMs2M!Qb+Facm|eW1hFFe>VuqMlhMsw*m>&ZR$bx+F+urMNg5fI(7N|w{ZI4t$A&$`&Y)L3sT{V%~2k4 zp@Lv-zFXQ>D*W~7*Jrh!`Pi;p^%f)-#U;u3uT}p&BinD>jD< zuZs&6UWgNJKj4HH6?jMMk@valdT>^QKy&6b>vv1rMR(#T5T1h$jj(iOCq@^n#1$$? 
zrjdc$+Ag+;tr5HAY^@LS%ds_B7`2cCTL`6m2h{@eVsyHOG&4vO;c5Ol9p(1upKMm~ zj5_>XSfo14F6B=*z#;_(CS4XO=(4+>{LUU(NIvM);%(8r`Jj`FzLc5+cP4Xz4+dR# z;+@*zgZ0Xle6VhrlKZ7X54}%(whgnN80X0K+AedUPeOIu?=Y!)!k-{xq<$AHlvy`xw2DjJ`3p0|G|x&n-hK zEIlmd(v}{mJIqI;FtPS{^{~YeU~6sj*eQ)Sg{&ewK@~(b6ta}C_sItq+6ilnQn?Z0 zHYuSwHkaFSww>;!4Z;kJPjv3(avzsnlQEY)?!>Ke^?^iGSmcPTmOVy=>?e^R@hFMM zN$eq!B=H19qM3D3m_{fyCX&%d%?^<0g>c)rgTm%p=CiXZwx1oO)SMIuE85vz73+DR z8EoT5%^LSx&V&pYz|c8ZMawO)r`858nVeXXp*+|C&d0WuW7}r6Q#aN_%*M9eSYM8H zA~@fDtlWJJfsZcIh3;b?T`YH>;=xnp*r|D>;J*sx>XcCtnA1*?0kM*lofs~l4w>`D zdKp|?$ZhS^k59S_v13$7_c59Cql+>)+ig`TvxxwTT5OfV=o)U7ehTkEJ*zY*JqZ00 zH;~^tFpg1e?20R%UWgz@P9C0wLe5irAss$+7GtR?8wp{%KsoCnZpxR|V7vSSXI-`{ zFeoihzo(s8WV4_9A!jICi4q8!RBl7oP10sF*$hnlj zpl3S`dX6tK=n>8p96NHQQ&ZZh`Sjy zMspb0>_yv_4aW_haQqS#@iGaH$ppt7nBAj`9(D+7gAC!K@kN3It5F&n${7ayrM53L zb1sWWNzcDl%JCtFVRqHx>vD3Du`+%N;TmT&t-f}=($q2KyQ?)`jsAA@TIZFfyHdFj zTXpr~G_ZC1pLYDP1E-mea%}s&X8kJK_9@_+qd<5LIwo~xCq@@)6W>OOZSKT&42fqt zDWCsSNs--c>&J1%vIM zHv5AE>SxhluuUYVL__$K2fgz+)cbMBrToe)bT*g>nK62@&%>)ByvoKs;sjN0WoZ&l zE@Pnb)lI@6)yI#t-~P1q5Djt2y->KA4)OtvGy=naxeOEF=O!R9P;yLw3t%LaS{362 zfN{VoI4DhoH~7={M~58f)B{2Zn-2h6?-t2i1h#Nx`o10r9lwO$R;}I+e1+aFeyQG$ zJoMg{Ms`KLEpwr_BVT84ivdd?-XBJU${w(^lELbv@q;i+`(aK9?J-#fe_JPs{gZA_ z99(s57ZpH;!7@55MT%EQ(0ApsIGpj6Lj?W6-s;LR#%)0FarP~X-nWHiWCzvZH)$oGB&GdDZjwUR@-`}zLyERKpbHA2p zKsU+51}^Y(fV()-(s!WypHK=NR?bP9&S{lo`qmZ=WkwCMI3DvSt^@Q3tMh$5<2g1;VuS?e(e!9pngr+1^g`JDSvcpnbKJ^}jTa|qF#={71cC_- zp`+R9iC;@)A*YM+R}jFl#x>U_%8l_UbwP_<4Nr%!MDE5m%^V@0{LbrbT$E!w=QZnB z(RPx@-BBPsr|rD*hSZg-VnTgK+c|T@ox+YGt6dyQz7En{RWhQtwVi^35_j!Ogc;n( z3VW5rAPHKg!p@N>koXM}H8eLvIugV=4f=G5=7xjYK5O*{h0B~@{z?S5g+B$p*2Y>m zK~L11;cl+WaN{n}_>xYKfEU9pq%D}06UBhprvVampa?KaQ^o#)SE^uE!q76D8w9gX zE%Y(e8~{q`q_IEq-Bub7vrcZ$VgKUX7R;`sm7D6f(itzv))CCsX`Pdc!)(3Ol7qvo z{;=CGfq6ykm$}e>0J#O}aI3)C9=0G|TY+OmP6pV|T0r{vNf%GufyWP@%F5uJ_$j?e zV!oAA*+r<%`D;5R>Yyrw=fp)~dH&^ku_K;t6Yen?p(;4Y6E$UrA&dxw4f1h;@*6nH 
z+t@x<05`?@H!LZ$$f*lfs@bV4Db-^98-$CTYRyv@Zfk9xPLR8?%`>l7V!LOxZ+h3H2QOU;abPlS9oYYGY$0 z7E#TWKoQeXgh95&1{gPswDCnR=vd-$E~jVUIutWlhPkhnzf=Q_SDvF(*-eNC(8$d* zFIQ-OtK)i?5Ha{ZtzSj!;FD2~2AAfv4gwiVSFVZ)^&PEa=4E#ZJBF;XJd}OCOD2+m z+ge9`CRxJmun7`gOp?(B5}d7*T^0`0Cq9G2bhjv-E_*Y{$8lvTSE*Qn3mHy@n07V{ zUvPw;%1g9J``)uuE+9`H(qZ~$T{@`;T;py(t+74m(}M%Y$Y!|K4E_gbj5GeD7q8~P zc-zm3U|`^wiwHjak*p*eHYhJ^HG<4V6zgkPBOyuIidrReS;R@WQ)`LS^2frS#0jqJ zfY(aj!ick8+F9fKm{Rx~`yTkJ>*&|ujgU{nu4(1Z(DgQYdn4pbW!;o=MMphuII`DT zjd!S=?~@?X;P%GuXFa4oK!W=^xO0{;)3!}?uI9gEg$tOcDqyXqZI;?30NVCwQJU|d zNaMdDT*SAr_1b|-WBZhPw{`tY^hUPax_2tP5Nnx+pJJ2v-O@Hqy(VKKFju83SH(n% zJKCme&&XUz;fhH$ie6Z~=8t-Rw|7?Ca;^9G58-s>O^6TPoY%Ive`Q?90uD>(ip^0* zaiN0XA(=)5aCr%4tbcif0O##D27$~X-&o=pLwav(4zNQB?JI&fnd|?KLHLz50@@0U zfT!Vw0myR$aF4Hte+w@O)`ElxsyX{FmiKKV5Z8Kq+3n^SNsYcO2^sASqFU@L?OkH|^5~mL)=e0-N zzcMbNeoNH767!H(+YI7RjS4g@~q4Q zONaPsm5a93w>*ZV_Wi|6a_%tP2i6}y#u4#mk$ zuVu}F85jvCc5Zyy7^<$Qm7D6f(%DQQTSs#!bz0}-LaVg8sl5Ql^RCkl;6$u>4J{Q- zHKa9bUL!KN-tH<_ts88ea|Zm#fu`Mqu1$d#QrO9#4y=}&pY`Yp`xYyp2;=W@wBJFx^AM(gG^dLJZM8dg(LYJR_6GI-QDoVn#0jC4 z)ylJL=^rIH7?oCCOFyU{92b-igK^~ryiX0tcUI}~kEjW?kN!15-oO^dVpbo@F@PQ| z!15*cn(DYgS?!^6^Ve#4o9*A_|Kg!AlCk^?{C_7H$tT6u$fuIBfDsgH z(4)Kub1>W5`;+jE&ObObll_Y|^Q+ruSGWHp{7Zux^3IlR>g40Dy&rNjinABRURfbF zn3#U-hU6p`TRdompKEH35QhCBiDk~Ou|Gy)Z2Rn@JoG-gwu*4i1Ko$$a*A3z+k|uu zEk;N%W3lmL$<8w$UrASIP><08BIx4E9y4*a%H^TB8*9DR^FEAheC9PS%CY#oX8kH! 
zoXn`BKzL4zUooVvTon`QJ6il&k2{4OLsq*ulsp5HTvalnQ2%>t@85+ht?*;?UNP{B zRALmIi@`Nf^()hLsdL5Vv|eNrS1JfE$_b_Ln~U`w5zqA1kBaZ-E%*dmM#R{YGER+3<6v=@-iJigFzPYs9C$xAix-e&P$MTkrc7U(3f7`nOV literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_claude_md.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_claude_md.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b1fbe86d09e919c6c551a50fd405e23496f1deb3 GIT binary patch literal 20191 zcmeHP`)?aZe&6MDNQ$Cu#qZRiC0jNxks?{LWx1AYCw|4R6k5~K9d+1J;!0*Mk-S;T zw$wQ1^x8yjdPuH7?tH-kRSzg?r9ad_i$l=@*Zv8lDTT)33Iw|>&dkove&_Rjtd_&!pa6&azkZqic|;I?jT!Us=m~N8V@P}` zNP;Au7tV=PjAGh#-bLN)>^bLET~XnjkNS-(gc5coKm%}gpAVi3(a^aj+7uN=0z*5@ z4y?1}*&!S>vb1@d&?-pY9fIUL=+X;mgq8WBjPSAztSkU!L0-0T!W9dBhDwNIuCgk$#vp=qJE>2(lhr!<%|tdxTi%5=FMz=FYLn zCYvplF6&HyV{~>CbOf-$>>E^ zJ=hV=D{4MEmd)g$!V~Xwe!S!Gf~XNCGnP%IGZV9Zql4MNs~1khPo9D%V{Xl-x2Soi zuBFmc3*MmVydvk7n|ZiM@>4lEm&{+)eEO&~Up`4El)Q@b5RFE)v#%Z*iK_}#N7Bj6 zxH9s3Mmb5-*Oifz*|F&<9O}rKbpGu0=m<1`PhtdzsWK|-H!M%3I&(AHM!g3)nMuid zCv+2Zf{Rui269{YG`#t)uNdA{*xg+WcNcn37sID-pMK~GzI*ZQi)GK&qG#*8r-0|Z zP+0bCE_yZ>w)Fkmz5*uCJp}eZF}!>Jd@=ko@P#(c4Hi7xm!WN842gLdYeigchmsG4 z5rK-5KwXjuZ-HxvKs}Nhs25i<^+{gppYX-}TI4d^m;`tDMmm$q-cUbl0q!yTdI>-Q zA?fYs$0f>x`KY`m4f zZQ1et#^uyU(5hSVB#2pmh{*;sr=Km(--m+KPC-aCnKg!*&6IU+g)tm7?!@dVxut?@ zgH_v1*|}a9VN6yZ&b7T}A9ZW`%$kz7)7=5Hlkq3;p|Hco{hV25oncgdliNHiunGpm z+!dHr3!>k9)**4GmtY^G1^H*W!w^h%(AXLo!m}8BP zt+kx}MLcxmEO7DXaz;$OT(L?Wb}iVY{BhDNN@Vz2uj`<(Up_<6GO+Hl!fifb^ZjT@ zR&Xwm6#?5j>?|vSW*>EHhRm8$u+zt6MUxbgn&M4eBFlx>u!h5CTjtqX!x`Uzm?5y1 z|87Evg)eD7bw zs0~)_K%KbN%41YXLP8$P=4RwT?JBM}{7MF-3xNEk(3F||{abY`38Ux`+-P+h6mYe$;G?Z@qHRd_0Kq#xgygX1$<1_kJ&D+6wHv066(5K-YkpM$ z44bDK$xSBn<5@bzprPi|XH;uC4v%njIz5?BXH?CnKR_)ArR+&3skTj4uV$wwQ!-U@ zRGwf6QO+c%6fGdD$#DgXwdND-sRC9$MQN65Ei!hM%*)D6=r_Yqv=+1`S_@h?s%?~E z)l9PX$Ffrh9%0}fRhb-Ta7_zMWi!{5832uASJ`~96`+N1==yzk+cQ0SMjyaI`xPUj zUpZvI(r3@~>$9R)>+Y_|^{5C_0H)dw{iS(+wO2!X>2f8MTlGak^q~F&;GWyU!|mU? 
z(>&*UNLtEdXOZmu+1dH4#jX>JzO>uJZz0ZV7qx3nfTa^F1KD9=2>Rx9lml>?x4;Ld%{~OFJYMBS@Db?PXH&E0XpP z4)HQuZizALS4xf#OwpoR00)T360;&mK zq6pGghcpy1wvJ>f;SBvfxS;hr3t&Vruq~xi%2Y0kW{z4A6UBC4K*A8xUQF#nf(ujM zkMPORmymP-i8awqOoIvlIz5tpBt1xuAn8VO07)+rT>JD8l0G27jV}S)`sh(iAq3Dn z8^F{cl2?$tisU$wlSobhQBl{m?M+dAbK0tJPBnL{?Q3vac46DU1JcNr^eby?`kKmg zKgga8swru~&z>fwBBvx+OS8Uk_i9*Z1QcDaHixH$tkysh7+I~e6L(R%4v*niH z!hn9E#M-f?D_Vf^Pe{zv89~|7b;iBrNLw+|Rv>!|k+xD~FC>;Cd&{KaS0sB;pEAm9 zxh1#&`XgQ_u@-FUiWZ<8>gxCj!P%u4(TL2f2(qjNWqB%_nx0f3-6YH3n@&!meF1V2 zSssU%CJ(k5C6kq9dK#GNc_c$Xw9T@tqKj@!MjJI0mu{60bwp-mHkSWs&+-=ugZee*N+-h&TFc(PfY zVfHsQN|oPRf3gBw=!Rx4IGtT<-9PG2}txtdB^I~g~6VTEu-S&Ul=QZZ&6`Ql+ z;2*~W%trCCiHB=EFAm-v@VxlqL9kKyhtoD2#k1qPBsR@_eY~{X+F(A1HG5u2T@=VI z4|QvDkDnAUY!o3WD23u7!$#4x2K3luqDJ#vWupj!CU6H(WK$z365TGolSv8=la(zM zJ$4~kX(u+sWG#q9S!+642;0n_oU%c=#u1}sqPIC)=x%79qTwSZ&c3-;r-+)Jysmr| zkBF@SM{`TK+T3y&hg%<2YJRG)wwYBX7H$My9>bqFk`pGT13!RiY-u$O=&eg=9IyBlNh^Xmqs*3DBFLs+DPg*T z4ay&oR>X~O-Fd4(dgk6LMP7jO-wyuuAe`n0OKm-HF0}O&BRyqe{7O&~d4Xs3#1e4B z2#hL4D~A`}(l0nNEVKpF@eT|4^n#8x7X$@7XL#62$L8&f~jd!@7>6l526gJLG#|oxU7hnPv^w}yi zaf5OzMjDk=DuK>SfuyUsJD4nT9KU4@u5+VDp;h$*AP_)TS1vgigP)Fk0-;s=e&YnN z_xm3~m;3U{g0rD^2+hD%2k3E3lVjm?7@k*kNL~!z#Nf>YnjzRxWj=J!At7)TNVfD}w{4RpxS)n)2x-2H|wXm&V3a8L@kYpeqBSzL~=+u{PJR4`Gm zHdj3A*0c=0AtNl0Bv7$rhm9NG?y3Xsh0m=y$2!EzO~6c}-=pft_#|KUI`H}7If|Kp z$i7jrgJzk%>C>;9ruO+XfukfwMDl#uZ7M>ZwonR!yS*>PSg9K_eH$ql?x zbEnq`w2DX@Y=KsduZ5KYtuV;S4}a*A{AZ8`7>`qRtnvnKls2vV8|dr+x7M8Vta2`R z12@&3+5h!75O(mEb=bjQtA*O5$+2syJe`9GB?v8pV0N%Hf(3F^fv_J%#$Yfgt5vjK zgNIJ}j($1jqPPb@NIbS8XRpnMvXdz@WS-*gLva(B-O!n%EDj$7);mX&*Ju>^+?d*p zwG8cQ7_}=0%lis(aV&}oBkMuGGNbYb3v z9InJR*^mme+oPA6;y$HElZ9@i^H*hcdVD;6vr(JA7T|-|z2b;|V$CZ<8;ado9&m=O zjzgI}bnCEg`e#vXM*LP+Mf}zQb*1>NZ!uuh(6Z3_zT|pB^R@a5*vVG|P7g`*`{6rb zJ>F~ngaaulc!hbF|nTE$r?oHTTZBA4WF4KYM2u z80Ti+dlODSd=r|p{~(lXPUQu*+!FHW7fO(<=6gVPV&g2}?AZK7fgGE=H9t{|bVBNW z2P4JEu`)4!56H1Xq?2d$#1b$;-$s?9mBS0*PrwT$06Djd5~@NO-5-}v0+_x3Kk 
z1>dmv2$PRC3BIO=xY{hPwnn)^UW04d1xa1K!9hE_PyQUPRJu=Uzj|?YlMJEaRCSC5 zok6JyVkIwgs4$o<5t}2TNea;LL@*Z89TBgo_-&1n`V1f9n9J4+*BX8YrCe)hOBfhj z5Qf{#&tx(lNz7i3doKvq$1q@oh9_%%ZsQc>0r<}q_W}NM51+E(KTiTJ66_elQB)n4 z>6;;Sh)S~c;%LfE@D@yTYhu*ojtXuR=~YLoP|FRt0X|M!(el2s7lhvFI2S$xgwQ*`Vb&%Ij1*&s ztu3zknb%PFSV`x}cmUrjfVzU0PJ{c+l~y4PzJCRaF9fsi$4h`1UNVFC5p<+gmg|eD z)B~Mb@oNh@20_r#T^Y>Y_oUfot_HY5Rp5*T^agscl_N+jqi!!|@r}1d&IN4X!3vtg zcsV;pI7E*l+aDmg2}DJJW*SKhi|sB32FBrI21UMcRmm8i56V+%RRvtAy@&w7lpnfD zXvNC7JE;E(gon=kDzfeFmQrMYfpp(HrISc^nHawk=`KX}^Q@j&BHe#6!^?^lIlcH3 z(tUS}y@oBvYs^>FFmup2?-6>xcZxT|6N{vK730bgFtKnkxlUgnCb)6MGLK}uk~^!! zeIRUk1e_skxv+F{%Y(4>#Qh*_y~9^*!gg)3x+rX|UL0R5O90h~GZeNKx1$lJmKz0# z8QGwuD_fc^{xTaBYE*O>Fujx3$mI12n-E!3ze2BWR zCCrCN3}G8sL!2`=WtgkN@*(D~H|jgA6W`3f)T)$7c{;7g$??2G*@x?^eyKGA^2v%q zyG(z8`9DKaDZ=SHSat^qO6Y}}P-j-XL4myxvYv{!jvYrE7X8;D@)uOB?6|z2%;Ieb|a4Zh2tzH~ui6ueIx0=f>l=T)jH_4gz zhybjX8{`)n>en(#1lCqRj-#}sHXj|zdW*|(bQTwk3L+=geNHdk-8g z*R5&E`X(;x!%}nHlJz{&+twB;^i>A{>w2<2(vUe5R?sRANe5YPL!ZX~>ws|MaripZ zA>w!x8wWI_dnIQh)43FU&g)>cJl*Iv0M?LBZ0}_x;Ak^!gKOCf=#PL+#dYZ{+*iIq zC*i(w?LWkB{sTxO!kydP3OioB55_Jqx(!+a|Nk9$rywu{%C`FNLaHJVfp}&#WwFw= zzNtULMpH;yfHaKZ=?BQNqWIN69jxtN+tbJ>(co=h>6Jr8z6My@@Cr0Lc?IZuIKh93 ziyE(jJ_RqMO^OOIJE?u=mxS;vLafm(ZU%hUIBIW{NBdc6zZHZ(6@C!;wGjTT(DBG8xX8P^-rn`O%Pa1H8+UDZ jgsCq!>=fHS7l3?mbhp^{xd7ygv*K2<`>_CoP0N1+g9H+( literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_cli.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_cli.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f69e92b7508f34fc48fa2912d1cb407b3d12092b GIT binary patch literal 88027 zcmeIb3w&G0c_#`GuLF=E2tH)JEP;AM5~;T=$(AFDdP}BkL9m&~2`){76e1F!4?sQW zICZ!A*;L~+N>evX;x=d>{g6(~!^Jc9 zn_sotY%kd&wut?Z?Vw$^hbUihs6uzJU*|zrx*}vd=+-^#isO)W(5rimT0Y%}TFyh2 z2dng|gMQtAuv)J^7|;U;YxJ6fK|Oe|R9bf{*P;6k&emrm z@7Cu;JacXO+}XB8wum;@7V&PX;G@&$u`(aZDn;3RR#t^FzbLz(l~toGAj%f7vKo{H 
zMcKk9eMfijzOCueUDh+6XRT||y3TC9$*80+V%O?%t--veFJ@)4Pbt=&r*V8#KKkZM(bCs0U<6U|@eS$YgQ-`dZG)C5+#<1Ki?A`;< zKGfLM6zYxlrq3ojLZ{X?uMagfJ?vW?+SikMs=X)lNW3?tpY{0`FAg2(j3+aR%-PVg z5C)!!pUL=|LdW9iOsF4E#H#FX*E{KAI-|EIl9@Cr?Cd$+el{I?s=ec6e_!aScvnh~ zhfeE>Od{Fs>o8s&`|%*HwAt)8{{R_$`gZ)QV5vRgXm%&&Iu_x97OA6m7P)jZxPoqgBZENF5C` zWQ#hM*uw6Cg}dmrhT22DcrCq{v39*X9ZI(M#?yW69r5PYylrilXLUA$$3x4<9)GC%vRpC7!PN^S&wnQWP8%4+aop|69Y(4p*y=B zVP|f3jPBa}P*19(y(j%}bE|p#H&>$3MO*7dS5Aw?XjHLS&KHZ}&iy@^dyq;WD1uXVlnz;5*@MjOh!*U)sM%B#q@dT^(IYYZdc2;C!=`%=_eEI z$*%a5N0RZ~dg4_4$=#`r{$6}A=_mIkGW+|VdeZpDoT}R1 z4LMDd|GX`yRq2b6)0ZH*=*YSI`#Re*@tiA>!55qJVCB%bFvISXr)dguT4%hgy}u`u zW=(Px$#l+sGG{-PcB6+7JM_iK=3H#b^k(Gg4ru^l+jF+J+_u_TLzzpi%g$@Hvxbko z?Q+z(pWFYIW^?&pIP$AU&hH&_&b#HbyZmq4C~@0-bswMi+^%aivp-Y)sp{8iLg$== z_Ul^Ue8u^vU(@Ei?XU$K->CFIcYr;I`o%88tqWf)0IxIRKYJ+~>JY2N9j!Pj zzI(G%xjv54EEXI|PsE;jKB{foZ?mO7yTZ8I`DL5!xMo&8$`GD;#(#lSDx%(vSlewU zHdbtM^3sU&c$Hc2XtkM>zj7@6{w~7Lev*Mb3e1lx(Nh%Er*Fjo!c{sg0tTjXK0V&v zi3)LD*WZWIoNq_pSq4bk^;|Fp(BA{R-=}w^dO>Bxb5*z;BdPKtCuE zsv7HNXr5t)oHv$k?}}4#u6ieX4iGVWbRBd@T?}};w>=Y!pXuvqPcrf*?9VyV@t&?+ zB|tlf1fULJH+M&SU-~R=$BCPqmg(&S(UUom^Bec#we`*PT-Y)3 z^G55*W^1*zX+OH+u-!;Gh@Z>mrcWnIBxa0}Q7*|$ZkYv)HW#nA5k04Wh~zojwUwK) z+VVm7P}B9`oR^n=e);g`(csEK_qFQ!@#=-y>V>1#jpNlzv(-xn9oOc}9nt0v1UeFA@ z;5`m{lOM-rUuuZ#HOFiI8hGJUx9y_V`X;?Nod^P*W`ZxT2Pn6V5}K8;PXZIYg-Ul* zvWJqrl&bL%L+td4crpf3(+WbGC#=m$>(is%)RAC~&Rd$*W)HdxR_M&EMy%*b zmdqcsB_o$g1nf(sq6rL6#Idy^>5ycYWHiiA*g}vwBhJ*ZsEaM4uDnGgNU@_Dp1t6} zB63CDSVZo&J(fk}IWCsYQRSjAM<5oFyo-7{IL(jI92-OBvXz=76Rog_JjYAb77I0^ z8A#s~@kV@6kFkif<8|iaTlyk)so=n(7lfKQZetO}f%Gd|fwlV6nN%+j8`$A+MNTt0 z?3|ZMVtOi-d6OuE5-Y7zrw5<(PdV=(GJ`%{S1=lY}bSL4bQ&RcG;cXv^N{vcb69(V?kTPntne9qc5Oj zAtfP79;f7klyp(jNXa}(h|^=Uq_3qMB?K|k1caZr-KcQ6mffg!xaPg%L&9rQoeumL zhrvkMZ-!7b8QZ42yItT+{pK_?(aW2A(Mrv-{T>hiL$ZM6_gFSQ-+53)z`3Jp1Syd1 zJ(lgC8_O2)-(!tw#oPtfxSF%ZlOw8|n~e*}OARJgPNb3*0xNEg0Hx4lL*b{&z)NFnES%$7EJq zM%mB?ahAf_i#OfJiO6DyWd2g8qi&*+r66vfy%V2pDf${* 
z0Eyx%+D=)ha>sE&TOX}4Ze{r?5P5>kvD`(838iq)q;drwotxKMNh42e4XEM(13r~e zg4`-KN;#WibhGkWDMu$;F)T0n8Fg(TG>o{Wef)ynn9}$~D~#V2aW^}efY=-HM6{^a z;Aniu?=wFD%Ph_|%j7RO8lU-&&0m9~IRlQy+sdGC*gmk?!wKEI zyB5Av<8+02?{v8X|HUErsbwgdjGxL=gH;)?>QN|C<8Knfr)H|o6d;PIW)XOXS#aZ^7QvK=RxC_5xtKz-L%yG3ZD0nN~dS8$lZ6_w~E z>5Vv27ot8U==qijP?)r|1lWzZqm@7@p0;1NpcL)6KnzEffYBTaAu*;!lXn3SERaol@beq;V&mmM**Ba)HH!RIIEFD>~CEKv&m95!^-Gjb& z+&1s3*R|@wXUDYgkAibw-gIGIc3#tkQ=`Gncb_wP57>t-1z16f>7`hntdH`b`kmQE zIPTCk!pUqSTzR$;?r9h62x2r3hG_Vd*inX{%`Mr5BDDn$h2eJ+TVy^Sf=zwL@Pjbf24ZG=9z3G0s28EC67GcLI8kSSAI#NivjNX{8}KSS?nU&`bI! zNb&_b1Uc7d=QUq)j|LyO`;c?qh_+}boeeA+*Nnd_+M>aa@UtRVC@mHL_ajGdo@HXGn(+GkUs4D(Kmv@JE8q7`Pdy5wvMGrN*CEkt~?gGp-w zQ;^RDOcucWH5idU2Pmy+)jxu%)}N(>sg*y9+_7*4Cy41Ym>js?-f1y7+ZaxB@eVSE zcxSy?gp8E71hxLFG=Kskx`oPiQ!=UH zNY2e~60Vt?gw}se+~Y;cZKgyau(t|&Hdtjmo6@v`3kYgM&@-PQ0K?uo)|mob4J;6Y z>LCs*;_yWHU!>|u+#&r7BvzS^nzsJ2w(|8V682Oz4cZOb`ch|h-h;0|?!Oy2VrGm{ zm}yweC(oQaGosBIJoE7Z{Cr|yT$^M4%i_}0qQG)?mD=zNqsSJvLzz`Hq*l6rj9%Oz zC7-55B_BFV7btlg|Ao@M(zWo$T$gL{JJntnx2R!tI#5R(LP!)s(PTm*L+M^4UajFE z@#bkXREvoEAktyF1T4Vvjoa=-lK_Lb{-}qEc&28+TfvA_N81w?95(%i(!$Or59(^> zODIC#i&lWTr46M8p(+gGyv!g@O1>M+`&k$Z0zn1GB$DqJU?}}9LqC&mcqpGm;1by_ zXa#SS3=e&6UzhW28HF123p_FdhSKkzJ(Gsg(?8FD3a$ISdBz8XFMnp|x%%Eb(>k5; zc@Es0=ese_obqM#E}7pIHCWDRsbsv9+{sjegPc3j(Ua=$%(=UHop$sEixSej+@Ul9 ztq)Oaat5`S_x4u*5%eDJw$QG$VJ06DNS1lP=iY0LI_3G zGw3<}Gf+VwK4{PqeLle^;~+(T#LACbQx) z%7&nVG%9EF&P4I37MdYzl2L8JKe?VKw}2VJ$1qi@^^WXc$c>{b>S7ABsZmoEjGA(^ z#g){QQd%RO_3pslL{fKZN=;CzMd(}gh(s%*Zw1v_=*Ce#)hZ-K1@eGyKt-)}pk`-B z1^_Q)LJ6h)oW-Z36ujF{u&aPmKR{&%k-#IQD7<5!E=n~Syo*ro7!svh2Y0pNmT>Lt z0R))Ktt#ZjtcDc@mH!3Z;CUnx&S?$575{Yi(5W$PnaZ-_n!>2!ip2;5W(_lG5b-ii z%NHnVq=fX-oK;>-x!*Pu;y#5zga(6%dSMVjb@F&!2MTc5Z_=wPii&fKBZ*Y#k3XiVRJH;-PP7DfnUU`D^oI{ z6Ra}iUDUuy%^=EF!af7DWgK9fMJs?bx5e*FU0bs3rvA7eVMZ)W<WL34*p7cm}H@K*INX=7W zD`HpWK^rYk4WuEg4gN`ctcW-1eFBD)OsJM`&m&ft%H9gX9v`#kscbuES)o?&!O-@oCA9*NA^}^Edk3a?(h4L zD~yUTi_SMWR>Cp7ivR>~HEln!yo=zpzY9UinWHO9tt46jS_VtL3jyIu))XqZk~O6Q 
zs&?AvMBsuc&53A*IjN1*HM^KvtR_+)X^7SsUtMqtUtQ4r+RR_BBuo0X+mn98zWDFm zwnp1(n>of#AcwAskZmC#N65CXqSd+GvC!6K56^17?ds2THEltZ1udTJNOeNve+(#v zD3RNXO#3hjJYz&iGChwC`ULH&IDn3wYVYZf-}a?bz3~tO=TOTLX408juC(+5As*Vu zw0g6tgF|L=wH(EFh_S#*&#o{Bzn!{(LXWJojo=eo!f9p&4e^Mc=uRZtdoH@Tb@p%2 z@IHxzw0N5-*>XEz(GaeKxp&S9Ywxw72fgBU`bBC-RKwH}YPhz9e(zf}oJxbtBl51O zickL(t16>GY_cl-9JT%0*b!mDppdAVhPGinvSa*VEk77_s8xVsi=B$aj z?r-8A>0W$!z|Ydat}ZlQw=!F|^3uMMx|Jh!4-IN>%$hqsYfX06npd17v(}8v+CEr$ z-CsLW_rT?oqyD4U8$!R|aN)_z8?%l3UtKoZ@YvwqTMk=w#E#HIb79bdC=oLgH)8&H zU|BY>Y((3A$%AAhu+%Cu4LgYbTa7QH%_RbjwRq@|yqXZAN$fal-+dA4+@PgSPZBocXBhLuXQ%Hb759l@9$zPL@R|P&wY7q~J7LSr>5$I7J{0KfG zWQ6Q(J8MCdzT?7%MXjSVM?*3kX6!BRqP8><(upoz=^hdEeXbQbtl>1hlr z&%tUdcd*(Db9WWO-bIhWq;_TU!Z{ZkL`e+(AE@KaqIU&z%+w!x234#Wd$Zn8B`ge; zfvML{im5p_jc71@mLWnkLN}$;?cH%TH(jju@8H(yhYGQ^K+P_Onjapl{Luu|oNe6p z%HGk277JnyG!zN(G*ieLguu!#oc-)s{0yD_{4@0POg6A`Tr>WzXe-Iilg`FuR$N9I z*?Ah3v-0dJ^~W!aQlFxZ$h@Ylyj7Wpe7{h??_tMZJnY-=FM@pCSUY*rfnvy)Eb1$g z?&(CoK&2W`>77I$gw%kc(lZi(=bWdI6o!01slW}0J-!(vNbt@S!3bC&W(!NP#*5p==P{XPKT$y0aRN@|{ z`#eHPD<$N&sWTCugl7s+#-#LCK7un5a~Kn0NVyRfPe}1LGWI9|92J3yGlp=KnBF=y1kl z>$t!Vcvd1)R3=yf@|(h8S*3zbNCjP!NCgpBGmj*nC==A~SSARs6u<;uo=mWE+GQ?c zz2?CLDX&x$<)Rf}LS;#rktDCpNST#ns%;^!?=#<#r7yJ?V^Q)NGC>7F4w+!UkO|hj z3z?vo$pnLWGQsJWe+lW{l-`ACg?CYFnLqWLn^~D)-L!w7Lb^AlIT5WeCm{8OOt2v` z%R=^-|J4;E`-Mypt|toCE^;69<(^GxvKE@EIfoGTK3NCjhdN-^T2sQ9^dDbrt^nyR zoUD$_P1Z-NqxDRqV3lJh^V~oLt7P^;aipOYBRTdwYHy(`=1=JjEzG-*l0G=T$h@oh zG~!L=OWT#-W;c_!H$*~_#%O~fZ=W>4)`1|x{3qk5Lll%f)ZW<{?-WGf1$?I}BCrg0TmRS8_D?AJ zCMAy|$$1P51pVJp(Z8qUG9@3OguH05wLulx&zZi{`X8~vfz~3Dcr*Tq!gmHCQCMgc zxXwWMZjrw;KD;NM)L*B&{uT{P`70#(LU?q{gz)RawK;!e=Od3E z-V=Ld&m%{UzW?2jz52K5Zk?3;DkW@iIpo6km5|V1qw13q#Y{4eI#>jkPB` z4V_KyeJaX(3LX3_3@QBxxL2!0|9b7hkwu%cwVMY$7TWxBHxn4NzQ?pVbMq0B{1%er zw@68T%Pae`ftGR2_#35bkmSn}sarJ~SdHu@JCc#W>Pz-)VBNT8{EZrAqJU?v zQd3cu%{$}8uW9SfKV+?8ITJO8ebyT089K+P_!WMCk-uaY4Xl}4!91bK#rh**$M?g& zBU?Nj_`t-WA?F~L=_4o=LX*WwCyXW_tSUSVlL}X!$OKzL5PJkcN)e3(7$j_aVlk2^ 
zFq(M6t%@0}(p5w>x<%3x^{9*}pviEjh9>AS0Io|RZF2utFj|6)n5LjLLX47!>V;tE zsG8jot$^yS%%8fpV9PAetgdNg$+#bBAxj3LiXWD~6eMnOjYm?0JpAHhsJrza4a6Oo zC=I9SADCkVY&p@MIN9H{v1wD&`nBsfty{ZxovHtBKs7y130dzWxL6Y*SD;BQ=SIfj zYRdMwk-BS&Eas=33gYJWM~3sM&d?_5VRW4ml76cN-Pr4h%;?#6FHTBzHlP-2wVRSX zl}kPfMrtcVj%~X zG9<ApcQ-EeaWumm)Miu5N)#@@vkp8bo)bgVVBJC^wqa%-h@OssK zFS~~KjcjPm&OS2o*b`$_Z9?HZCp11goShvWpWU3D-F)Hc?Cf=e{%Zjsm_>*xFrw`o zS+*}%?m&G@^5a*8=1OOU^+?IgTnlx5`^E{NKrtVdBxWX80ex0uFPh@8IAu)E3e z-6r4e2R%lpXTLHa*$?QgFk#`pq}*RnlBMJ-C2vsjHB-KWid!;LV|H+){#9{Y`1Ue~G{vphJj^}E);vp{q0vh%*EhYtJ~ zhXKd!H&>&m(0~^vxdj{hpaf=cy4LLpYdIf%7>}|&!ruH#H1Dt?1xVRA^7wm{zGRb8 zYc?78W_x4-y%Zj6u$$Inc9YhpJ+Y2D3+n*PV?(J_OVr6Y->3;PI|-b-&Dtu&0lwD} zbpf0^+csLbUYCXUQF6=XsA*QSgikqn*N9q33V%VLi#x$+p3OllSP=J+~AEDH3ngGZq{Ds zw zS#rJJ$;x(#l})>O&BV&qTCqhWg(G77LSbvhfpwEmQ(}uiiq)Dnx0#w`73*)&$|c^r z+~upkO>1T1vRuNJB^xRNXs}K8Vq!S41T9d+R9YkP7hjhp8eE~9{W_9~3v+n+@SM@W zD!wv5dhVmN?@>KZ3N}CbsMyAcXRcxwB&NFbt#CXeo-XIW&Y+Af zygkccXH$LsX>hZ|%dXwDZX@Rz4gPi2q!9OPNs3ATU$|!pKA2gP=k)|T1){SUgw8om z#?O`up>rw=`5Nex7?%y<` zQJM{G8rO`!E7~TazGMONS5bx=8f95|Mi-3AqpT-UNo2;fO_R`SNhZcCpA##GR!eP= zD*^P{Zj;;T#Ajv+M6A|KeHHb|D79C)(Q zL4yS42L4TOl4=9#Jf4y?sd6MreoxMl$$6Qx-99(3HE}=%$07?~Y_=vIQ}BZ>#t*s- ze$YLIJz#|{-R##08n==0f;o35+YVg6g%QCDb@e3hs|$X)l30YAixZ1pwA4R=B+tSW z9x6;}l?R%f)P>F(v{0+vluRyT^Kcp44XVjFOve5(3EFR>%<9SUK$@f)1_x-hwc?LR zgry$uhDHw?lXmh*$X3XuszoxP^zR~>2(pG(49|tGfy1n`=U`vC?B#<@?ExPK`n#en z8$A2+L4G3At0)EhFzRIG8C`%qrE!60;HYlt0F^CgqADtJ=@H4KTCwfh{rD~1$LuDR z{elu3$WfhwJ1YfQe@9g)A&F6%XcolXcRE~4Z(tv@WjB0I*XDPs9Ioa3j#N=CuOj(q zx$-JVt>DLi6wbg%eaz*&lDV8$!R6e~T+YqSst;ht`Zs?{_qCGMB)F+0UC1!IFLWKG zr>JOlFaeddCa%*F6>P4t)jKxGjyO#{2R3(iNUMHmR4P<%s3w50Z1w)Q)T6ZRfpfGW zK(lQ1?t}-t3p=?xaxNG$cf@s?Kc;I{Pb85qg%fFP)ounIq$z2}rb3BK(Y@RMo^Hav z51o_%h2qO4R91s&qIx%pl)#4RRWB~3^su0fw1 zT5cBcbl7sdV90+%4M}(oarFI*E;-@SRJVUnyQbBAGH@<1UaRikL4!Prl+=PaF znxmHe(<81T@d5<4Rw>eex7{pWfG00rfOW?NCH69(P)JbaU6lArdj1P6SB~wAz(&As znnm_gf=U;=IAF6*?BXEf1&}y`9hSb-Dl=g@SAYj!h`J(G%+AZt?7S+Yl`2cb)Sv_i 
zsqdN3%)-{fKiZ;Xo1WPfp#aT0F+N$o6ZL$FR+!y@Wk%GsrMj_Hzrt!0yF76Dw0{Z~ zLc)TnVCG+p1XF*fvb#$LBDKkyXaI70xcb1;KL{(hAbpuB88+z0psD1z1ubWM4*0=J zsApk=8mP6TTrI8C^CIl1{)xU<;V@P5No&kEQnIG7pp48itTAgN_eEw$YYmFDZVKOn zu;nxFZfGdTTFgThWB6Z_41zwL(>mbkJ=;VZaonD3^|)p`=yG;E*4vq97D31!asc~H zCo(VDu%orTmEpUK72xIdj+~a#ZJd4>8QzZQDt5c)Y_D!)I0tUxohU5FHjV7fZrAKPd}PC2O-u-K>9j)iI=mD8Y09-wvYV1U zlYPw`!dy;T@-Q|#;cIo;vw z7!F7sV~g=?b@PT2*}CPN;Hf@WJ*LfmL#rFt=3%?Xk@;J)+m4KBkNzk)XK2Y6)_iu& zziD|z`;$k8){M;Eb$Rb-@X(9}a)E`|8^Jjv^H*nst4G%C%m#NdUDqd{J@+iOX&ij^ z!9oJiCh8E4T40qjWcs#a%=L zYX*%cp(sxP;QTUElp$>7o{C4^jLLzTKNL8%90Asqjv-?r8eS{tr#^~sZg6G8fH z!hl$bHhm6qR{BSukAhrFIiydl10(`48*NgSo=69I=pG9wSwuHlNI4>Jm^uBx@q*|v zoG760dZ+=Lvl&?gjc}3*Akc|NS|gUPb~M z0hZu!woJolZ!wKaMV*|d>XI;F#EODCG41|{CefErXVUnO#=(gXMr~xM3ZpRqIP*N( zw=$#m$!-4>L`L2l$gP|5ORK>6(x;K+lOHFub>Yd$kLy~^`TEZ+{M5of+;HK{*S4Qu zI1=3Y%H}a`*BkR-dcAXG-l}X3WiBLGlC6RMX6Ja|ec8bK=yKqFu#7?Gs>S$^+w8%< z@4P0=_bg{*?KcmG(_{w-YrhK#kwtHoe4gzfFNay-H5%3~b6C4Bd>CgJKAf#Vl$u@R zfgRbv4!RtG!OkvZt_F6EqulbB)ppUaj55pFRn{xNGD>}lEPAu#^Li8I6X^$%4l#+; zdhjG4s+>qc^j03LR2g)e5*y%kNT1j)qb??&QplSi<8ZRgixB1W!>x8KhZ z#=A#j`bKnHkN*-4ffet-ZAyd*sAgCpCxi^UL?LK zMHjKk;$4W{3(kq)i$wzYW$-ml6tSad>|WXi#?|(?lt0cu>|WtRo(Iq5*wi-Aq7~p- z>Davj`iZ?a5h0r3wIKoY9uJyxVVM;ryq25Q6~MDofM-4fo^8e}0~{hj-JA3z=JxZo zq@gCNFp*?rAtE4gxz7VsX&WWcST!Bk{4Nk z{MEd&tUSYop;-l$af&`BAyKAh88N2-DagOKjAmvT4K%DFJ6hVG57E-sKx%qo&6%?U zoGtq!S|?<)R&>V{og!WpUlC!sB7|g|+3gzl6H#r}pqE8Ocipk@0d?Tt`m_?-j+pG$S&r`hyeT`&cU#W zTDO$a*(8&rQpxOs0V)uiVnq^R$PSfo5UMpFQ>|6zA+{*jduO(t@Fvn!+OHK3(zf1*Ut7*Mm$*m)s zGpXL5f%}hlr1ba)eI}_1k;ihA*j($U?n5z z7eIqEvT(<%5a%Epv5@%>jXnI>n0C}svUTi1Vg-bzYIN?Z3)rf3<9h_5gtL#cSOA4nn=(R z&}^PLYnCURZGa!_v{15}lKqtIp+p)72)lq%lJ}gCKMd(75VW62 zQ9Pmk7~MNceLhBsFbgOqeP1bQ+fvh3%w(igyEPb6`Ynvc0(fBt(1Y;rRyq8&hWkfC zYqJe&2YuJH+Hq}mR+~Mh&AnEK;`Q0O^{g&86c{>kse6P>1|FVhJ5V6Jzw9LO{hrI% z2&?stSqsKzt;o(=acTL;tQ8}(wgK*w8h+D-RRyjP%(j{Q9^l3x@>bu53lP>zzX;2j zC>9P7R#+*7G#gR+p{j`#b!mvb#@36y>j9lAh$2RaNTt256idLV!D5@Mu1&D}UBl+Q 
zi0?296>#~OfE!6{w?%4NMuUIWgk{+51a&RYM@b*UMO+nlRzNeIwu4APQggM^t% zs#a3l#dtYBX*=gzm{OR^nBGO7aO;mM=vktkcj`lumL=}%wm0U5! z0Bw?yHW`<$L{D6z9BX?<+mk4fXws@V<5i(-RcN^WY8Cf#$T{gI(}5_rsS%}t9t{R7 zf1IYl-+n?U1&VGVQGp5GT9Q#hFv-=~$QT-Kh%K2?8`gQ2AaLX=lfwh+A3(QoJ7h}i zom8-ul0QR|=UPLXwvcAqM6L94b%GCtsFigpQELHK-A^sbMy)yt%{l)34mD(&ZKm-5 zk90~ypMXvcR%X6LrxPbv#5^ukQcU<<9;RE|#?9!%u$az$I9s>EiZkDsJ$KOm4%`Kn z3q<(x-r>C$4v%SDEzF9kr6{J?z8E)Ja2VZTY=ccDuQ{C8+61bROr4&4hRUYQF+(TVk%EMd+cT~7TEkS2?So#v! z+`Pu_EcibY!5GemQ1s~UM@?vHH2h3y-^HRbY5jxfD#yaPz{Q9@4<0wdRJ?598XIi$ zU~mZp+#~pa^cam{5fb&Mw9R2?5Aj=Uzfbd3T0SNsLlsQFSyS~Pff%%>o-ZT@JHL!i zgXW1HMFg@Hj1+XVosjG~{9(msq_(#($09yJc^4(YDQzI~K0w(@{TYZ)3PhH5QK)N6 z(#iY)ePod`y?YQuwAA;a9(wN7p!qCi&9iBt@)vTRE$>GCM1t7Qvl~$^zbU^FL6VwJ z!9YT(HzHaQ*()qmrhlGa1$pXkEyE9Dpr$nydaAwSWPe{e#Pq0AzQDJl*@1TjLkfB_ z4lj05?tPTBP_mnnNogFt2UR9eDIso7v4Bx2#y&BC5yn^mEBKZ%D-Ci{!j+fZqbIhROV`M*;L3%xx6#tX~OMHDXf&AN>1Ed za2CL=0dNNjM`;t-S|uM%dSN1RA?jmXyfl|5z%4+6ni~fXUkS;Dx9!&@K*8oQf)iIq z7BxpL0Jpr$+=(Lx4IqGO9+;4==1<;=cy`N^sA~%|HL-b27275Yh1cq2jVh*rU53)4T(e+$4}*DB4oIS41~6e+G|;o4jC!nIGXXd9?5sQnQ2MYGXML{dus}^oi7IGH&ncinVvQCz7!F=^=Y} zGX}gcxRywuRFfFtf~`n^3*=Ux%3#*-xNY85uWQwV&yHzfnD}^MpA*VzAtnV`oUK}X z;fax|#UoXlMz$QzRvo_9F#j!=&D;34&EfU^+-Iwu`yS$#g~TCmuyV)p-t%bx^42r^KG zS72%3V)((GhaWr^$fbmK=2!%NkarQ7#T+-|)YSMvwo*z@B~}46@|2W`O8lS+xCQNH zRN{wu=9nyfsUdT5{7_j0KgefMvNl4$X~hq+jr#c|eyE=IHzFW?9)2h_BGC$OMCv!I zNJ~7e^Gx`mMvWgPH7f%ONQDT(A_z~iBhI(KEJOi;LKHA_fS+&a40@kIeP+^$INQlg z0xsUU5w0!@19Hw}dvCm4rXL-VFN=Pdl1C_MrQ`@Dk5ck$lrTc` zMapfagxLukrJOVtAjq3fJ95&I@8_8Ai+3ct5*_B=GF?6G-MOU`;D@T(M+tiTE_yFb zA`T}5yDN6tX&8#^H!i7o4~~8Xc-+PS^@u-cB&9!IN?-*7-(>+ zkWOOd%i6_iE|9g8lbEk{Ae0wkB(`qr)~)N-Z&(*wzhTqXOVvdC&ftC%#AzeT4`k;ad=*hJA~XKw zGeuczxKgQhBh7=Jpn2(`Bt;1ssOqOEX{4(}+FJB%opgZ`(qN@SxJY_7rc86W!Z)ge zuH`qD`H;+pZtb0VT;PqVNe60+1Lt(uZ-$UgrdHG4-L9}Z7c}O+g&7k^!0{%2llsKc z6^Ag%5exgC6782#i5Avc<@Py6S8Uj)(;J;0EnALnt!^&n^+Kt}Ehc!_cIe;a_m!`O zD{Sy-`u=qc53AeI>R%6re!t;M_gz>v8eBKy>$*cgbc{iK_~)@4KICVVDt-EU;6QCB 
zOW$7qF+Z)?!lMlD4nL%i1>POfV?)xhCZ!ma(J0-Zk`)qev!;sLWmaWsMWUqSlo(9d zlvx#tzG$Srv)0jr#eeA~87aZ$!`{Q+gw!72b%HM^FDelOhrF=oYL!mFLVN({pB##dAg{ zJFN-Gk{QHlSisC6F3&wi=~yE6hAf0=hW?MkJ=6SI5f+7zsD6-=MYwAjx=`-Wuab>} zm!W>>4Ou|?E;^lxBGlxGo}`8>f=K~m#F!w~qkGFgngud)IiFuxk-h}Cvn@p%_OERk&7xD!mo>AhDRO!{-{&01!CZ9fy zXA&1^2v+ixFgDKZH+Qfh%%)JEcEoQM`&1UjZ*HsaK}~+NEg=$}A5n_N>}XX#AIW73 zpU;TAZ1OM07aT|UW!fEqb`X?Ct*sDJr2E=C;)$=~hMavU9)0n$1@|;+w9@~y^2|LsZ9BF=+hgfB+TC=_UEil^CEW!M88IAXG#7;+Uilk@^s|ukSCp0_B5PCu*Dh zWti!5Z1zq6Ua~s6Xaw$YF=PEStvAC+p9k$R1BOY!l_Bjm0WY>?sDxgosr$b{uQv(J z1O2|z@bBk;WB%pNv8{(Vgla%J+p(q$qQ>0RMqw5YbNJ?h&0S#=kKJ5%QvbpTUjM%^ z7H;|O=Ohpb-E}u5FVLkH%I&6Pax*X*_Qe_*Shg^TTQLzZCw#s`H^@*T&EH?d?b9@x zJYpymGRUFo{8`Q|oB)&aEBq_!?jO*dmFB6b9drrOhAi60Rv$vd(;o#F4m-b6{l)4F zr$@FtIvRRxB=Y{z;0I=mZ)W?3*Pnmv+?f$=>nGV}YFkGdx8ek8Hn4SEGybk*<#a5(o+yZw@H8s5l-l>wJ?srOlb9gbD^;*)kg1 zeHr0#9=R)sU^U+WcSKt~yq_i6!0K_$_`9O59wFwJPRF#>gb64+YAHs3ct0<;rmtzM zC&2{!@$E{sL+Tuh2X#|14|m|CL7L0{sD}}{Q*$0LHJ49FLKj*>DfLrI3QalhgZK*W z5(cnpQ`HQIkoy(YToRSV4hz|>mTC%8KA%+XdjXq>e@hv5PvStOu2)1S`nsHF%P7=O zB#Hm&k7FcQkSXD0(F(HFDzGHn*0)eY{O;%ZPr-^*w=$5M^K|N|zNb=W^fDB|58?Il z01%G2uIoE+da=qvkME-EpC>rmLb=kEIa)JYjVGzPN+iiTD<+aGjFV~TqxmM_#Ns59 z*zGdWDP#7TO00FcNl;etR^K7m{I?jZluTY1)cC+FTSr5Pu`n8J1qi;&BFEVj9I&M@ zIN(_x?+?3a(N1%DaRN~`)FR3dC(v>xicx72aC}b=gfD>||Lo;JRqsj;FGiAk4-7>k}#Szaj`ODRbI#EmyI5Z5f35n}i`C_+k6d z7ZLo38_ytlPD3t5p4STUT#P8UxHx?-D*dNBBG8|CW%p=k=Vdqp9=t0A`ttEWW0ry$ zEFV6~l5Al4xMuua(U!B|19Up3Ehhq z5S2~_Y>^Z-6TBCD*_UL*NB=pSLGH5_J@K0U-dK{xshFM-Q zaSc*TSrBfScz0a4#FX7y#>!MT9i{d93`U?L+Mao3zlCU^&;#9u>vE1wgHGv}t#Xih8Ix?CEVkV+I%LNhISIYXDQ9_FwXdsa#EAi=1P_+Hy#% z#As;3c|Fo?lpqnQ`L=O`@+kc zMneZ)1;KUnt`J;n#se!5Zec`QbD@PL*}$4{&G@^btr?+E40JlCts#PovZIz_UY@0uzpJ!h}OUr&2~ObZY$;iK}d*ep(@M zwe+s8Dd)vM?RKxiuzyPLRkXr;^+!xGfiFU00yD;=|9JUttBC(xvJ$2uAF;f#1%>O= zW`nJFF!j@Db-`QvSlMr_w(X-c`_@XnUxD5ub%wwt5UGzeL<5H5P0bW$Qfz9S&*0EZ z()QAQ{~6NmS*-&aVivQVrQdVWi`WQbqUhx(dDH^;h%BuSEg^9c_mlJhHdN$WC|Yf9 zNZ+LEl%y$Xr{uS3Kn1GP8q1D~1avJN;nQ?lrtqIXT?bl(Yb|O3W}e(i{V#& 
znSa-ES^L%_qoEo0YO`ALG?}uYrs1}1plMt){;p_EOe{pFV_FkYDU=&GqxF6C)VpgP2N^Uw2M(0 z>Pa$hE*dN{CuL1y85h~VXcAddiKhP_!G&k>07_ZY$V088p(7)YJuw<=n=zhba%s~& z6N(v=GX1icP}uL8P+$St?urRTuChOQGMPG^jGc_1Wj-6_z$sxy!EGgWV6-~(8T%aq zAH+mi=@o0VN-}D&+|eQ*UsGI0rEGA6ZdJ^b;n`QZM?-sGMSzJTci)s@{)o1C*pUq^ z9@mV&E8610XZhKf%!8+M4ZY~GnDMkQ`%I1kg?ib(1z?Y__VyTn4;Lg@#`;Q)Z^gv5&=c5N= zhxWXGpeh58mnOjHruL^gCVB`{pUgv;$Ykyz%sv0%DOkb%kJ$ppeTg4U(*k^s!Q}jH zvH7Cnp%2=7dSV?X;vFZ`*mWl6oJjLB?n9nf< zAed4F3B+n<#(amVq2%HprMp)%!dVQVXwW4-#|0?RaH`<}n~Tn^*!{Y!H}Q{{-t*1Fvhd&ObZ8Yr-|*w3!PPU*5?Fz@0QMgMh-VrE{Pyi(dnwU3UT_fjk2n};L_K~;_e~QpSz=gt zfF$T11&3MlDCsZ?4(wIuOnoNmWpd5@Sii#RR~2iM#muBwzwWlPQm&a7pHk=sn^PyF^7RfK$)$QniJ-M#PMn34{gv=1M~{T6LT(HrQe5OCS#O z8jDROqDSOQM*A)TZT-O56Y#Up)w9R{gtJPkXV!l1Fb(q-lS;GGrN& zM_^GIcHmWn{W*7MyoaE)gw4Bf;f_KAffY66LW?%AnhwInCf8fy8-}um(8vh zjcDQFj%*-2t{HzN%KbGNN_zLlCHDsE0zn=_`(yEen7 zeM-veKcIB_DJ#Z)PqIptl$8gR)u6Hc&D1cyi(!0s+Y55#S}?v^w;=HA9mL5kg}Vhc zRRrU!MKrJmd-IHSI^*+Pd2gOE%`-mFz(iFB z1*?A_xMiS{o9u;XFE!PUO>!sboc5@mI)&isI<|4nD)17cFC}u?kH;y*PzR{&Ad-o~ zg#zT%OEp=KQOYr6fe7V}At@qM=bM0}`ZIm~wdvCj4DE%4TIn?ym>GfiC~g;+Df)F(3ttP|l`v>tr6B)% z^tD}~~^=udsE4l!w2^Dx9gpWwfL+Cc06$+o(y%eE1~$AjNYCO zK@u8DrZOSg!v-I4ymNI3KiF};x3@jnnGUhge4+Nx3Ak1?tEK+8@i>C5*l3mavw&+b z1C`OZl5WXJkSspp*;-Ly18D+@e$*{RD^Z8ZgurOcM*qxnFcVr*d%~mK`;c5r*r6h3 zAcjD|Y>0b&ZD%A=f}Qpx>7W!|n4%{kJ(qVWK{Vu%8nTsAgpnuisXSh)wj`CR3^7FE zw$VcWVChRO)+k1l40YQYZL4kOQ*`1zcU6RJ3-P`}w$|@%cPzAZ*-21=j|=j3h>CkV zWBl`q;cL<}>W`jePO6VycXd0%uH2D{pD!o!yXi~jOhBw#N4&hQ{+^z*BCIg~prxdI zZ~MvkP6XOZJb>Zt3U@kpk<w=bnGkUi4c;ZVRzO-z#Ve8=DTMk?GA^RJQX&XE>6CPQ&Ty%%4{)4v}_xgoE z%WB7WR{L7Ec{(WKoq5nF;vfj0!%Pd{__zR$o{&ue9JQ!ZwGk|K5^+S_qkaj$b zY8HU$GOu9=DNm6dOC(Uy%z?^ZfJbwOLFPzHUd@nLAKy&7?D_ zzS!wR<^*WRSRcYt5D6`GgXvcZ?J6N?02%%WGS1{ok@-A)e32tBJr%}W?4 zD>1LR2k!8NJWSy0Gq|+{e61ny6(sN#qz(J($F+G`ZQjWI4VT(Rc088d^w^ko^z~rq zi?hEp_X2c!8)sgpr-WAj6EN#;Lo3Lkrz(~5L0a;S2?0dE3_pevq#R5;s>rmX?ei7_ z&^ztgB>}7QAeCK5&cyl!+ z$1nU&O4baDCk-|`+5WAyk=jUIwARq+)=fe3SZDTS{>JSQ8$4YC_)i`$HOV0GS-l|U 
zgL(L@J=33_GDc(mulbB9r{uvLpg&kq3v{MHVvm%8#K;`ri@C`Flz>5Z)mU zj+46&P;MI~^tldH66>q?CzGUIWvbKvMAw)&=r1YvJxcxx2_oS$WJdeaab&hj)NG;L zZb~NAlj#4M`l+`lOBiXc!fos>61PpNP~~nc->2L6Q-XzOOB2k<$9(*Z8-?Cy;J?DR zz};)hdt(O(%Qa&*ELd(`Sy*n-7q@(A+l5o3!Oic9k?5+gc)#c!(N=xY`=v_!3_d$t znGLKO*Nnd_yojHV(TTVu%E)ZgsGikUnP*p#_3{g&$X1Fv)(ltA0?mp;unHrXOo4n$ z%26hhm`on35E2x7>V-R!pO{J=tNg@jn)1scKk<}g5TyLXG?yt<>XgqQ2vnR8TKgbB zp)C!uP)IsbX9cLR^6TvHiBB1%7GPptenLAV$3@+=T; zgM@HzYqr{@Uu^r*2QJMT4Q>G_yNhaLCH$zuc*}<$RynM6&4+a~-XI;*ESc4KudtsM zWUyI6CTB$?vjU!|ozW&pX60SfVgaGf%_Hw*E487AXa(}FvSfHr30aCsW*f{WxAdiU zXP*SJOi40ZYk@t6q<_kwCsJ(b7#+sQN)2i-kW?{qj;m99?+mro&$Si2bh{mS5Dq=DW0awBbQW zZCz9bxyamkFwUDHxvhup@s5qs+czOfwOt6bY;t^OlW(`fqtlgz9@RGLXxS#5&y-=6e7p(%Ymqy|Y4bokVa z10<$9O1>VRwsTU}h)7F<994UWOdv7kUDV2dLH^~Dn6i}`kcw6yF}0G+tB@3nO}r}E zCSKClqkQJIsISKpo})s7Y|bJykVZCSo76a9%rFjs4H!NVVt=bGAoFzmsVM`?2a7E~ zbQVkX#gpmSQ#y#uxUP^TeiVZ!1ro`1fYmDii2>Ze?3wOU9;oxv9T})=!Sk0P9n96j z<1Y?dfevPd1G2dz4*NH0StWX!+x!kpI|7JqU7mkwTXk2XBOb@j_;OjS>DYqa3F zwj`@9xvDL@<*`*SyXs%cwf4h-mhF!3Z1?T1^mJeb#UVs-B>#BHCU*)kzxt#-YFEX+ zBfb8b3KOO!;qex69;!I#NIIA!vH~8Zc1InDZKZB&gP2InB8Ov=eYt9b&_rdtrVhA-0-bcbI)oY5bxUL14gD2h+E> z8C-Q6`WDX=^esYYV2+0CTl)4e%UGBkF*CfJleEwbM(RH$is}|VM%_udpHPk|xj#WU z7nS`PasUdXyVdc**f?3mD!J);nXo3Takz4Wx{&udN~h#3q5mc&6b~y+pRNfwILv5d zFatiofL5^+?P=50Dc%2+VKvA%$xx{gCTsOPL6YCY?Vht;58nUj1(2u(8u)fW!z(WA z9ckP!8rV4K{DHRSTA=QeXV0C5@P6pXNP*}c`&CF;KJpOC4`xIer1j<*T_~&!asSW} zYh}xsC`KhN#K-iALqHBPtC$MpSOa1FV|Mc!Nmf8~PgtYiNII~?O-t072QetxqNFS6 zMHL*Nc^nL4INLT_Ackw&X&$lm^H!W3o0{ekt+3);mM2ly7VEQQO$l4vVim)8BPv&M z`P5O7Rh)DnKHxE|WN&NOp*P^m&AFJxjr6*xTYiJZl)FwXC?VVH^m-IMZ+qQ&@P^Y1Gx1q0sdTNrG1rIF z+16zFjoD7u>UXLfE+g~|brKHee6d(pf2LoLV{{I5m(!l=PxNFUePDX$+?-h7S%S^Y zoRCbUlFfSjw4TVsb6!ydXJTH)a7E4!4miUa5fRCynr_|=$1Uu;qm`@^h|>wma5q81 z!aB*(bQ03*L}2QK$@Kt|T(v1j=tw2I65TqjY5fP3G*hyck_07ZDfw+meh*2`+1sw4 z#9Hc2os8>$N5z#ijbwVvHX!FgaxT*;o8SC7&uI@~dsyhz9@bZ2(oxHn9!K()!)~|# zz_#e8HrvN+S8WS_X!GB4R@p0V+05i^%Dpq!W?%G{(;IX?=f5$#+FpHYQH>oor$}xs 
z^V`F3+mPG}EwImh+lJ)UE_;9qk=$DHKKm~F+crwy+FfC*&|aALtMkU~wOM=Zt%`d4 z^0#bAZaQogb#Kw>t<9Zw`|`JKl-_#Wv6b$E9 WSVhH1ZgtvYcKhNRHcHvY_J05crv(}S literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_config.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_config.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69764e51fde36fb177e5f53c417ee803967a9441 GIT binary patch literal 20587 zcmeHPdu$xXdEdR~-0^WDDJilTSrlbTJlUj3iPS54P^M%{ltimsT_c2QHYeVZI_qxt zJbRQx%8HdVXsOzO?6`vI)M$yee@JPHIz7*73H zzi(!DcJH|4iFF**t`^6SGqc}3_A%e%_s#6Xy1J+YhyTC-DbrUcNx#61eFS+ze)v;J z+>sPXkxxkjGL>5}?K|b8e)c~w5Hx%((m;rYtuurYIO9JR8Hm!TRaQf5pe%5zcA$>d z4b;>67U@FG(35s&&i|nnCvB%TNUf3*+$brbULP;(wNB9nb}kI(h<%PWva$%2MMYWD zm@i)Q0Bgl%vnHu&dLdcJ=$bJI`M}v^;ZodZhWd4FG&5$##?l3Kgr(IyoEouuWB+8H zkst1Z#2x8^L}f*yK1GK6;oB(DfZ_)lR02RlIBFVJLO_WEqofgq(CAns9yJ@!rHumb z@K|~@Ii4*T4{D&v5vv2%O&F+OlH`Z~0;PAPVR6pB0h*kUhpd~zYFI>r0qMIE{sSi@ zNk7(Zm8Ra4q~Urn z1iNIG3k^5e<8igsXqP$v;mSgGTlW7nydZ~U&38!7_m4?&GH4QAOQ&e&O8TDsiSN*% zd-7ygTYG+DSKNqRw-POfS}@fWH#+lFzmiF%Y4k?RAzs=WH%xitp5OEX2a;V#dOxX8 z8b+EHSk1k_1BH9zQHoR9Bz7}qv@btVfR|5AQnNuFNoKPuP3P%|p35g`+N_1s>c#O) zwvf>bGnCbV!!S3f#wC3`n_{f$SXRH7%&J;4mo_7+ksM893ugUM)<4Xv^zB$W7_}+QwH;Ov(3SYi>{8nk}FNv~}lfx4h^BlG&ctm2JY<~!SKb9kS6Qm(;$snZ9=UR?TZWzfX$5ZJJ zL)Wx)p`)i`SJ$rIuHNn*ib5B}O2-OfC8wv-*)o~JrX*bhRq0wJ>wwteBgok4m#};T zk|&UCL;~VoqF+X`2?;87x*3RjZqjW~U^bS`7uKTLR&81_HWa_cbs%MP>guhlMbcB; z*u!Zl*0V^g-z@2wx+-!!u>fT-t1OO^lfx4h^BikWFp0!G>7jAx|FL?8oAAY+R+-nq zG+{F+E>G@TX$B>)sh~-8I-zDWTH1Kz$+RX{QS(#It4-zes-khTtLhZ1sDTsTco~R$ zwk^iClww2h`;Dq3w!}bJ|NX(P=$J4Ev zef!1GVh@yrNGaB`NUYy1X_>k%ay+pBWiX#Cj*^qZ6BhFvwkCK)VxF|DFr9w7dgoF?=koMw zT}zp{mcBCA(*J+Zwe-83JZ}T?f8$yPglh@T9l$c@7&AJR&hqT9-~NL`WEF^FB!MRiZY0V#%w-Y#vW6j@f)B z6Kh$7Ev$uy(NtnbB5dP5`qU6$S(S(Wwu6%Cf0>AuBXiHosnccnmp3*?m;jBO-{HdM zKIZ~diLOckfMe+r&YsxfFgma7u%C#)7{On4)%FzaOKSs05Up0rdHPj3i9M>SoRLT7 zjM|(^O=p-PmNiPPQkST)@XOj&;Fq;_TlRkkztjSNV}{|60G9bZz%uJW;d~MT9Rhed 
zD4l~VSZ^GvP;)7@kfZ>xbKL{=R?z=p~kKX76STm`m#sPAS-e`vciWYl3OcAZ1 zhz=tH+k<2el6__%KhfRA=&s3_3#{#ihFNh>yq3eYYbx*8BP(7UgBf*(7O$f%I3C1X z=@uldNVXz*3P~FfH{9Cm&g^w(_H%I9!fU&Ej>EFuyUTLD2BM%AFw6gO2Eszeh2duJ^lYX?5C~Il|JQ4mvt^$nk8qLv2#xv zjMtb%W%$3Uno(5+kZwGihIFl}zBHc9Vq^evF;yMSP@{nGh^DJ5-HtC@2a;|idyyPO zG6=+6r>X|Vfg5fd)ANe0i9|nBE_790K*o!2N zWDAn59}$0WQ0BFFQfVy4%c#hJ!?_zWq*{m`;rxuJV zQk6Sk-vK4s8Q>901vGvlWa<0xFaq`LaJ7ZFeZ>LsN@2I!ZCd)?4?6x_!Z(B%f5o8@ zrB0ttM3@cxefv6DblJI1?{}&`5ruaucK(+0PJK?Ffn&!x*X;S`C4?QPtg#?~8l^#L zbXjMQ{9hE0hHVZ_Ip$MQG`ii zGko;y*<(k~9i80Ne_$sI+3o~UU)XsxpFaj|>>SdQxr{buhOI#DJ%lwh2xzf5c{kW& z8I9SEV2E+M4HA}(w%fANz@!3`%=8-t3Z@kZ>U#k+!n%>{rhPCTdK5@m&>Sp1`YaX( zI}3U)YlqP~SFkjV@zK%DwQ}em1)nQ~R&R#Es6mwmQTS}KfEE)xrF0>!U7_gc(G(IV zw#I^K6#+I8TZ5XcOUte^YjV1FIXwYaIC6=mp&_@$)-e25ye4ae_ ze(cG)Sj$x4er!XLw7t_m6Zo@}keEJMinT2g>o-T*rY3l9-bsqnQ1(v0$d$@+y!dCN zZ6@I6uyYI8l{g_1(Bb`9(`^RqwoQ-WfX3VdnuSpSq~wec($LgYL6pHL>>Qr3n4#V2 zF~K7eZ~<&amDwL?1AsLlKb!&^<9BC&fUY3WbH$ADnyQZ9fP-XphXX9~fQ`1kE%Cnkh0!zXtC{b+r^cozcdx zc_}6O2hf1I-%F~1hcSN5rH!to#+GWLE~sv6qA=G)vGd2CSNmTWeD37)>WLT59G`65 zc}dTuxjO3PivTB^EH$)~=b3Jp{Fd8)_rec6bQ9<#uC5?qsVf0TIA{be`brvVUN=y& zAh6Jx)>6jROyLq_Q5S)pLbw5?eLUpA4YUsmm{wz2jP~RCI-14O91;x)Y8;c;9hk&! 
z+qI;FsUnz0mFqg<5soF~@f7yXRmN6Koj`ID$pDZtG@#o+ktEOnjs$E#jgNuz%I9Wo zf5Zp86B;%C4M>H;_+f0G^a*`YY<{NL`RqLDcN9j`?Hjjl6v^SK8?U|s{}yB0VL=uo zitUGxLc0CXT1OnhssEz&T$F3P49S*#gkM9_E%hwLM3+v^J;~aOGAtN>q4GKc1s> z?zMXZ3tkf&9MA&jm$uP~(+R)V+HozStkLgTp_L!o*U8*c=Q_RLsfI)iyi<+mzvsMDpVMa`oK4pq_I|cb z(4fh3OKX+2%DP0Y^|b0%@wDpfci;YRJ*@~lt@l9ruYXkKA3Bz#;7}~D{SSF5hg@>R zrTbHQ!BW&qe9=AdqS+PzVS<)khoa{oZXTw|jDb#P01be$b$<8;GVhr{;;?*2IwwB` z8wm#4C}HD(e*{)YlR_@f24Ntw*K(7Cm1wch?*Q2ce+J@uzqu*>Ghf_9zX4gAL^6UT zg@kFKQ2-b9>R|$Fo{6Ij-4I`H>T+&mk_cN&u zZN%$c3jY@N%f|N-rcNNCNDL$eBwLWQBH4=MDI{$`44i;1E#D9}KuZg^0=~?wfMuSa z`SdCtHfkJW03H?G;0ph-Z=M`=+XL^Wiv(QBgAUuVnQcFKdVbHq+>TT8c;utn zP;eXPMn;?m;=pMKhy3svpw;1dzSV}3v2dK2P0pJ$q>pQA~p1*$oF6JEA4T=9S<%SqXK30i=ZY2Q)vZD4gvT zhiw)?ZL1wvOe@SRhuJYhv_K-@{fB8L7>JA=>GNa-E1BxWVa5a(h zzwc6ZQJ1>#RFU?`nOTGehtrFkP}PUMO+@QGe&PyN~qWhWrv>b6?j@+h&gNs`&3V>{_)d-^$efI)ABGsIU@Bb z=2qN?-jS6E)>B5!_0U#4ZW3l2lxCe)x+c6_tkHwp2eS z)|{_KsynqnsyDcvgtt_89Fs=Njf^Nw%GyN4lGU}Vkk!k@%RpA+N~ZkZfOE2L@RKJk zgwI0!I#=2OjEb9i18CDSZvgg2uyA<=<~|P-g$LJ+ z3(C!{q-B}uYYo8 zeQEvnUqym-(JA?ZS}78}9l8~Ijr>=`m)>grZu6U)rb72)>u$et3vkaJGwqC&VmlUz z^_wL-5V>>;AiV%(S43IK$>9l$c@A3>JR$+b*xk#wUM`XY#mxseEyWHj66-fh4otl) zay+pBWzeC;QF3y4!eX9d?FlB4fO6<@F}ArB+YG_CnO%%jMBs|C%_4%v6BucOF0HT} zhTyCmoDkd)gPYkUm_&kw{6v>McSm72;>^Cgqou8EU*gst_r65jUx>Z;xPQ3+5c;#q zaATsCgA+@+VJqUyJ~y|W1Ma({;)F>2%-*^v{1teqgy$25muuCY57gnu5CRPqhs$?C zFUZ5Y(cmKq2>&uYE#O}U)FHyc4zEZd_$~nk{s!IPFM!iRqd#JF(CNj~*KuIaGA-xS z^7eQ{ybrWo$XRdXz52<4q^oiYFGmSmzDpQaF1cXUb@p9zwS~gH;;06zf-Z~>((gj8 z5qC{|+AAo$HMn!*S_amfXa&A(;w)64e+p!Y&Kp=(?4cl6txfladi7FTgQbJ8q%fB; z5S3f11ryjlXkD%ai<`S=*LHIt0Sg^H=DDw>mSbk9N$5LBm@0fk6=FxhDs;wj>cWUX z2Lk%yuoqtjTjiQpun?-$M3lyO6UY*^(e0>>+o%5I)T?KH=U*pa&!`Mu6|(5t@TS41 zX5f1nrzA~w;VV8wSoZrgAK)`9pelg6$V1p##o^3nL{pR?Hlc)&g74t-@XG=Ebj>7z z2RO}wBnmSWg}6OER~;dd+;tLSCRgQj@QO&MkH_Sds8ZwdfYDB2hpR1bX;uZVP-@xz zsI%^e7pnCC=zf3hU#x-Zz%tgKffT#r>ez~j}#My3C zt`8Fn!r&7PE!fcyklEng`cmdbvUrA;&DfiMYfv 
z;j?z#TLW(lym@*ma=&52Tbth4G(9ob5NBrU>!Y{6IsIZO7B7wiu9ZDFyIIH`#^i+DQ#iAQh317bFB5Qg>Uc^=mvvK()47fsOVWiIGV275#o__i zNO4Y99FJh#)DKREa}zD-{+gjtJquq|ip$LJHRTKJ{{y%^k|{@-1i;NrxI-zU!4zL_ zPjOztyjYm#^d3~8-$!y32*gS7PSd{yVg`+THd9!B;##X|m>q!=m;N1+@~KLHh-C&4 zZb#U2l+RELFR)#0*gQisFh3{LDHWFs;rExWCTU7FAQE^fZJ5t4YkxT#Ypb+z5r;Ab zgzd4JK7Th-+SL8-iPENnrP@Pf6Z$`6O+|9@FOIx@hFSn_pQvEsJ!oNcD z_eg$>1kK0FP}OZLLxO=K1Fb;}RW-n-l5HW_Ua}?>?D=R-AP8TwYdFdbi6Q5 zVZ#f>scaJa>sf3?mHC9qzqbWzy5QF0SLZru`YO#7;M-ZE2*S_2jK3|oTFVUMs%6v% z=wyK;XJ73y>&u}k{<`5LGXuFKy$mm1PQRRH8)SIwja?Np$=Rs4u^}GhJ9ZA!e}yc* z^Ts(KANXZi{<+lpOG)~U^oOyZOB;S6)%{Z1^+8DTkw0$!R`a6VSdtq*_HB>{t~q@t(EvwqHp=KJILn2jmYWr0ib(4>PLWnE(I) literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_daemon_capture.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_daemon_capture.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54983d3dd70373398ff290717b4e7fc80816ce8c GIT binary patch literal 19541 zcmeHPYiu0Xb)J3CK4-ZkMe1R3L{Su1mZ%4*2PMmvEb3v>W?YUomQA&r6?aIjwVYYs zSy~bk*UFQ8ZEFZ$|Wdbx5y?5rG^WAgK-J8*9h=J=X^Z%0L)-cR3Fk)O@E#SKOJ_KH8 z1coqzOK|tPdfdd#W7yN{Azpg-_4pM?>j@B6&k2$sS$UB(!yq1 zJxz;1TGUKiHS9@pW8Vm!6qTaF56Ofd5l57Z(qMZwBaX;Y`(UO}93^5Xlu|^6KbFrZ z=Qb<+nbBOnxGg8~MX1LQ=5wM{Z0El$lH5=Zie*NNXSec1d~Ou8pU>oT*-SAfOIt%3 zDa%7c3Pkw4Jj|abxnfQlZVz33I3xU^}6uhQwVTO@hDR^}>-c$xp-y(+R!e+pK=ZO%$QdQ2&@q7@(7_WkK{SEtJB@WB*T5G7iV!S);^fe zjAq4cFs(AVsIvOtm6Oo-Kp{h5;uO_CII0xok=*54hEbWMN9By+h*IWEUd*ZyBcULZ zq8c?~Mfsd4L3y@7PU~bL28v$=TbR)pV z{FjbhPcV^0DZIYqU4IiAX9kfNm%dnBH`hbT>&$6}xL{Gc1sCxMKEaKP43@Kp_=den zze?B02{Dr|p1p?4a!{{MTMGcCpV`Ym;WKlMLT+K>fcZ(K+`IL#p! 
zZbtH@eEsGpW|a_JW*dG1_>aIC>zHSr^7rGLt-7Ru;Fe!XvHMRjjQnPso|}D_VFp;E zXg}q6MohZ{Kk=l3oxrb7UFdZ0#t#85!8Z`mLa+To(H@IbxM z8CzS>5ajwT;Gk4kO01mn|7IN(#|-H=VT@NGvWD)8~-8 zgE{1kwv4+zhY^^=pA9n`n5~RCa#&qMZk}mkc=&hR)pz-zr->PICF6Z#f$e9tW&0jX z+qCPp`{2Q8*I37Y8k zdsGjUL$V9W?%T190_?F!>+gkPkhoWM%jZr`yHs~hfu|*~>77Z1&ndK2XHuDVW4WD3 zIxAY(qqVRn$r0o$Y8bg`T9dM>XILz%VOs~)(4oRbS|UT#8sM|}e42;_GANIrd{HBi zou&diCxNKR%TPj58`H{Jc{HD;MbpE1`AjCCmNFxv8cZvhArV_pV~6RmKv*9oL?&u& z8W6%rrkEBl6!IB~N@>;mtRhRQFE3}ZFh3yZGs!qeKQTW<89QmfA-K}22ShVSI*Jv@ zMmjF+nKpAf)vy)mv`0F$i1y$;_Q*bau^slvE_<=v_Q+lZ`49><`$cd$s;lTEYM%;B z3&0*pNz3Qd4JTpxHOAlWTx1heSHkcGUSj6znlA^({U65o$yGD4wsFr~H2zBZa{6j? z{bcK0?dmu7zqWrOH&fd--aYSOVoe`&wXbL55j+2HP_WnE^=tkgI)V?DxkeTHZ>&nVjKTCQp{REhTo>R8}M0u31+9 zyTa}&aa*+LRaDp^9P9M_v@>+v6PRZDroHrwP<_SG zLS9t;+8nFiK3Nh$z_Mxj0jASxD4iaWv!i(t!jW|PxzS7>KTU{n>GV*JD8+nE5+ymE zCVlwk>eFdl5V^s0rdTAoGo$dDAn!@YBgkLr`z6RxDM9J2;L7?oGw=5MI zir3oV85M|vg#-`kPUyoy)&TnOFM>Xn z!4X1$q7SwLeFO(g4AJi>r52zM>nU|r1B0j?M;KCsq7SQ#g&uj?9s^dJ5QRDfxu{)Qv5J-)VKsX^G)TTHcfy4(^K96zZ zsm5JLAW=Xd*MvIxqf`X3u569}`+&f#u{e*vo{oRjsSiKN_=Segd>xqR&UlL}S_ft= zSO*P4qdwO)!fIhnsz#sd&>hTm$e3f}uFZAf=Sw0zci$sA(p%ei+qWH*2cqgNiWiDw z^~hg&OyGxzJi_OmJPU&6b{ud>J~v3g%XY*r({6GI-XZLv$@K_pF(M0h04 zND#0P6dVNSh$NA00dkT&h{0`0kcX3qk^dK`*peRtPK!FlPfgK1b&&b(} z(1-0<2e4pA1r`L*Nj5`HGET7|>A+Inf~*wD!x;2sM@I?@LDon1AlZv#ACfL4`;p)$ zN*+RT5XmD*4gpCf$YBiQdZ0LsJc^+sNV<{GQ6I(7F(k*4oIvszl3^r0NP3Yxj^qg- zcEHuCAgi{6EiJ4;t_F&yGJ}I+p{S&(q9{$@;6|Hl)Iit5LO5Z{ODjWN2VOfc7jO8m ze)SvA{LwR$t4hg3CI0YC{iAaaY`-30>YA=I-ny6$$*xD3c-4nP`F*6wo&DT~SzP295^F9;m;R4QZhWFlwGw|Vpvi37)<0deb*M<&Za2*HGQiV^o-v%=!r_3xg|xptM8w6 zQC`zWeh1bFNdp=4fW#nQ!Dt4^wO4`TFD;lA1gGLip?HyGA(V8DZ8fzon%o7cs;PY- zpsRgA6obv9m=&vTVR@c6@?e|PP}L_SMnUJ|D~Dhbj;d_ zd>Upr9r-kTP{A#>o%?CLuZiv_47N6s14FYkOt~MZ4aIy`eQ?3PV)4Sb$5H+r$O^Q& zmHUhOsc?z!oT=Y^Z!p8FZ0md!_|pfVmc2>UvTfaMo*%b`y7yG_rvMC=>Ku>VRsMuJ zMZpW^awk2ashfhn=ojb_eT(Q3S)nWxN(s=F?dPJ+1 zqqfqreFE@im$j=|dc@`0wZL4^1al!FY||sw8@;zZ)mfMJ>3L2><2hq2?CW_{w~U1l 
zjcf-4Qcj|XZUsyy8L4JEcZ9pn!+00r7;7pxh9paA>|+{rGHUCh%%Y;)No@V^fvk*A zOq`#3=7;%L50q+!FUSUj{J^L41Kmv1o$&lP5$fhE`2l`ulpp*qAnJPyukh6W!R`Se z_=P~Pug53(d90fv99~Mstkw<=BkkbDUT3DhoDp-#t5Mf%}t0+vHcFKUw+f_ zdXV34(;Loy2U-N({eZdh`yDM1MvritO8q6wRoHKO2v~gu&Ai!5<4IIgYt?eL2AH*g z(?kZAD{bn}mn~^(Y{AyA`AdWc9x(cAtLlsRndw3ptDBpGAVYma$Nq zF_Rm2DhU$^z^WaihBttkt)vtBTX8qWM)%PQ*G=Zo=@EXn1jR7^7Bx|ZS zBghzRhLB4T1-}J3o4H87iCNSravVd{?C}zYOx+9_!RQGfN(~VEK81$0&58y+&;u5{ z6^e3NcMVO$ggQVE$8rPsh`-a+HS4#Q*|m#$odt`!PdEW;~SCJB7Yp4jc+c;H&2CT;vM6@x!U!UYs$3`zEl5B;2mXZq+EMo zoCQ_OD=%JtvBdJ@FV1j0+&|#>S+?S@44I&F(bH_P53rV&F~jnu1r@Kb{9IzqY@(%{ zXqg%=C0a^}J>!8nuC2s&OdT(C;N+tJU12*)T$>iXY73hMAaUxr87o)B%;XuiV~$%3 zo;VZdt2}YWzoxm-0Ddxpre6)DO`JDl;FDvEnaK#eR#z&DdbWAq-xlh5Xc6GmSz#>! zXK{Bg)5Az!fH+G!kvh7MdJMxVIGDl)j6xhAKpdYAar^+AaKTkw2&@FQ%gr+bK_koN z&KgJs`f*cPbs5kG+*uJiIlwW{m7C^qCujvC8@4Sr=M!){=<+NCe$eaMs)s|A_Sq!` z!A0TiRFK*M{?QP$+3$lLzyxIdCJ3-vfoF56Fm+Oxw%Gx!I?nLS0z@?fCZKS#O<17O zgnm}r&Tz?uWyUBd^s{O?_sFaTVys3;=x`w-)CzT}h;F2h4w!JuHY<)GDrEv9<6F{= z^nWFU@6R)SKX-ecmxK$hzL#{kP~V3-W`?|oXUB(8LOdk62vjD%is|C7H0k) z63gxq1xE={%8FU)Lt#OSv3PZTORN(r2rET8i1AC<@;H*qK-3TwW}5g#hT|yV{{Sq~5Ma&wtW z&a(R76*h^;0Ap9JpqUM6lbg+0xgusJL!tQ^L*zzax_`vConSrhv7r+U0qP;zM}7;E zlVOs=FXPKdos!{wfPvT*r!*Dd~8ZrfE?~MC~DV%da5BI$@R_-LZh$2Tj>avu)sy0kf zd%wz!N_c<}Jn_&=Pqc4po8Z+fZJr{}W055kzS}iUuDhI_&lTYq1i1X++DM;=hl2l^ z1jh;K2{<_Tm{ku}ter|{wB@T`g6=5)0c2&$;mQr#{t|y+rv9OO&x%CGU_{Oww{bDJ&83tpole1OBFi{tJXTJv=Oq`0>m4&jl{XMse)d%q`6}e zVun&#;6Ql`J3)G!EHZ#xfL^w?csgFc$d(#qY6#DXu<^olqWcp{-7hI6$doHQ=( z+|@DdB4`JI8}2%7co{5QJmn)6RKE5aV7sLm)^v>;M!zFCCk%(J6g2=Rukx8eQEPos zbc`?AhK{R%F$K$r8>qwYbkwHZn#9xwB#$DoF!4r=Qnl7G3>`;u0tvN)*j3*Hq_upa}Zo?vnuqw^)4b3GEjww$c60w1*tRT4#W$0XWZhvm|mZ{!S z-bWu$C62z6)HjLl&m#{WZSef0A#`-pZ#uJspB)gwcf{->pOD!_ zDYJ`JFgxxZWOm$LVRnYkdl)t3Ul6maU543>g*A6g8^HVlvNTYof)}TjI)yCj&yes) znvqac*McDg_u9F&6|y$NGm<_*gLaOr&7s`iz_UJ2*7iV&-#b&k4>rok+8!@8cHLvF zjr=vvKB@|Jbz{&`-8g}z{u0UCNd6XylaldIFaybp@Lxe)H!2yaY7w-Gw?M17dAHIPnc_do@e 
zHGV{b8rapqWqB2vY^8Z?t|J44g7y~@Mb4*cv<@oYYKhW17EYptj; z{ilQ4wT*-IV@2vJBd78wsi<#&8W_r5z)oss7!d$z8dy3*m8w~leMtLNfrrUIK{*h- zjDlL%>mHZOb;Ip*dBN`O+W0Gm`4;od@JCGJFPP}BnC%}iyRZ8hH~Z51udl!9j=7p{ ndKh>7I)>((1Fnu643PP{K9_6D4F>7_8TTWu&Hu{)q4WR$_8CJL literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_daemon_digest.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_daemon_digest.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbe9016ac2a4c3ed621fba1cc3d2176bf649ef03 GIT binary patch literal 10272 zcmbtaTWlQHd7jzX*`1xeahKAH7hRSVWoc#ain2&klq^RSN!g}s+Tmy`I#e-P?GCB6 zc4wF~OG#uJQ~{EjLGxh5bwZ>vsHP!u~76%qp$F`58@(HFT= zD_Bqc|1(!oOUtbf$bbHG=0BG?m+!xvy&sQ@3?%N0f3E&#ieY|-87BoThP@wT8RlI^ zVhAI#Qs5jr5g>sq^4vL&1nHBX2+nmhGA2a_tKWNQmbZy`g7k zHIl6>b$wAS7b+#CuBnAebq-#M;>hbSXUnBpP0yByk}cJ)l@|4ET`ONvD%tq*1La6{XfF zfk^Q6QsXkbio9BytCnAdZcKi@)F@+FXjD^Yt8->-jE>OCL3wIUQ56DB$tATS>xx=2 zlWvjq8D&^?H{o937>2z+0L;5g5mrARWf+w$!j3?0$}eqKlQ=0j%x0M)+sEYi$w!Go znSO8{rcEP@rl2djjvsdZCToW0VXoAM4j;tVuT-i8MkzGv^D=zOWix73k%^`?bX=xv z)_i63#Koen5dC7cq|PcAUsIJaQoX8N9Mj4Rb!_Y6i`B+U3zsh1%XE>>Ev2%xVzTwA zh51D@ZLzYiBa(rR((te6fZSj{O?Nf-sxmYoo*f+`MAA#VB8SLZ;Y-7!*5M~fAa024=&u8ydBJwT~KY)c7qLN zP1v`A@g~gMZW6ccro#zgv&@V;T~esX&fpTz1QQmP1Sukk!$B!J%)!=-O`1VG#v}m} zbPmsxHek(Ac|mVz^`%5ZJFQ$V)#qzUp$z%9S)$c9m+1Aze8DG^3z`XgM=5IQQcZya z=H%<8YE5ovW@=LbwrRH6McABtS<@Ri&J^Z}cC}hjh#A3A(lcO&tPPyw2yRdkL4r`8 z3y^l?4*{_cinV5OyJc8b00eRv-tR@3*MPv*ZtGZi&S>ko@$x#KXr_7$zUPApgDnFW^;GA2x_cwpz23cVefKl#d;8b-9Qi!Lw~3#JBeBpuCc=lT^@Mia zWCJUE|9L2Sm$}Fg7Su^VVhIOp5ukt_1dgK|4_uIPA>eq*g{2S?=EAv%nMU}1(T1-v zinyN@pxLtB3GFltl%BYsQr$J3dWmh8#au5_ zSt*#JOh@3m3(dC zHr$e+=djb3^HgBrPqFjo0k4Eflex)03Y1P*~%HXvA!n z!33z68nSYIzE)DHj?M`HB}#3Ust&*p>#!Zefkm-rYxK|hGe@nA)z*MN^PG;`2VobJ zb&Q{)-$srZZI3EUGH=( zhc;5#X3w!ZsppqN>z&_x~;$4Lih4jH)k<-HU`SS)@=Mue-dTc^?SVa5C(hY!XI=up15Fy3L3E(0$VuEVn}@Th24@gM zX`0?41&dsQITjeSl}MI(HOnwj2Gk0wCYqeHtMXo%#7m)J_6S?#Yc5L6pi-tua!*45 
z&eZfDO`($6lBQamqbbzNTT;zMmKoHZG=Z8!EliU$I_K-vuml#o#5j4duz6Lu5H1Q7 z>)vzlgkoKn)5_Fthx49*bwV)$SQnYT?!!892Gt2oQ(2qwJ-qdCvCiwI*C%MRe(Z3@ zMzPMT)%WGK0PDn=CrJwqE=f@v>qIFg#fzeibm8|zl) zm>%XC#;H^RT{H_8+dkM$S!VK=r?`F0ESpPAeg-d?&v4&>ISh9GqP~=z)ZoxkR|E3d z{8IY`MY&R2%<2pEdWkH8w{X3I()fJTUQ!#Ei8fy?7e*Y8pw~puE+dFS1nqY+gycAo z+)jepz~mbm+I&1IsdEa!<3nvaf(M1{K_Z}7%)%Y{YN@uMm_i-Q_Y!zKfOFu^G!%jW zY(eobawm}BauZbhqt{5EMIMQ z7K~(JO|buNW0{+?n7fkMHAtwND(+hg;iR z@BhE?dO9t!9_D&D?Vy>;d1X=nu^X_KtKlA|$%n+Q6$4lsoF16My|E=twXlC$VX_a) z(YB@uuy&`zJEL>3RplSU+JG9mz!yUlYp=OTfOaKreVV2^9L{?J*7C(LU@d41AJ&F_ zSkw}soe{ZM>-FNIw7-`k?_icQHj1@gt*j^QaDaGjEx_8a?@L-shnzkn!NywZh7|=H zYa=uJowj{#^*LqU6MBv~Z8=XHYp;GKtc^}C#m^(+9n>Kf1MpymZ9pZH&<%MV$(I9X zI|WXPS*@UwNZ7=ABt<0ucVrZ|AS1be?O*_5d;CKm!@)ksVBAB|ud0@75NguF?S#*k ztEzmhs#dgXX8(2wxLpmzUH#vHY(_yL)7+K2b!1gI^ZSk-8{KY2jGhzC?8&bMZX(mQ zzbW)CUtQ@mlD%t!{dXIS+?>VS-Bf0HIe4+sxkVVV(8j~)CTI%{kDcOvc}g6QZiY70 zejJ9se*QI}%?DjzmRe?~#QEUPH9(!?3vlO_(Lj#{cdmIm6Nhopgbvy;3r4JGY`Ufm z03bg-*b3ObjVZs$p=Tzuc(O9E zlVk$ABd-8iO1%MYu|j;XM=s(^guJCrJ2ZQ_RK2p0AIJ~ppY4Bk@MwSkQ5uldAlPid z;TzV^V;(GclO&ZF=1J?>e%?S1A zOJq(bo{jkgqtb%TGuR~Jrx~SfT)Bp=_&xD{f94oLr1QJugZ|9WHpZp}w_syJY*en} zw8%|*OOfibK2baG?S`T- zk)p*IcoMPcKleNdU!B$}$eoubO1c*L6D37wPM;|0DnKb-2-4Uys8T6ejN2YVV&;G| zlfJf|a>~3Xcnk@rSLbPY461O7Rb%iU0j2u4U+O)4VJX$^V>vIflbo%1+a|5cB%k)Z zpb@Ed^7CTqpnK`mQFz}Q8g#S1m{k`~&@TT7c;5^ZJM^kI_yn`wZ36^4kbhd0weZ#foNDhkK{W@ z2$CNn`4N&IBf$mPYP~LC8IltG>uDfpy(alF_CYVlAABfse6KYW6oQeo!-Lx0R_}Ne zasmZhs}wzM3;XfXz~ta&(l^e>>f<-%_L^Z?u4rY;WS?CCa~_Ozf}7J6oSRyDjbL`l z*4=Kw+T3sxTObPkDT*0!i_EZHMzw`0;18zomoC=0a2MfkjT)D0)k~HO5noWN4P58~ zss~F=;XyJ(R&Qp)(Zu$3E?o){C3ZS*HvIsM;GUu(KQlaAy^bGbc`Q^7($z#qfX@7u zB{7BPEvMxL@;-HBx4BCvJi|zd{ J1_)jE{{^qPA`}1s literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_daemon_notify.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_daemon_notify.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9123be8ea03967ba0567d3ca9b719af731d8e1d0 GIT binary 
patch literal 8402 zcmeG>TWlQ3aXtH--JRVfDN&+c)QHkaT3MoAq$E-zE!|1-iIk{2j`m`kwC9Gqv&*q| zXXu_;N~9e`1_GOg9Yh8!gs=|*e-h}BpUa=W0^1*kf#id0QAcLLz(yRz&>t0Ez~sJC z)$`bgmR5Xl{?a6Sx~r?JtE;N3tBae-WSl@T|N9@b^&}y`#g11(PJ`P#$`SH}NQ4rJ zlLDjMNPq@JYzIe!G{nB)k%$SfktpTaNN6-R5~p$24v!{A1S*UqY0{mQqA8da8BLG0 z(YBFx+Rot7(TF>RsqqsYBek#8F7bQG*Fya+i0;}JFV=v!dt1DC1724b z=^;{LFOh`)fU^kQ!(d5(rF__421^61&4=wP1~VO%Con61PccobS7ZO?kGhqcL(U%;TgrsO@A1$BX1dcK`YwAb;iEa1Kc+G0o5#3E|ip_ zjEvb4XJ?QziiF6f~#-Fr)$Q2+4%T>>%v;L(UfRR4Z7r zLlgQDM|?C2vpyu7IM=@Y*5G8;RH-?sDSAPjysN9%sWz)lUN>@;vZ`C=Yg}>YjmWVsJS$NOu6Z zVWR>7*hYJyvHeD+pcInAQbdYM{6JKSu}#Ov>@d^td|QDU<*VvLr914$Ua0nTgUg@L;#+wIb|RW|UZC= znK5V+ge|kPeiknIz@T~B28%N7P?Q;y5Z#H42o49XWJLh&hClNpR7|q#>3wT^#I^1- zYx_^G9XP#qKwLX;cGtS9Ea~#WrnEu!X z>1Q`Rf&>9|B&9G9pRVh1N8g=w@HfrFE-$(r4aFZzH|!_o0gtBpl5wM$6i z`dt$Y5Is;cG%vQ$gEd$-h}Tp$Fdp)G_Zubm@DWnO2T#8}zE5KUEsm9DGtLIbQG`up z99?@;@wkNuiv4)e2)cqpCS2TQF5UFPBx* zREnzI0k?!oEjb5AMa|2Go>ReikuxeHb{c~inO#G<65!e~A7G-eL@17bI>3&gd!@J@ z3aOfGcD;9E*5LT3?a)zlCfXw$xn#%V;gw|1lCj=(=*NkLTkHM9zr0oLe{bRLs&Kq2 z9A6R6t@jQ6vcKB*ZVhyHUFdjxWoggTn=8WkHKB7==&1@lt3rQO=wA^oZY4 z<7CgL>_mM3TFzZs6Uw$X>cO}+b*xH_8(c=ih6QXl|Y+};g-Mq8jmu>O1u z0!h%)ui-sQu@?H-~YqZ z=YoG1ABg7sV+=swX$-@$c|zde!?(Gxo7Z(=pbUS$6!a7YjwM)Al6O?~Pz{z1Ng;47 z!Es!%sg!Dt0?!;P5I$~?U&DeBb2Ks%)x#pmhI^ z2h{ABcERXaCT>Se)q?p=o{e6GVR{WJyR+p3j=qCqVYuxz-QNAGdy^k%@TQ8?`M3nU z{_d-D>|sT(Bl#OxAsrKSk3P=xa&Dku!aa+70~yI)pe>_p-0;-C(DIbPDg6EOa9`#e zRFBAS_#LbK-YUO$dEdE@Zm;myKBFW27h&L^h24w7lJ?QA72y*2!oR%xU2vV}tHSw@ zZvN!?{JF08UUXYY~^z%T*e5bU$TKd zi2WfT;%g_n=2`HuSv#rs0#f)CT+LF1)lsa$58xT#3I=gGXU9o!ag)2 z#f;f(bntCLj0G>`_yU)Ql+uJB$c*Dlps7HzC7$JBABis(`vU#0L?mIt_fO-f)TYuy zJYRYlM)C(6R>5ayQ|TcSEX{>rk7tidWpBgmT}@M=+iLb*P7= za|is?Nw6h%wOPPb^0n9zPXe#SjCz<-syB2Byp$f3(o$PCc7|iU_{1JhT6MHQp3Mbr zptwiN`>OCV9^&-K5I6IkY*=c?%WM2V^GvYV4r#{#rzfPK@lV+V>f0!I(#ii#gLGQk z*UoGN?5p#O|5&rHI&ND_RPeZzcKy+HobYISyHNf&I`(IQj(17Bdjm`ZNvTWPlTEt% zNc{o&NO@d%Uq>G^zmP)zFK4vE|7$rzre^P$9hx@EYQxQgfhGiE+$Il=VeSdJM~;vW zxiNOR!Sjr_$}c&ZSgpHc>i4*P; 
z)e$tViitZf=G~u*wV;rdJ)OoD53VmAT{$th!e3tJyA}^V zJ-NbPT zFZ11t@3QKh1y_H(i9|qXs1HV^o==oqkVqT{q zI>r|gi;mxe#vda(h8Ho$$3PPc%bSPC=p?jX?~LhJ0nK@9$k3|u!&HI3{q}2Ju=9Vd zrliNKzW6J+4)WtXQfisFAT<%n+ztft@Sn3*XWPx4z3=AEj=g%MTJll}LHNSpRv5kxw@A)Q zt>fjK*>6W>P-4!>tQRVffrrdGMWeEL<)>DhZ^=NjmBJS#ZuUaGR2*pB%iJLtS28EczkKvOFu#$d>J~y!JS1amTxI!q6l|b1acc zH8mP*;foI!BuaLZo$T&lya|vfK?02PxSPBVgS%W1=QhC}?gFqa*%W%c$soHxGRQx6 zKCb|Y)|HP2g$UkudAx7s;j%Ie&3_M>a|eF&%v?qgFnk1k8#|0F`^%C z7I3UJI5_TYPT~kBIV9(K$C#5iMGU*nyNH{f`7w_SX=7d@&@1lqzA-=X(=dNNFcu_1 z8upwIjn$Dlt#p`#p|tmWWUQXlk2R16nl7Ag9BU#?W6h*_tcA3UwUXAcHqs_?SHk0m z^~RZJsY&u}=U#UYXh~#y8`sH6{_UI;7;v&gvV*1tAuVL2?WAdSkQO%5UZ80aNUJx} z+G$z?q%|68yRxom^TMC{Co*zT7N-hCoXO0{H}a`iI+>X%=M zdf}=pCP_vtLV-+L%;m+LT$oN4bA`Nk^@f;APEY6ZS+RI6Gb5&RB$Fym-w@|>#cSeB zA$1)}Oy#CC@(n0F<4@+(dg{ofb20H^K_r>vv^a7}((fv2_s2R5sni^i@s?1#%$gv_ z{2$`0i8|CkGM_ILX-njb5O=4O#SES=C5zYK%+Dr^scTWE>N!=&Pvx>|aFjkGi$!@g ze0452olY1}tk%QRCaxw^*XL#vGdV&EL=9t7n&hU6320*Ky4r{d`CRrIJZFMrrbtG< z#tP17GS?w3oy)?jO2N=Gju3=g9OqbTq60DxL$EKvad}4^h9wO-<4(zip>bmXT;s`JYo2cNcBU!)G*}k=Aw-YFS2f7Lx+t;F_TYTozA59i#R|Nq);e| zvxQu~NC#X&EY8d(ppUP`R5xbWsEehhlXK}z9}MXNxuFVjMwVfm$`ddeyt6Qh^F>+p zvJRFvB4dBGMCN7-5@ap*IV>9W6&vuDtrwdnML|&jZ`2Ag7;<+>w%N1|<`$U{H z;+%|g11`LhBxx}P)fY(;8YM%9k?Hy54Vlgr{{}x-oa#Gv$Ds)43Y<59ON z%#y+zxpao8KI6@*5!SZEOgd3al5D1^dRR9=EBKk~Ft=6zJjoR^iDKqWxX9io!G0zJ z5Uf}45z+|>YJl|v?KstER4?OD6X8%_J$3v_9A2k( z_STiNx#BBxSFdOwbp=sKrZmACXU^W(>NF zrN-R~zq{-{gi+zgSL@pz@f?5nG3Vkvj|9BKF(uNqbX|!Yg4?)5Wp|U}ZYsOCEAH(} zM@o3^p~-EEyREcs|9kT#Hw0h$UZ8%pejBuFbzA4d@Q&4{w$+{D>aLDev3qrg_&DH+ z_#gWMLC+&DzYOV{HLa-sY}w!~u-YDLIHs3Q106G{Evgl8c6T2)Sv< z10kMrp0EB>HLES+qKtkDIG^{q@G&zaTYM}au z!_cUd)sl&G*|yFs1+DT*!pGh!=SOOlQ3_fmSo+8Q z8Ql6kBwx(k2fdZ&CmZ!Xu#|4n)68=x7w7fT`Z)uWChyL2QU67P!xb1u<4F(*vZOGV zH^=iCMD?rFAm9M1Zww@50I{ODyJz;sk*Mtdq81Q`Y0|-{+=D5HqVj&4a5ySQ{Wn#A zPR;@INaj-+z+qhgA}A@|ajUM`8wZgbLUI_%k!V2eNX{BLO;JsbAunKUq~!^6TXGbL z8hm;7Mgj#L;F_}Pfi@?nWz`RC zF60EFNP02ne%dCIDbA6+OeHb)P$tSJu7ZS>nM%%07uA_ 
z7(__Y(kf#t`GGKqAY|sS#EcxJ-AAoijOcw~u(au(_k_VMAn9+YraNh9vvG&cV)LNu z+UbtEXx7Mm&Luk!aI&+Jdj-UuLcg7EddYbKhJJt(fnDcalA{ohyHThsOz8qz8j#%- z-^LBW5He{87+aii&k+zZFHanC4!BverHrizX$lRJ+cFVt1vCAbsJMxOoFPt*+fQ6W zoY9{miGrM2%DzQLDS$?>6~vrW5b~m^8TTG^(3oHmi_H4$#UerPEB%aO(XeCob6MyW z5&rY8DK08pRKrz<5D<%;x%?f+LW3sopz#KS{ZUZ7(Q=YY%XggQIJ{~UY5F(nEV9!8 zWP-$jI{_-cr~)XV2`D5(rD-~=)%pY)oT#LdD$Jk?%I3H^`RTkjru-eBWy(`w#PK?r zz`I+7l7rUBVXK64#2Ps&BiK-B#gtgOG=YQKfv~ZOWICNmCnz%Ls`}B|xUf~ZF}R)l z6(F~`hbWvZHyvK|t#<4ycl0YA{VN><%MC~Gbqw4ddFRO2k0>1ji{4Vh(bce65_*@; zD&gL;p#AO%y``|oqAQlLkpU8y&KfbLB4#8*rXT#1i)$QQLj~>EnosR>SBN7x!(6D+ z9o1Dj?mSm(wu4Q`76sxCyV)**b!^C4tqPh7Ox_)Lj~f;yXoq&_^A9kEufUYJoqA}& z6i=R)_`-bL^CBQvVNsur6jPp}GD@Df0GQ&v{DuWn%rf@#-mA|vPrX*3#VlptBBK=S>sN{}=bi;{UqTvk>*n0s{0AE^kd|@d%#?Yxv;#lN0 zB$u&Vof-hka1!iKpuzwQfj%>xOl4TjRDSpZ=AA_HGLkDusOV4=Rst4SnTG7DD>ap6 zGR#k4X@nS(K=PAFl1Q!sk-^+*LWr*R^lSnVcq}WBq;oR0gsLw-3zF0*{3_o6FM-_R zzAbG3pl(~au0yHoSPgDlZEP<$?ok@|EZr>ceNox_;|b;} z5xC~=rBlmIJKkzZaiH)!`rM`nWym7-QU)NDpCZ(43qqM? 
z4E!MmlafP^YKTO$)LQW?UHDd6m**i`Xg z)nX2=!6al@Fc_|dPR~>vpIUwZsfZcLkcj~n)h_S0HIH^zicN+k z{70fPp?LGD%JjoinaCSB9j_sI9f=Jxkk8|V9|vNma2%{LAFG%?os*fF+2RfL)U(gZ znw4OK*M1Sm^Qj^*rTUh#O1Q5qXuo?xA5JNZt(ZaMGNdhKjhIpqGm;?_n^GhLEvnE# zezcPbTJz~-`eE810+#jet=a9?lWH%WRF=xBS)8uPAFnl+o?aFiz5M)BXlF^-y99zX zFdFT5PuRP7lf_odpm7<}mb^wxsfZcLkm-{-g+Ap+KV%bLsKRaOU&EQSNkrYB==`bT zCWaDQdlEghEP+WxZX($P{Q{)^+cQ7GLw#y~`eB=&CP1qbagd=_NB$F%Ep<6^9kb+t z*xPqH8GA8Dw~dW>peAS8s@Oc<^M3+)2Aj?(Rgiw{6RRK*oZNS~i+9{?g$`vo)x(?t zRQJU~K6A%oULjP?MN2gKG7{vh(K=N~P}hY-LiHySGX?Zyg>W#D_^G+%G&(gvESyM8 z<%nDa$JI=}kVueMu!0LnQb=ZzyouzCNNyqd86=B9)aFD2oHmNNR3cd{lHAof=v=7d z7cfH~l3pYSbb_8w@>w81%RO}}3m0F;ac zKqo8;OhO)ZmST6O5=B=E5YBc9~c{l7#m>+In6b2t$2NQ3M!7uEv@-s5o87Qpe|FA)UM-{@u@hRm>y_I4!f0wqMK_z#;h@^=yu9 zbZMT4G3q0&`(SF0jo@Zj1TV#lQ5V7ICTOu!>)DD{0=XY9R%Oy~`7~bMkC%h!77Ytn zbm2$F*8sK_=Ka!6VM4y1o0Zx65e!bP(Ftg@dc?L5*dle!QaEO19GADa?}ZvljmK6( zL+@XC7~HiKS_#HhgKg#DJ|(zsCAfc`=jx7^LqiWjLmRRHCBD4#kn0cpFL~`dcqhE> zx4}hk9CB74nt!u%QX7`w*X*v&!-2EA-nHX6*Q*ZbXt)C&;Ge8{N5@@toaFAOmR!5_ z9&`beyy)RPY1mpN!%heT&pjDpb)s{jUOx;x_jp)OH_v+OVBpa|%fREd79z?;5O?{=SipQFP#39k;)l^SPXp0 zu{H+ulZ)_@katTym>;kr4YP)~%$i^v{M&KXRPcd19q(znEgz`H`RX|Ep)lQDW5X>d z=W*mcm(#~z0i|u;5KF!d9z4hh=rL})mL)~vURaTcv8Kq#ItwC!1*jr_+pgvctj8f35MyW~jPY|SLsUez4&4@WhZE$~$r zhnd?tc&V#i-0%Y;t{Q*_>G&OWk|C@gmvmO`2g1toK zWvq^RU7WxWb>bYs&`Bhxkc3n#nU5!W&@t>s$^>s_ zxuTn!VO=Un323J>|Xal zlOEI`++fY3e5xDX*xg;hGmWmhy}>gM{_dav;lFbD&qREG6`Q65JvhXXxlx^6``}|$ZdQYb9=3HaeQvpJ{~GVA^MiL) zNTe$Hx(}l39FTuTq6uqaaAd%BcffzrRXNyE&!dAq27yX@3v-&Ru(cQ}HQGm_?m6VR zXc>Xsvm)JIYikYB`W~f&iva!|aHaqcVU!hFH)%EwtPysw(*2i}){V_7sJ^Ke_FBU} zUDzJ`tSV@A!Mku%8TDwean!diwY@i?)E{5;JQQ}^K678_VkR>*&|M^70t!P4n@RB9 zS&WAP@)b;Y3&}4d`6`lMMe>_SejmwyMe=7z{sPI@kW`ue-o`W}XdIJ8Ak6gF_ax}$ z`<^rez^1o7%E{oux4HOk)<7(hf`8-Cj3mz4T)&KWYcW0{CZ=~hT}U@w{q#FkiO(obRUIH$S_M_7MGUpi zZ3x>>&1smmo|@BSDPC=lq<^wiZ>XiTz0y#_aX0T7cI3UVDd6W}x1_+_c1aOxC38r! 
zNbnICE-*!%y66t-yCWf>?T!Sef3{%~0+2*^N}?2cz}PAoqdK_kzu5!3CSmRx!Y6g_ z#T^5K;Quy_H=IF&LIascg0i0KVa1-%FQXRuPs?g4F&0DH=# zR*CbNjD{Rj*wm!W9n9KLn9TvZqF={I6bt(ICJxxlSk2C0Tj2k|9klK3=+yghxXk&s z-|kuw+CSLVTM`CJ+j^A<2Fl@nCESlG;r`|D00i!b2g;Cc`67T}#%9fc_i z5*ez0uXXJf%(8$=u1Oq}^ssaAHES_Y$-pjqXI>LuDp1KR)u7+cJXfKT%i0=*yZEDIGs z1#-Y9QZ3CAyN^ZLg=Gl7WaMFi6`}K?u#GaF)o}AyZvOnulF+_*^GiPsr(Xnz(RS-k zflF@~36|J>EW<7=L$JCWq%qE-N!IYLI1bT+YET3M24|_UN%iC1wgy@lEaI^_wNQT` z4=xV4+yG0;2e_-rv?)n})KewjAQ!1njp=oiQYuwrs;d|)$5uUnNfle@zTd;jP}-^O zTe2E?p&aQ}BHg8-SHT+9wD%F`Y^r<2b`9}#*H9gY@&t5@RSWwisFkT=g00f^*LeLN z5~^z1X<2`XSCODPBjd^{YFU2%=#w@-ziS-{yCdeYSE~?Rs?;jMP^*AV(C^ytz%cYL z3C8KnK_?Rj;fr5lls=RvwtL?2rtF z;iEfBfFlK3!Kx0}C(Rd5NgmXDB`?xK{HelBG44fC#NvboOV3XeVkCj?f%IKIYp!Ql z${0d~*+8pE^3hWnJMU<~oIB6o{3XYs01^d=Rp%QdKWU_}Z`lm75ACqX zp+okXAKT|CXYy%kr_4>|U=*G;xK&^)r*a=%T z(BbFc`*X{o({u%3V5R=(`@Ih%==&3itwwf~BQYfsTZtT8ZI71Q`<3?ow*srJ-KEI6 z_eSp2y>~zUaK#KRV}TKtOv7g_ zVY4txWE9r`2GRb1#!(8L{@ePa_v??+^?);u`=Qetau+<-mx7~vUGJQVjkfULXa4#!5~R)zjA-sn6KV+a8y)1V0A#|Uv-$ZJ z0Nt%EBcpMRbvn;Jx9FM6gGcN2>JeaBKQX?@h`YlG9S&}&1uax;9c7aTqP7UC8|V+# z7z`Hg!^#qF;1=>FxW}d?&(f*5(h`RyOQEqWY*&QsE5c6d#&T#SbojmN4}&{yUswt5 zTMf3BgHa_IT?zK04@-YJbofE&@CF6g|GaYqRt6jWBONt59yQd;DQ`NSn*N2jbDM&I z(Ye-6{HUFTQx?VJ~RIZCLqs(ub>z+Q$}%7>jNgRAFlDDw-DEHwNu^;73q`i zPp7|ibos5 zk)RwbqnR1yat}Y|coK2*2cHBz{QmWblka8HIp$2kzj0`ivtz9XlB%S0FzQp8+>M4b zO*(fIPnM5*jirv)=vR10sndL}8QzJ#EdDpNcB?F|`+cC)4RSem-a)^MYm5n_h2S{~ z6Wcc3PU>c#u%yOAx8=(KhLlg;o^eD}9j0MV{Am8Co{JlMtHe6?CD>Rtlg|*aI3(bM z%-~_1PgAST1i=L>YPknbI?6|?luq=SQd^lgrujBg$GZf6@7d~&s`{wU9j)3_O!1W= z)@Wd}vj~e*v>G|FsYR*|HP-&r4B^yIZ&QgD9RE{r3vK*=W+gQI=i9y=Z2zF)g>u7g zrC~Q{tE;V@<<@;l>%Mn~%e^lty)Tt6O_W;qm0IJVv^MU6PhB+DEuO-~*!J70m0;&; zu(=%UQ-Xc(LhSH*Jy&<492$NQ8fMF~Umo#|w7c%M`$zVF0uks3@G=(s@=S47%-}~G zHvSOJ~*KF!D?^-*SN1Nw;eXtxZ7{fpsRV+8aF|(uwszc z7K8o;bJawib-aoMXFYXsMC9@D{p&3}-^btvFK=G{MG-M7s@?>+5Tw}Fk*PWO1|@73 zAs|3-@U2hn1De!asBTNJuV=>CQ~(J+Ml#g@NA(#=_@Nn^M%55iKp$A@$&D-*KSy1> zHl4dl7opYATpsb*IPS{5}WdQ5)wJey;sLwy!z2IXr7F 
z&KY@xq4k#?ryP!bk2$346V72r_hb0hSoiuX&MwEU#~hIL3CA`^?_&-KE9(X^4{9=uKt@j@$FLJl={XmMzZ1S(_`daz(vuV&>jP)h#+m(Wv=&8>exDg6@~h$$T$D3nriD^6YjjLudYZSomay}^(s3lpI#uE38oOKT z)I~k#E8Uq+Lo{;U={7sFV%CF2xC}vc2?F;z-m90G8(Mzt!OCqaQ|ZNAcw;)2dn{@X zLOSNGy&I;{&VUb}Yir+7Ga+L<4by}(*~9P3L>nOEY;qPSL*(W}#>hYhS5%>Wmv5hj*0+TYT%>?Y}sZz)^SQH(d|byDt_ zNB0PxCp9FV=c(T2TK^6@%YG8FnD#$m6w$s<`!{)%jVViI|Dz=RINRfai$|xT)15Uyk_T zToDl_l48}!IFT}8Lvqp+YVB26o$s69dJ(UC2{O0!&^d6Ir>KF)(DO~@2%2VsDZ9rR z#j$eqQR0jxTJ~3K)5B*6D>7#U&7y}U3vGYVOG4tP}}(Ps!GU!M@2YshM{6OLN~B- zx3grFCQPcv`;$F#@^y(!_B6)rVleRCY74q6l9a02iAYsTSsk78l7vDvGnS;eub!2B z$kc2(nZ+386;`cM_|l`9k7SYF*_4&LUVwQ+yp6%@he%#Q@~34O#xHR78C?DY;0ri_ U<=whAcDyiA|c@Noe!UD zAPwgtBtpym=cDIhB$nhZhsF&6w}EuHA&=bAvv8d$l499D9>O%~OycisUsf8;D$SH;>zc=F@Hy4Pr&34zC2w0qAIOEEA zCxR^R6&u8e7#;G6u_3SGyCn9z+qqQZnC``STZp8m3fcLb+yxV-coFb2s{7drXQFUs57N7FvGx_2( z^OKj&H|jFJFB+wiF=h$wK0S8>Mhs8IxB__;uFweo+7E%;L@jhViM%TUj%HDmY{ijOLUo2tz z5_UCMjPLOy$3&t?qBJD=f!tlE=MOm z3Hl=bo2NexbK!U?)LQbk-h%;hQ%J1C6f~}TeNgfScbOwD5QJ{gMLh6sxKTiX2n>iG zn({%)OH+Q)M*=hc6t9PInO&48&nh|j`rSj&&6L?Gy#~G%^hG`mrSKxV=HWcoHo;!n zjQg0GR`8{(@se5+(#WHxah@#v@kBJ|E<)dw3iQ zbNef<60`ZN)Il!ex9-RqTXs)HtGLB$pPj9q1AXMUliJ(?>l$@Ad5cr@r#)TpJQV>& zXGZ98(OhT(Urc&9`hpQxA27y{)t7aaVaW}3F%81`0GEWn zvHuwM9N?y1soc0Cf^nJ_qnR|HnmTTeYq0YG*eJM?N*-RvdlbGsBX=x4g(Yyw4 zYfctkNe&wo$5NW^C9*bm%cFaM4@n=AW1mH`8VHFZZGRltpzwGqOmHRZA(RM`p+R*I z3bT--dtG>H?gr(^61`EH$`%R|k>|*iI*U?G4@0#?l`$w#-Ct0FL(^L%?TR{I$Wc~l zhKh7a$}O26I|LUk(0K;9X$5fY|sJPNWgmn~+6SEyJp1Ys|VyQ8EB zIwrkH5Ez)!j~vB0It{&3@joSuvQ(`%a`OP7!<35Xf*(s)yjlu%o4IYuLIE-=(1aE1JM{T8?X7Y zZUNDDD|YOw>_+U^t(xq6u~M5LT0L$iHYe(F))!C@S)*|D)nb*|=XwxL466S;zR%{j zQtJ0iW%40|ZwRODao3(JA=3bGI&|q>8%{gg)P9Cx3p#!dWnz!#%bQhuuEJZu=`TK4 zVSv*c+rjBGuCaeK0d>O|RVM@|j--%0h6KejK^bUk0u5h&MSe*p#<8O+j9uT~(`#N3e^hkT7*Yp2jSy2{MX=sRuHSSrLea zFT4|`CX)tG?R2OLfZo?MiQbWP?TV~O=tq!d^O}a9l4DybjBT5thSUy&a0Ru|+F9oJ zFZwt5#5%vP%3E;mU0Qmeqz-JBN^X>SVLC?Po3eN-rx45xeUWyiUUvYj8{5pQa zme9Rpsv?8aftRR(BuTm;NwaEh9_CEa!;gobXOKjc1j`+SE#jre?B_agz{ 
zA%hzkr{JF*;OD@@bq`l}m1~7(t`(lSR(R%G;YA;HvIImw;b#J=pdK>Up_qM1zKd9@ zLcyc*1ZC~I_j9jP!PoY&h%WG=)bq3WGya->7I3*{d_Ca(7y_GOlcyz6)6)WXqb_BF z)YI};6{-QpO^+=+E&hxEsv&UcRa-TP`~&y2FcoW!ZAYFK)(Wf#-eLXJRw_*3*X0Z{ z)fO~WLr4_FP$p!0TEe^Vv@kVk^=W!qcu)wq7dS48no~B zNQNLBC1^mWTnm@bS{|@WWJs7M@)jON*t3Go4A`v@Ug@-URfe&P{*;!oeT;)CjUaT< zLoi<4EkGkQ8-fXlb{tUu$BW~?HODQRl5xzfBo^LYb*J8_LsU&egE-N;`Eo~K%{VM-+HtiP*#aQ zwK4;w6m4IbDMtq&`OahI=&5zy{Hf<=3Cml0eb8hVYySkhVN!I0LE4wZyLy>hk8EO%O=^)kmVTw1y zrq*xRh1O@d#+nIJ>-%7L&u{GTDS$ndARvIBOzgE*mBYOl$wbCsuB#7+doh}cj-yVk zIv|<`LM*^CiDEEEwPo^uBwb=(OP8u?0PJtg6*_iuJCT8##E8U>Cf<0YOj%5ePLJz#{k~x_`#-e=nr$kb+KH2>{*LQbVY2R>F=O ztpw1)1|e79wrQF)*!88VFn<--g1w z0Is=IlmH@u-f+0&Wk`|*B=!#A_I_R55i)$j-+@#rR?`moZDblnq9G{)aatIrQDFuy z^cp6Hnd`5}9j!1w1}bucwj)_XP*!c0Y_dv>2t`dY9=yJVRZ_|I40Nm&U_CiJz|G0^ z=ELRY!z-zB^U+0NqvhaR(Kn+deqhB9WGTAhUv24z)Svd3TDn&gJ&^pKR!a1&MtdQ7 z`vpqCHdyD)-zq;)iuSUskyyi4tgKv-Gm78GOE@_kImWRhI615wbiNcdD&XQPewKt0 z+v!A)owmjY?6+SqimNEpd{jHcBOHJ2{Hy1e0?Ri_eJ4vDql@Ru;Zro$<6}R!x9#hv z2T$@2KVVyBXnDQcnRtHKQyLat2v?eA_y$t5Y!r+Rdl$I2len&fc*{UM5nRNkgLvnm zc)rgWisz@Hc!7#gya#Vsus{(8Om<>euvR-VEIx;t2EcBG;-Q21frsM37C;RLaV>a` z;M)qgRKndHYz;)fbsRh?_Z{5d^T-B@W{|(WLF<{kR?R6^VT(UKi$cbyBLQj*L*R0v2hpAE^3*4U(IJ%Z|*NLhjl&9 zOe4F~J!vnUkA(P;*pf+@26Iy;P;*9_taeGKTPRsQbGUty35a{G_o;`Y*Ju{^iTg9n z<~8>0!YJQkUDrCBMtQ_E$Q$5{MtKze6Bkz9p4+uj4O z^A{ih7C#lyIo*d_n>pQwj^G@PEdUT6M^p@9YK|Z_oyVwIY?N}7=kXdnKo$bX35XUN zowLBX)vA{{_AzAvbq2?KZ9I-BKV49-$pjEN;&3m;(Bu`k1jWkzlnyNL^csNeX?~F0 z#4$L%_kCapU>%$rZSOrn@Vh(bec%f0HNYneM%bN?4o>$&NUyBq^oSJ%JgKS$buRT# zHLQLTulE#^`ksCA8uE!q&{Io(7s($0sesnU$nUWt)4hMZ0$*3S{`-2Ib$xvr#+mH3gR&T{nNIzO@u z8wn#nZv9a!oPN~0+B*VgDC-?5cZ@)_@p~T|8Ob|#l2t?5QYXumD{`zD62M2hHp*zu zS#5CKZeeB6la*V8WUEoJ%8$VC7*pK@^m0Gwj2&4{T!?vo9uqDc*aGuj08^|U=6z6s z5jEV8?plEn>a}qm)?wA5F4`!_5tjXTfSi1y-w_#yV880HTOx9zpP66?6g_H(THCHy z3nNfmVz;X8+-9Hb050)6!ntd6+t{{VzqW(B9w&2fivd#$3l5!Vw+5tMeSm&tPVXw6 zh@YUmx}El8pP=w#3$eDO2p_rG`_i^Hq<)iy8OLreQ$e-{VhYICKg``!;cYU%4zxd@ 
z6JANn*TEU28Cr^lRD-HY!<|<7fhq#Gsq(=GqWg{Jj)3g}YxEoW^!JB)%c#AGW1DekiC(MHN1(!EYepF7?jZVtq5`Ww@>epcdUMo8l9YZgY^0yGY7j7-4IfuB#p z2mD(|vPdS8Ttl*mReHst?j(@r*;CtG2 zKOXS)7>$r0R?~iL=0)-?APWf^h&KVALHLaKND@Em{Z^`34@go@oigI^rsv^%ABemo z;AY@--uljot}=bEWPBNwHeL$AZ)qaa4Szkziu8b4M*WKT?Q8K$AwNln0Uw6aPpkBZ z`Eh0%U;&K9{!mnpTQ&BF&=h;=f$98p9Gme4LOb3ReJYhtO6Q+2LJ7Y?K-_ZZ_S7x_ z`Pk!fxju3GT;5-YIak}SIPUkkHLm5CT*I%p?hm;Xc%$6>cUym_^`1NCI&ja!xnmz= Z>VC%_*P%~1Aou$YxqAMa141A1{{l#fA8r5u literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_providers.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_providers.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..600c317d152845d7bab865e3076e0e620923aad7 GIT binary patch literal 27676 zcmeHQYj7LKeZRx=2tGgxdQhSs5KTQGQIKSdvgla0Y)Q64Ikbl`617xLgRKlTOn%AJR@I(`1qlojyW{l&D_pX_{mvbw6|*DIPce z(CP2Ld$)H79tl#W>v~2vz#qGNyL)?k_kaI)akIWYqTu+-Z@-`a&00nI1!b%wC=%+; zdsIbvK}jho^|;cdGBr+V-*F%F^WQ*MQ1``^t`G~GXEdhajQ@B|SA<2(vRYP)vcU1W zu6kDA)xa9MqAbeK1&=p&HL)h14jo_76=N};4j*6HwTi9cY3=yxt~G28(lzT8)_k(I z=T2*w_HWNVJ8h+!l~yGcS*N6GJA9(7!#u@W__;cqtGCXv^}MVBWl>qSftNL+tVxz_ zgN$`=cIH}ZkUvZcX<->AG%-Sf4JzOjf4CRin{MnqNxZ(YW zhl->9j*`Yl3+`Hdc&_sBBZaSevl)pOlwNr*rFNLtlt! zlck)FLW1&E_2kHyQJZ1eyq?R@71;`O9>rI$2e_!b5{W(k&~&6_D$sHh1xgo5nb z-rS0k7nG9sXUKY^V#)_q-M3fKeKF-&gQ66hddzpu8W;WMeIIC4lv*XO6nq7BFR<#2 zTD&vuzvmcQ9NBJG?f;gd^aiYUJs~S)|Kh#&r33ral>fvX`+OZ{-Bh4gv+DG4+_rM! zw^gx5`#u!(x45oS=dMe+RweDTp1(D=>}ppl+-c?EZ1t#$G1+(GUE6P6BNa^gcHwRd z!Bi*}P6zj>JQwP1vaaFii(ZL}16K}NeObRbJp6;|$l(DznK-@$^|X@Ey6>Yw8KFLo zP)2B=pD@S><@*S446Rtm@sdgPE#JX|3H8E?E!#)N_9XQ9!Gm#1-j&deU?Dr4(1`H8U_&Je3{F@a}3ddiHdVdN3Lu;MWFD9%Zb^ zj9BJ$kqu`{ncVr2p=^PZo~m)J>x2q%aWHx? 
zMyt3V+{x+UD6_E`qa}30==zP_PV`DvZ?Fm->z@DsvTsg)bJKL=-j_E`Jn-V`ude=+ zP2<6-#=S=KgHzgrQ_T;SqYut%=I4s`;P|M_iNsY`S=r9v1e#aWMi!m?_J&_+x>Kj; z_Jvi4TLRv+;ALRAlNQ+2vY_^YT8Ja9+HbAJ8;Y$KK0U1V?^J-zDSxlT^B#%CRv(0m zF5KM&ER-1Db9Lq_9papPQdO==!wWDjYwVxEh=RTeL1!Z{jgeq zM1aNdLj-OwfjbH81F0kG!01)w)z~&l(UWCc2qXyXAb^)%VeJ6uiAWOf+DU@$a#EcR zou-tCjvzT}IR$ZwN10P&f+B96OKH67LEu&NZvcS6)HP46o33jcS6_*)o!C6HVb^pt zIi)49MU%6d>r>W}&t8ya?wlyTrX?pfJ8L*{vIef;tRk~lwdB;CYhKZkY>qfWNXW&J zi%5V30gA&*=xB8a6Dc+2>-6~`fb`On;)hMPxVQ}g-y&0SWkA`iv?~^B_CxqN4K}(7 z?djY>|0dIt17$L#wj1U*(bVECy8{@1=yd+$nLx+m_ zOxxx@Q_+z4SZ1_vrcgXrF!ok?ymOmuz$NrH06gjX*mEEL{SUu%;Bx15eFr`FwKH{h zyy;gO_RrRLT&wSROV?2GYjwN*e-Swto>aOMnxSR*iP(M;xap6R8bd?8mTW-^R46!ty>hXIULnG7(fl<&)AOC^>+HHv#ht)oY?l%D1M0Kj!hp#+n9&xBQ4**sddLjCTQz1Yt6$Y$H zX)H2OlZY5AX?iW8-O?XkYelDhc;DnfvxK~Q^UuI!KBB&$Jg%nHT_6kvf7;(;4o22S z!eU-=h?G6fp3F)%i%j|KH#4o0h`UaCLbHfkhuL=CerRO;AhxhhvFrB|+2V&amO>yM z=pluo;*jTqCsJH6KP}(?pMo_>hSWoJtKxuQ9!iHW>hOt_LonBRCDHC#SH8w7o+hZe5uHtaE0z=YLHOoFK$%K|PD%lF7s!2smB_N!tP1U8t zrV^m_VphcAn49%hnf;5qwBHuP>o!?&Gg@ zn=+|(b3`RUzYoH=RvI43j6i$WNx+I<6dXyrcPKl2sy}<5@qiZ!36WRm^?HUboUwF_ zn+A9_~Y_JmheRIliC_sFP69Lr1dgT)rS^ zO9g@XAnLIX0T5fhNv$p_-_rz1p{bmltLQ)o00GbGqeHMqSVPYA=h>u>#nFV_4dBvy zAW<-)LX%Ojf-|UTg!LSVUx_igqEXv71VLmtR~jrr(}HSJA~l1H3r0xHkPsGV8XN+I zm+Vj@WQR2Gceq$MlN-a6=^JDpr4nt#w3M(6b+wzlW#6L>11rG5r z*MU5jOe;*q(-NAD^$uASa)HNt2VLXG0C#u9E zS*@k4wY+rX($;V9oY~(sz2o?d_K016?9yO)&!HLZfnUb%n2v23*RD6MeR0#5HcdCR zj)$&CSCyk%W;OG3McXodR^~+Fs;jK*%844+v@H|sWh>NhP~fDf==%YKT@Fz#sJQBON;@8uL@%brh^MIhR}(A&KS;FWUhZP zj3JT^gE7>e=yx!Nx@9N9(gI?QZBdLtwgL&ROVuy?r~9l#Pe0GYxK=Xea|BS8+g^(+EM4(w7 zzzFi&U}ve|2?CNdvon*P9Cy)VkI67PZfOolrPXHvh(0$qPjr?W+b%s> zZtNHjUXL|TwIs{2BnYn7_*}y?4HKpDhO*XXQ|4Qz*w7Nq&lPPmQEa;a>8q}?vMVR5 zUeh*DG|N_~;mFD2X>IcY6ua4^*bvD*=(24AN~yg;Ob|b~NDedlJe1d~A89cn`;m+C z66@uM6uC)#kb=5()>a=1fs|5wJRN*QDcVB$w;(P{3s{d7f$i21%7a`e51lybpuOQ< zDZuwg@!Z+U8rAzy$gSn}RXm0Gw4_%q+H1G+7Ug9tP`R+ArxcvmEpxvhRInW?td#)Cb%_R+qcq7V>@}L{RF`8X 
z9daxulMd3Dq%KHwWqSw^f%g(w1$kUxJ{Ka73p68AuD=98YN@+N=&N}E5yyC^f=>X_c+M)s6o2OZIz5R0V<^{>rabll?J_Q{3>q*tD zQAzq_Uqw$=>VClzgLY+N?9TX|B!v}^_XZX*bzTcMvliT>C{0$i4;K+!sZq8uk_#7jd=O~@}Kx>UhQ znnkWcN-eU9ODu)l*&GwTY~A3-X-lAVE04~4htQY)8Gwt*&n%pqS>Jwn;JY86(GI`j zr3Ahkxb+_#x9n&E>wV-{B5?<|VIH>EhMNf`##!fKJ`AK(;_nq^x4#94mr{hccMKRr zW;ef!!v`Eb7s-X~t9h8+?5lWUoQ(@E46<8Q!8?vY)oM!&a@@0bzqTw8O!~RU2v%Kq z&TXR^N$mV^^yMwPcpA+Ps)c|l4`|&N8j>*GjP~btA$Ft4#@KrF3})F*UL)9vghL4l z+o6;s`*`G&me-tI;sI7HmL~lyj>g=0#5t+Uf^@g=oZp>5+z@IRQF7pjYi6i5YooS{ z#?tw4)v9H#VUymu9c(_v7xrnDecttq)W zO#Q;oUBgZNHKv3RS%O6;Vo3(puaFQ5K~Sy`DA&{6=+7R-oh_F-b9{a?Ltf8h&t~&O z*;DWlme{DN(+Hz>&O@c)<61^@&5O9t$LLO!(UiTm!nnk({z6RK zO_w8z?hGl|@62?tFVkrra3%Z!d-v$XZfwJoX?YTkY3B$%o~_Y&%df>FxTrn@Kq}VT z^$@riM_W1ChCqg?XxnsjD-xF}2jB=YKzPAabgK+U5D5e#jGvcfh;y)VJVK#OoSAmC zMEbH>ESn*LXb535Jc_&P20MQUzl_)%J4WFB1cnHlC-7+k;{?7!;1Yo@0@Pe|)I=$o zFb+r&*W&>2ll!#b)*CBpf~_}Jhk_e#GzI}0{J~vsL_)#cqA|7W!&e@rwMxC&j-thS z&6ze3Mp(cKdfx6h+ts?#2u&OnsfLyhvoUf`i zZT1yN2D#8((`$>__MLi**yXZsh*QKNgqOD-K%>dXGMPDH^0|Oy#IH6-m{mYfUkuZV6AV&Jr)IUx*pz zQycl+fncI;^bn>m&mBkcL4@+_acBd(P8WwLsz3hoi(ickxj#<&OMDn%$8lIX;|K~u zwD7rnX)wMmxy?|M7oISK^yn;c>;slwg@mbBdBpC`!^DO5mE>2^=_=sl)aia6*~gy6 zm=i&VCPrMG0{Qk6IM98eey+a9K94%=3k1GMfOvkQj?oLmo+r=-U_|UoIz0GXV@pt_ z9Rzj~XeY20AW`q&=1)*54Nq7;xoY-xWStEBZfB;G{V~=45&_cTICJN#1uWH#{|fhh z3pdO-6|uoA*`P^-Gm>-Zy72CM*Z&|crmq7dxv2ayws|^s2Yf}9I$Lbbi=AKUgtj&t z+g6TkL*lZZbhp^H%l_#Ysc|z=f>#}yoKV4_tv$a_YGaNZbhqa_#R<~gV%uc;vR@{r zVzy=|JHd6N61j8Y(J2j}9NjsqnV&1#PSOhP0;I2^3@W5qR~P~IKPAbvec*4 zIJsHS5g<@2hQdSUDq;i{(hE698Kj;$+aMO4$sEl7DJ{LsPPRl%_aZ`wXklpZk$hqF{Je2Q z#{KglkjwR4EMyQtKYD&H*&;%~*MJ~kH7vCGMY;V3&`t=a)Wc#)3459@M~}>s9-VQa zo#fJzJD>(nUxv@AT@O3w7cJ zkMkWbmP?^}a30KYuj&}e4P^VqxEeE8KH>h0WmKzvh`AlZcV4xMt);VE^EyST0|1i? 
zxY=r`(jb8?1a3v0;#XXXI@MfqE~9bGpRs4DGd_wz>oh5(RW+@h=q@+yz|t43;kmkJ z>Yi;_k_NSPI=02zgvUpnCw17G6 zhdy^eh1U|ARDdt0p(QFNx4^8(BZaLqI@LuirU_RprqSjN>GsRcWy01^2;*Ja9C=Xr zVd+f;tcP51F-`cyDEDaCAr?P%nPxn-EZFfW#G>xRsKcYN{L5CPuV+aVQMM9hsAXSF zBaLrM8kKB?Q8lEZrrubSYD_hymo4<7pusq{LKw%`2wsE>>l`Af%^JGCY>y+r7-C>z~CfET_5bAJ88O-}BOSL$d}`LEPDo&qOw;d=VN0}$P=SY5Mf z-PDH8@~Y19$o1HUOObMH$J<(GGY?s}m(|=}XCs&4^Y~jys_}TtLlW)V>ue;se9x@2 zS(xm(tX9>Q+HlwFGK#ZhykFdBY!dpl?g*(h-Y7Tijt$jLWXak!5-9dWi+a z0H<;9%NV!0f<+?8%=+EmS@U;0XS9dj1w>fTy7yy@mi;qvS-rsRQ!?wvnh`vdA4%A!1^6Rsbn&OpO7 zHsQWaHwL=bz%{#z5Q!n zRl3#etJK-`tQ1~*xjULm>9Jc$GnLhb8>eI|C_PKqiPCY;-u>DRIH$92dG6GEN4#ox zDqG=BVFWNE>nq)YP1fuiIz%-$|kb-0GG3N*{|5KD+aPF~*pg49Jg_9Ac;b& zFB75RW~`aWbb5FOw~%HlcZ}iU6-`YBJKtoog?{Y8iJdTYA#u2I*==N;ujSV;ApK2f$myS6bI&!l3nzdlaX`7P7>3>k_Bu-M@P!ToYMV?x1LQ(KqTKkhlkSCRU z{e3R4|I%Y$ZS?adPZIr>M4n_TAg$kllHNo|60@o`Wi&z6nl>Q|NRk2a;fN%7`y|7M z*k&RdkWgqd$|vy}W7*)_Ar}=(Q5f+?di+3z-*gM;F(>2*6RR+I( zTmf9uZPe069mLA8_YpG)iwK$7&k1lu^FqWq3vg{d>xGS^w^(5}<_fF@(Q!DhV}sg( zC8ML+j+m{7}vIH=xg-O9<9%sK?Cgq})b}R1@$yT71c8I{TulgN1pQ`8c z3^8+w^W4DQiP00c=Na#)IG}UOB~%rrrt|XO6oU9Sg~sk>U0;3FO-Y1)WQkP8!Vr9Q zoW$h%?Y|nB>$u|qTgzy{{x3|*pX2Lr+_fN!{jPoa0+e6=2NBphJ)SSf9iui<1s~f_ z%{CKgC2$9UwEzkAtq?8q;_X+H@=WbTiPgtK5%MAGsExoD0z9N+2c@GY1JNIC}iRs22FE^Y2C(%s*C$ZytbnTSZI*~3%TW2-% zb46<%$A0gWo3WGfG|DE@GFNuxWbw4tiUna)2&f;$YHS%wPf_|1+^Jy3PRi3Lo4_Ag znDr?f-_FTmRQm0WpwhHcr&r+zn^!%&Ibg~PAx~N1H}nj>B!vH@69kBD>tTS;DR1~9 zL9yQ}Ka zT6oDB37B4|#X1!p4{NNO+c_j(9wZ>PcOyn;1ctNhj1km_i)V6T_x49nEP_D4Pbn^W z@yT_IWQKOH*h&6A=Es|jkFb`%?pIayRi*V;it<_IGtpO-=3glFzfvA5D-XS@-1Vw* zfWy0Qgj99?>zd-zo^JV6%dEPhtgd*|SF5(Yt^nNhE56w4lznsk9(DJx6@WKStLxQ` hzg7U=yswo~0B?4wwQAzmXp?wzeXY9X22$dQ|1V`IvV8ym literal 0 HcmV?d00001 diff --git a/tests/__pycache__/test_sync.cpython-313-pytest-9.0.2.pyc b/tests/__pycache__/test_sync.cpython-313-pytest-9.0.2.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae06e39393310840b1751c8df34bfc0fc7f3eadf GIT binary patch literal 28429 
zcmeHw3v3)odS3Uu*c=WyA}LW^J-9=PD`_b4A&R6#Nh_^Z&((vvW$*0rOq~a_ccSf=S z5-1Wj3kyb&@2~Ez?xA}4(5-xzSgp}NRn>o0ch_`%_5FW6_^`G%DB-y9cYl%k?=6z_ zLyB03-w4PLr({WbUs5Giz9?OgnH-_8=c0#s`M2+aU-Lwy3jtQe&v-8?7phtHg&+%> znl-Ejn!byn3$?74r};0|T?n)Ag-vYJg?d(hp@B7AXk?8z7hs#O)(m%8H(`Gd@3+HN zs99>0)T(Wgs`Pt|w0`px+q%J0s!>Ytu~M2hSV|2_2|ZTIbNoJQajwof$F}jbFw!=O zwC$6gX#Gd@1W{S9Nn|qFTq2jsX0)NGN3Xt|m`sgb&W>Hv{WFQ&*c5aEr?Q!e)MV7J zZ;mJ5%CW>)EDvi*@6mK~oR%SF=3#CVeN zET?7}LnfR`WmzhhN@~b*7PqQRS=Z!x=kZunsP(|m8cJSIvItAwK)z&Tf@P;8+8Cqy z!pW2A8;P4*axHZzvel9B9e zZe}(Y95Y`s{)NUthy0M<&-bOP5|dSlc~lwA!?R6dKGh59SACEH)el*Pmt2{wiYj^& zRl^zMhr>0j$M|R$iW)NuroyEv{P)sD9^gHHUm6wXM$PwLlKRcGjI7GpwwQbrS+WQB zQf?gM-;ktHpAnX%;eZvgztNIBF>jx&dPiFOJpHDwBadS1TA?;6=CQ7;`bIgH(7EA| zrD1=!NmkpWI~(6#c0YBto)tpbvQ@VA6p5y?T<&)ya$S*CpX%@ObmD0n^PCB!1u*<(XZo@~obzrk-{^PfF41p}8Hawe35y?n!0t z$a4)Z?47xJAgV=DnFs|BMm4?0fe?2*x)%y04nC?&Xh5JGSL}x(68q;`R)>i@K}L;H zuT@W8{>-H_@yll}Um5 zXvW{1O{D=knjT1Jp$w#sYg5_T^f=EIpX4AK&m^XkdfiEWlYrzijAfbL7zb)iCvpJ% znRFtsa zuU^R{PqWnZY-^MG$?2|M+Z#tdK z#nY(_`ji9isMl3F%L}+W?fVe#NcWmLzV%|UX>h(~Ioz37j@{i}2p=mc=4VkkmJfFt z(IrP%C?N6fb`dL-#YA#ZIkp_`&MU|79xa5AmlX4}s2tCSyN&3QBPV`#%7_Kda}b+>aXgzI_!^7?mFdz()bWTmcy-irEdY4*jG}_&!W=Vj>wzj2NyvExpQVsVZk=o0{76E#v8yvb{0yk@JA8(-xUK zCy&>H&ItlLEa)jNV;-E8`Zy`wWs_1+O+jFXT}faW)qK9}DuSe1*N|XDj*5sy@~pc| zY%9y{N^Bp`ZBU{O-6auXyDnVs(ymc)cSMNoT&{O#=fYk2*VGn-eoalURaa`D%V*GP zwMtcDev@PeM#EOMI?CE)rP<#g*#WC8>)RmN84rl|DFYzdcDY%VuQ|!Z zKp9y)Y>>sn(Q2NL;kT364ia#pNDMEW#3Ceil4vE-1`(}d?Gzp$u^Yn4&ib8^GIn;z zN!125qMrs;ix|LVa;@o^+|BrnsU(^17H+MEPAl{Ld!-wX5_rkkwpDu=@ zd8K<{n2Q3DAoEjHy7S?vh#G+plBat^oNaP#owUNm2UJFD*!RCSY+HNEM zQGry$uC2Qav&zxHyZ( z3>_WLBmm^_Y#P3+OwtPGZe)XVRs`vcFqx6}!;M4iBCm4*=oPJ_8R<@1;4$}4i9Rz{h8Gv0= z$3kDRsfVn~?Rll+))xxlj*?=27L|_qpEqJlc2Jx~+O02$SfMN?l8Z{m(<&O*=dgH3 z8j4oyN}T%-;&D9~k59wJnoc4dipPI$HjyUJ0%GBKd?LlP9NFdQG~$dHE9)Wg5{Xj~ z`j&WHBR}+5Jdw+>)SI*Tb)b5N6o?V>PKbfZNnCJlP2J|NNe_Hp|A_}dufP96(Btp< zSn>Jy8Cghs4FAOeBa06CVK3x*y{Pb|Dryz0o>cI>+xV+s_`At3>}UcX9YLY)#I=QY 
z7B*?M0o@K+{`rg?lkHSaX4@?29sh42Vxrrq;9r$v9&{icXDN19qIx&nLD<)G?;wOH z)Vj+R{?j$Qt^h7CsJ22s3?1v}wuox4aaVQ?T2xQm9Z_vNx9i*dwIRw%j-1> zsBZ&aUY~V0);AH*j0fPV3^9TUSf1Ru2nuQO#$M3C07ge<(>aEFm!cka7+=H}d*__a z#Xf^C_C6|j(s+eRcDVKqI9cIAXQW^ALzh!TgCIW7@e~a_J7W>V+!Vp+ zILpq=B**n5mD`Ttm8+ch7%rv#A_QD>_0ey2{@KoA{ki$Ta(K({?<|COmlX4}sO+A< z{`)&c+z5O?sUodV7BiARRCYUZLBkOf8H>v9RS4mC04^9a!95zsu-6LN-zwUr-Ofj; z0{LO*f*EBJ#EeJvR-iV5a=DwOs*h7he;I|e^SIJN2ZaobigrHi+BzPikapSbW|ts? zq79NkY3FjkBasVj52GrALMm#t!}z2A-l#*nJaW&hI4T=|Gku)eJleUM=Rc@RkG54F z^m9-fYCtReLw9O;Q`D>AnNKIil1A$U_cpD$RmsgA!ZD8lEpT>?t(t$?rY?gSKMB<@qY!PQSCYSbvh* z>At+uzi_<}?k_3kXHn_ThxZxLB}Z5&AaMb$)>JQ)#Y8fcR%w$Fw83X>62H)j1Fb={ zuAj~}sRo~6AN^?m0N7VE#71x>_F0HWJ64;#usY1mJx5^TI^BX%2gg1K@wgcl;V6Zf z5Eu-+4$jY#)0kF5ya6T|%>vPnt_4!7WIjvR{Lc^|4r^f)2Ko-&Z7PHhl@#-{s2t)0 zeH2}CgoOeU?>328p)4kni^`$pa0hwJCm!>d!{#H8IQN*FVW%^EVM4^nV{XSp^6DP* z`E>xG)txs=uge;I=$fZdIuExbP4U14573%%P?F0v5>4H(OiqvTfPhlBEX1?BOnzHBV!I|_~Lgau68Yes~YAMWOX>W>9r^aMs; zvrQd`Ng)W0RSJ3I?I{;()u$y@sw28HKB>gMANT1Tg6LaM!Hpt2m-`)wTxj<2 zahsHDBt$rN63;0YDW%&@-ANhPnVHP7mqs_SXj^CVhR1phPg zgcJ=8&DEB1$hq1wwAJMx`x)HS>M+aRgi17IP|Bb-lTLBRil+NMpG{>p#4X`xYe!+a zC!Nv##;geiv%^wktySik)yQIw3`ESafp(7JmM4s+r8wPbmi=eA(Oi8ehW)oj0he%L z?K$8P_-4z#Qp-T0Wnii0@Iy(izbr4d9KPjQ-n?s}x3IbMp;v0yyxe>82MvYZi=R-W z&~kX*f1hjqP2<+D4Zb&cD^_gWHLn;nwYgaT^3$ZLMWvrlBzY?R3p$~{dr(7_poLLI zUkjc+;roUU;!gq)?jETxb8D4`FH}#)V|%7X8Uz@7VSk>Ec{v!zEErqY5QZ=22N?TD zPC3BXv}!BO3XBQpQv(iIbF~Z~17o{vcUTp}Bi2)JFt&3E%MrC_9so~~tBivMRI)z< zCKy+3VDPcZ0%7ah1miP?YrUh)weB+K^kOnO6M;uX3(oB<)3v6)!q-kSs)BRXLUna0 zLHnm3$>Aef)f9~?23&Zv>2ckg!Q|J9wkQ>AVT9owYAJ&Vd+ZQXgX1VlA6$#%S>W(k z))=m({Tu|m>vhNHy~}k6=Dldv3%iQ-2dPnS%PZ}-K)u>aiuqYo+UMazr`VDm6sM7P z3slQgE|kSYGL#H2^BG)v2tGHWn(Q2j^CVs&LHEYuB*sXjNzkpb3nVC~Q3g#7VT5m7 zkRw^!4nfXskAKetxU>fzgnaODhpPOCKW^~(Uoi4f7AjdBXx4Pd4|hYZ?cruslYwZJ zzL_fj66R`OVXt7OwlS^xqpK)lOtcb2h`fPN$`5}TsqcgSsd6W1Va5~l44W5*hB#Ds zuvFh5I>1rnG7cvh#1QGJn2&q71-_WmHGGzS1s}JcBTQhV&qf%KIC|^&;Z73EaN{TA zLk6ka&|TssaQLd-(aqhGw`)*PTdQ_B`4zc%m?1#|4P 
zG^WR$?2O(7Mm$EBXV@W%P?KZS)kTAPkk9Im(<}|H_rmCNt}St9tX*04wCS>!CPlN1jf$NNxjppg&M5MMg3=~_ zp!6?=+e9Lqa?Yrc{-M%;`+!r$iXpQo5v4Ee5K1B-+IJ1&YRors*6Qb8r5l z!8z{6T~jPESW{EXlfuMTW6qV6=-5x~YAeSoHCrE=?dxhFJ8zQ8-?3_Cg@x~GQDa#} zHAtm0@+&svulXFp{{89vZPjYi!VhwY44R~}#z89As_PnyOj_S2sr(X$$47M;OuQg8 zgdYvy_-Kpkf~vVojwK41re@4%L%^KRh7m|32?2YGTNlRbSx&o=%1s5|=JVY!0Ezi- zoE1iWe2|iT1}{E)mBb*#Tq9O$t)#XNj#@si7MzM>bkM$#W9A$2BNic1j>3F~9PHzE zI`#7;I0|#BI*h1ubn~Er%N&y3hE%8>Su~cu4JzAaqI7PSWi)gX(-_ssC58sf zi0}vD!IDz;DJX+9^l7F!Vju9F#+f27%ML$Q%91j;9B$7mT?>IixT~a?pGBoBPpq1v zOOCKmK;l9`#0q6Gkqjkc2*-)qf@=N-H&kt^>5lop_9rt{y_`t;p3qeN^k>@%?6>a$H?3YMT2VhL;;_Yii`^EZPI?r3`4hpTuyjqHffA?inchF(BfF3$(NX7brh{9(g!Lx$<^|ua4jtdQL#nt&MQ3&y9?o-l45=q zm7YA!N}%YHBPa z5cK)GKMr~Q2aM~G4grujOrvgt)?}km(UYpN#&?L8_1j3IMhbr2Z)mLIXf?iO5*jx=v(U*NK=I4f~7=i`w|h}xEIjq2WX5>9xs^|X~1X|3*`zBZm>j2=<y%oCg|Q~_*@}jYrT;KE5r~gNjvLnLK9?46 z7^jab&!AlGdl10W#_hL0UufJruPiH@zEbz4x+SIgp7LCN+rEX7C1v2=rp@n;eC>1Z zeeU)^aZ5*W)4qBCy>P?3{l7k!S6Xhp%Egy{v81#ZpQ6$-KPO^F-~*-QH#Ct}D2o}% zA1W>HVv31b1~jY~vRcKV)0I^gJ2fi>YfQ@tC4JJB?(W?m8Q{7(#b z0Y2;xifRNAB3&>6%X- zwBC2M;Zy&OtF5v(^c#y@=+YY0M$_o8Q8%kwVl}4G9~xc%?-8=@!umFi{;y-i?pZzK zEqmJaj4r-uh*2jx7d%T#8zQui&0LL|#JKl3kFejwS@snY2T5?3Gw-i?w_EOXW@P%Z z8zkN$Am4W}srjAC1<8j3;jD`zvw=7j3ExPJ7{<(IL`8(lZ#!`JFm{%?*U?wpX7H57Il>^)@ zMNwd-2nz)yE&vrw^+H)pBtvO!x77Po|G!J(4@g`fafZZx5>XOgwgkC`EVq=$?|=S5 zUA2Gj$4!2Juc1wfWo{|cB9b4T#hDe89Pr}OhG?JneO`9o6wO%92U)EukU_PY)l3GX zwR$uCV#CHpbjA#bQ^rsG5x)UphT)n%{6}1Vc$(jU?US}zEle%bcSf^hRe4bEmSf(u zNE_LerZ=4*F>gE8bPYvizG6S>WmGzU-}M?Ths9Lrr?qSbBP#85Vt~XSLg=;A*rpF> zwHa*cmeeR4y`aX+*%+TrTuWj}rbET(bKM@X%SHRwQJzM zYkxj6Fkic*>|Ac|UQ$|lpR&V%fAN6GMlLvBN&6wO3PYR|mE>6G=r(bEF zoCCkMGtYMQc`)!h(&siG#La={w%#dCJqSPSb$%F5w0}OiAJ0ce#*h6c@z0t;7m?oZ ztA5I!31HasdaUY_l%2QWc^t3(sDS=qS5U{Gr@{w4Cmr^WoyUFBuNb^_lRPb(PyBnE+g3UsOZcq+umeEpR(mfYy z|9Cx2O0Cj^t=?ozjN|X^NZf+r3pE@X^3^BisznRxvvmr_zfj*SQ}Q} zeAI~5=~Gx;nHuXlX@%w*PL7Ym-H1tbLJ?*Jrq4|#$6w=H*lF0i#NNVA<9n81?Dmew z*o}mSYs)qwp`Abv-;gWB33q8-*xMS<%MscvN&9qZt*(l2jUoVBa7?Z9@;S 
zBn^mSwS9UvsVmbMHAqaN2Mrj$LxveLH$WxhwLUS-}B`#Kd}!{Ki% zox=E!W3u9}P;TD6zeXV%{?+Izgr4SMfT%UW#u%_Q>oA^Uv*u{8)s>H@Ca~|Grk|_S zg>0}OG7+^fUJqLCy7jBg^8v@=l_U9MKU-{wql<6eQEKifH22)yT5KMiul-SNL%wnE zQf+i$bounFrBHhz)Lsg87DAn+P+uX`w-h>gZ_B>?{+GVjnSb?-;+B#5v!D2-x{mvb zw5jRc6dzRHd;4@=AranNQq0eyvKNEQMs&#$E|Q|q5&=`iTEZe?D;5=l#Afc($?1eQxvrLB+ZE zH|Rr5tVRWk!=CnIVHajZivc86tZzqO;vL!Lw&r%d{Y#qqXnxpJJXDW;eXRbq`V!w` zeF?UB<$Vb@dCewc0rVv|1d>o+vW7u{p{XjqMCNrzUt;HR?SJegGLF*SHASx@R_ZEz zrnF-ACDji9mVH(C;T7R8vwjc5zm@T$FS))Nh7NtzM9|<6TR=VQem?lf?jU@YeG`&> zi^QLh_@^WY*6!3A;KU(`dN+c+3|*tj5ojZj<=Z=p%_r82EDPtp z{kgw9TikNd1y`cC&*T*nw2hPbSyZANS17vV2p36FXo&z+KB)!}d|&~=9}k@B@_x4~ zcVoO4$7J6TyJ6l%Em|IA$N(Fu=DlFQ=5=9|wSjMbpF2Z60p^hYYq5 zFsZ8_8$APV_-J@J;0PcXcBPkUfCEm|h!b$^Jnl_j3|ow@rD+3>wN4I$I#?%X0}lBB z_~9svrlG7jKwh65aDHF{PR-EVb7ScQ_H^u=N~EsMcJ_7lclP%5_8;i!IeW^R+ zoW*=LfKQ>e+lRh&`8xwkp_eVzv$d_%)KzHeDm5J{G#x569WOK;FEyPnG@UOsT`4qO zq5YY*-8xhV@5(DL=l8s9$U^w#l45=qkxqNiQFO@>E^>XLCIZmmWJX}vHwR9)dB4{d zJl#_P!~Xt>$qYw3THIw*nV$%TVLr`Dv)IO#z!OoKDvm5l8L|jub2p`*h{{;=a}{nD z+sI=3L+z`&QPGh9z4uq&5vLFq58-h|gKfjE`+r&e<&!^Y8+e{d<^ z+>>wq?2pGct`)*<*i-HHxFO0nR?E}k1=?o~yO@bEHcPubF1Ah+0jLPR@sQZxHwR92 zdcWHlJazP0TSawXTw*Rij_tfRE0l-{d?4q-A>tmK(w+AvPKIvR!?H-qoZ-6 zeOlKtrklGuc6rmTn2~Uya(leM+v6LbMtj_MJDFEV(3U^uXHnV5+aN`k9N{7<3M~#11*BVpY3sMd$4XVfUD#25O20qEB>Am+B^S4OXG3; z`JqfaHDR)B+F$|w_Ed6O|IF$=^u|m4KAuneE`)Fn{Ag!KX=iU?XK((XnvcCv+&Pk0 zwux>$f8bQH`Sgzi8Q8e~{KED8z_~*Ac?9#_=L_NUCB^(KB84!7qDzi&k!uPy5rB?B z4by#iTK0ZV4xSEI@exTHc>7QEg9iA}n(oC87zW4skbYY1yvWhqM=BxLDfrxP^c|dM ze?j8klK3kUc@j$`E|54w!XE#;O;HkG#(!-O#IH&J+vD@^d=Luyk3MKt{2dQASNl)N z51JJJz6YBX|Nf6R2mPEt%jqykV6+K{-rPrB%lcmcr?YM2Khpa^U z>nz5O6W{{|g<;P*(r| literal 0 HcmV?d00001