From be8dd6ee7e0cf17ba4f91dad73b633b72df2748e Mon Sep 17 00:00:00 2001 From: Eli Fine Date: Tue, 31 Mar 2026 00:29:08 +0000 Subject: [PATCH 1/4] copier --- .claude/.beads/.gitignore | 60 +++++ .claude/.beads/config.yaml | 44 ++++ .claude/.beads/metadata.json | 8 + .claude/commands/add-command.md | 167 ++++++++++++ .claude/commands/commit.md | 57 +++++ .claude/commands/create-adr.md | 240 ++++++++++++++++++ .claude/commands/create-issues.md | 195 ++++++++++++++ .claude/commands/gap.md | 45 ++++ .claude/commands/green.md | 110 ++++++++ .claude/commands/issue.md | 162 ++++++++++++ .claude/commands/polish.md | 185 ++++++++++++++ .claude/commands/red.md | 111 ++++++++ .claude/commands/refactor.md | 127 +++++++++ .claude/commands/research.md | 96 +++++++ .claude/commands/simplify.md | 84 ++++++ .claude/commands/spike.md | 103 ++++++++ .claude/commands/summarize.md | 62 +++++ .claude/commands/tdd-review.md | 110 ++++++++ .claude/commands/tdd.md | 102 ++++++++ .claude/helpers/merge-claude-settings.sh | 98 +++++++ .claude/package-lock.json | 24 ++ .claude/package.json | 6 + .claude/settings/basics.jsonc | 5 + .claude/settings/permissions/bash.jsonc | 139 ++++++++++ .claude/settings/permissions/read.jsonc | 9 + .claude/settings/permissions/write.jsonc | 8 + .coderabbit.yaml | 4 +- .copier-answers.yml | 2 +- .devcontainer/Dockerfile | 5 +- .devcontainer/devcontainer.json | 31 ++- .devcontainer/docker-compose.yml | 16 ++ .devcontainer/install-ci-tooling.py | 6 +- .devcontainer/manual-setup-deps.py | 17 ++ .devcontainer/on-create-command.sh | 12 +- .devcontainer/post-start-command.sh | 12 +- .github/actions/ecr-auth/action.yml | 23 ++ .github/actions/install_deps/action.yml | 11 +- .../update-devcontainer-hash/action.yml | 2 +- .github/pull_request_template.md | 12 +- .github/workflows/ci.yaml | 89 +++++-- .../confirm-on-tagged-copier-template.yaml | 34 +++ .github/workflows/get-values.yaml | 2 +- .github/workflows/pre-commit.yaml | 7 +- .gitignore | 12 +- 
.pre-commit-config.yaml | 21 +- AGENTS.md | 149 +++++++++++ CLAUDE.md | 1 + pyproject.toml | 6 +- ruff.toml | 2 +- uv.lock | 153 ++++++----- 50 files changed, 2861 insertions(+), 125 deletions(-) create mode 100644 .claude/.beads/.gitignore create mode 100644 .claude/.beads/config.yaml create mode 100644 .claude/.beads/metadata.json create mode 100644 .claude/commands/add-command.md create mode 100644 .claude/commands/commit.md create mode 100644 .claude/commands/create-adr.md create mode 100644 .claude/commands/create-issues.md create mode 100644 .claude/commands/gap.md create mode 100644 .claude/commands/green.md create mode 100644 .claude/commands/issue.md create mode 100644 .claude/commands/polish.md create mode 100644 .claude/commands/red.md create mode 100644 .claude/commands/refactor.md create mode 100644 .claude/commands/research.md create mode 100644 .claude/commands/simplify.md create mode 100644 .claude/commands/spike.md create mode 100644 .claude/commands/summarize.md create mode 100644 .claude/commands/tdd-review.md create mode 100644 .claude/commands/tdd.md create mode 100644 .claude/helpers/merge-claude-settings.sh create mode 100644 .claude/package-lock.json create mode 100644 .claude/package.json create mode 100644 .claude/settings/basics.jsonc create mode 100644 .claude/settings/permissions/bash.jsonc create mode 100644 .claude/settings/permissions/read.jsonc create mode 100644 .claude/settings/permissions/write.jsonc create mode 100644 .github/actions/ecr-auth/action.yml create mode 100644 .github/workflows/confirm-on-tagged-copier-template.yaml create mode 100644 AGENTS.md create mode 100644 CLAUDE.md diff --git a/.claude/.beads/.gitignore b/.claude/.beads/.gitignore new file mode 100644 index 0000000..bb8b4ce --- /dev/null +++ b/.claude/.beads/.gitignore @@ -0,0 +1,60 @@ +# Dolt database (managed by Dolt, not git) +dolt/ +dolt-access.lock + +# Runtime files +bd.sock +bd.sock.startlock +sync-state.json +last-touched + +# Local version tracking 
(prevents upgrade notification spam after git ops) +.local_version + +# Worktree redirect file (contains relative path to main repo's .beads/) +# Must not be committed as paths would be wrong in other clones +redirect + +# Sync state (local-only, per-machine) +# These files are machine-specific and should not be shared across clones +.sync.lock +.jsonl.lock +sync_base.jsonl +export-state/ + +# Ephemeral store (SQLite - wisps/molecules, intentionally not versioned) +ephemeral.sqlite3 +ephemeral.sqlite3-journal +ephemeral.sqlite3-wal +ephemeral.sqlite3-shm + +# Legacy files (from pre-Dolt versions) +*.db +*.db?* +*.db-journal +*.db-wal +*.db-shm +db.sqlite +bd.db +daemon.lock +daemon.log +daemon-*.log.gz +daemon.pid +beads.base.jsonl +beads.base.meta.json +beads.left.jsonl +beads.left.meta.json +beads.right.jsonl +beads.right.meta.json + +# NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here. +# They would override fork protection in .git/info/exclude, allowing +# contributors to accidentally commit upstream issue databases. +# The JSONL files (issues.jsonl, interactions.jsonl) and config files +# are tracked by git by default since no pattern above ignores them. + +# at the moment, we're just using beads for local development, so don't commit any jsonl +interactions.jsonl +issues.jsonl +backup/ +issues-dump.jsonl diff --git a/.claude/.beads/config.yaml b/.claude/.beads/config.yaml new file mode 100644 index 0000000..25090ef --- /dev/null +++ b/.claude/.beads/config.yaml @@ -0,0 +1,44 @@ +# Beads Configuration File +# This file configures default behavior for all bd commands in this repository +# All settings can also be set via environment variables (BD_* prefix) +# or overridden with command-line flags + +# Issue prefix for this repository (used by bd init) +# If not set, bd init will auto-detect from directory name +# Example: issue-prefix: "myproject" creates issues like "myproject-1", "myproject-2", etc. 
+# issue-prefix: "" + +# Use no-db mode: load from JSONL, write back after each command +# When true, bd will use .beads/issues.jsonl as the source of truth +# instead of the Dolt database +# no-db: false + +# Enable JSON output by default +# json: false + +# Default actor for audit trails (overridden by BD_ACTOR or --actor) +# actor: "" + +# Export events (audit trail) to .beads/events.jsonl on each flush/sync +# When enabled, new events are appended incrementally using a high-water mark. +# Use 'bd export --events' to trigger manually regardless of this setting. +# events-export: false + +# Multi-repo configuration (experimental - bd-307) +# Allows hydrating from multiple repositories and routing writes to the correct JSONL +# repos: +# primary: "." # Primary repo (where this database lives) +# additional: # Additional repos to hydrate from (read-only) +# - ~/beads-planning # Personal planning repo +# - ~/work-planning # Work planning repo + +# Integration settings (access with 'bd config get/set') +# These are stored in the database, not in this file: +# - jira.url +# - jira.project +# - linear.url +# - linear.api-key +# - github.org +# - github.repo +no-git-ops: true +backup.enabled: false # in v0.57 the backup seems to automatically stage the files into git, bypassing the .gitignore I tried to set up, so disabling for now (also we don't really need backup anyway if we're not pushing into git, but this would be nicer long term than having to run the export command manually all the time) diff --git a/.claude/.beads/metadata.json b/.claude/.beads/metadata.json new file mode 100644 index 0000000..a4be941 --- /dev/null +++ b/.claude/.beads/metadata.json @@ -0,0 +1,8 @@ +{ + "database": "dolt", + "jsonl_export": "issues.jsonl", + "backend": "dolt", + "dolt_mode": "server", + "dolt_server_host": "beads-dolt", + "dolt_database": "beads_work" +} diff --git a/.claude/commands/add-command.md b/.claude/commands/add-command.md new file mode 100644 index
0000000..a19105a --- /dev/null +++ b/.claude/commands/add-command.md @@ -0,0 +1,167 @@ +--- +description: Guide for creating new slash commands +argument-hint: +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +# Slash Command Creator Guide + +## How This Command Works + +The `/add-command` command shows this guide for creating new slash commands. It includes: + +- Command structure and syntax +- Common patterns and examples +- Security restrictions and limitations +- Frontmatter options + +**Note for AI**: When creating commands, you CAN use bash tools like `Bash(mkdir:*)`, `Bash(ls:*)`, `Bash(git status:*)` in the `allowed-tools` frontmatter of NEW commands - but ONLY for operations within the current project directory. This command itself doesn't need bash tools since it's just documentation. + +## Command Locations + +- **Personal**: `~/.claude/commands/` (available across all projects) +- **Project**: `.claude/commands/` (shared with team, shows "(project)") + +## Basic Structure + +```markdown +--- +allowed-tools: Read, Glob, Grep, Bash(git status:*), Task +description: Brief description of what this command does +argument-hint: [required-arg] [optional-arg] +--- + +# Command Title + +Your command instructions here. 
+ +**User arguments:** + +Add-command: $ARGUMENTS + +**End of user arguments** + +File reference: @path/to/file.js + +Bash command output: (exclamation)git status(backticks) +``` + +## ⚠️ Security Restrictions + +**Bash Commands (exclamation prefix)**: Limited to current working directory only. + +- ✅ Works: `! + backtick + git status + backtick` (in project dir) +- ❌ Blocked: `! + backtick + ls /outside/project + backtick` (outside project) +- ❌ Blocked: `! + backtick + pwd + backtick` (if referencing dirs outside project) + +**File References (`@` prefix)**: No directory restrictions. + +- ✅ Works: `@/path/to/system/file.md` +- ✅ Works: `@../other-project/file.js` + +## Common Patterns + +### Simple Command + +```bash +echo "Review this code for bugs and suggest fixes" > ~/.claude/commands/review.md +``` + +### Command with Arguments + +**Note for AI**: The example below uses a fullwidth dollar sign ($, U+FF04) to prevent interpolation in this documentation. When creating actual commands, use the regular `$` character. + +```markdown +Fix issue $ARGUMENTS following our coding standards +``` + +### Command with File References + +```markdown +Compare @src/old.js with @src/new.js and explain differences +``` + +### Command with Bash Output (Project Directory Only) + +```markdown +--- +allowed-tools: Bash(git status:*), Bash(git branch:*), Bash(git log:*) +--- +Current status: (!)git status(`) +Current branch: (!)git branch --show-current(`) +Recent commits: (!)git log --oneline -5(`) + +Create commit for these changes. +``` + +**Note**: Only works with commands in the current project directory. + +### Namespaced Command + +**Note for AI**: The example below uses a fullwidth dollar sign ($, U+FF04) to prevent interpolation in this documentation. When creating actual commands, use the regular `$` character. 
+ +```bash +mkdir -p ~/.claude/commands/ai +echo "Ask GPT-5 about: $ARGUMENTS" > ~/.claude/commands/ai/gpt5.md +# Creates: /ai:gpt5 +``` + +## Frontmatter Options + +- `allowed-tools`: Tools this command can use + - **Important**: Intrusive tools like `Write`, `Edit`, `NotebookEdit` should NEVER be allowed in commands unless the user explicitly requests them. These tools modify files and should only be used when the command's purpose is to make changes. + - ✅ Safe for most commands: `Read`, `Glob`, `Grep`, `Bash(git status:*)`, `Task`, `AskUserQuestion` +- `description`: Brief description (shows in /help) +- `argument-hint`: Help text for arguments +- `model`: Specific model to use + +## Best Practices + +### Safe Commands (No Security Issues) + +```markdown +# System prompt editor (file reference only) +(@)path/to/system/prompt.md + +Edit your system prompt above. +``` + +### Project-Specific Commands (Bash OK) + +```markdown +--- +allowed-tools: Bash(git status:*), Bash(npm list:*) +--- +Current git status: (!)git status(`) +Package info: (!)npm list --depth=0(`) + +Review project state and suggest next steps. +``` + +### Cross-Directory File Access (Use @ not !) + +```markdown +# Compare config files +Compare (@)path/to/system.md with (@)project/config.md + +Show differences and suggest improvements. 
+``` + +## Usage + +After creating: `/<command-name> [arguments]` + +Example: `/review` or `/ai:gpt5 "explain this code"` diff --git a/.claude/commands/commit.md b/.claude/commands/commit.md new file mode 100644 index 0000000..b11c370 --- /dev/null +++ b/.claude/commands/commit.md @@ -0,0 +1,57 @@ +--- +description: Create a git commit following project standards +argument-hint: [optional-commit-description] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +Create a git commit following project standards + +**User arguments:** + +Commit: $ARGUMENTS + +**End of user arguments** + +## Commit Message Rules + +Follows [Conventional Commits](https://www.conventionalcommits.org/) standard. + +1. **Format**: `type(#issue): description` + - Use `#123` for local repo issues + - Use `owner/repo#123` for cross-repo issues + - Common types: `feat`, `fix`, `docs`, `refactor`, `test`, `chore` + +2. **AI Credits**: **NEVER include AI credits in commit messages** + - No "Generated with Claude Code" + - No "Co-Authored-By: Claude" or "Co-Authored-By: Happy" + - Focus on the actual changes made, not conversation history + +3. **Content**: Write clear, concise commit messages describing what changed and why + +## Process + +1. Run `git status` and `git diff` to review changes +2. Run `git log --oneline -5` to see recent commit style +3. Stage relevant files with `git add` +4.
Create commit with descriptive message +5. Verify with `git status` + +## Example + +```bash +git add <files> +git commit -m "feat(#123): add validation to user input form" +``` diff --git a/.claude/commands/create-adr.md b/.claude/commands/create-adr.md new file mode 100644 index 0000000..307ed7e --- /dev/null +++ b/.claude/commands/create-adr.md @@ -0,0 +1,240 @@ +--- +description: Create a new Architecture Decision Record (ADR) +argument-hint: +--- + +# Create ADR: Architecture Decision Record Creator + +Create a new ADR to document an architectural decision. ADRs capture the "why" behind technical choices, helping future developers understand constraints and tradeoffs. + +> ADRs were introduced by Michael Nygard in 2011. The core structure (Context, Decision, Consequences) remains the standard. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools).
+ +**User arguments:** + +Create-adr: $ARGUMENTS + +**End of user arguments** + +(If no input provided, ask user for the architectural decision topic) + +## Process + +### Step 1: Detect Existing ADR Setup + +Check for existing ADR directory and structure: + +```bash +# Check common ADR directories (in order of preference) +for dir in doc/adr docs/adr decisions doc/architecture/decisions; do + if [ -d "$dir" ]; then + echo "Found: $dir" + ls "$dir"/*.md 2>/dev/null + break + fi +done +``` + +**If ADRs exist:** Read the first few ADRs (especially 0001 if present) to understand: + +- The template/structure this project uses +- Any project-specific sections or frontmatter +- Naming conventions and style + +Adapt the new ADR to match the existing pattern. + +**If no ADR directory exists:** Run the initialization flow (Step 1b). + +### Step 1b: Initialize ADR Practice (First-Time Setup) + +When no existing ADRs are found, help the user set up their ADR practice. + +Ask the user (use AskUserQuestion): + +**Directory location:** + +- doc/adr (Recommended - conventional location) +- docs/adr +- decisions +- doc/architecture/decisions + +**Template style:** + +- Minimal (Nygard's original: Context, Decision, Consequences) +- Standard (Minimal + Status, Date, References) +- With scope (Standard + applies_to patterns, code examples) + +**Create a foundational ADR-0001?** + +- Yes - document "We will use ADRs to record architectural decisions" +- No - proceed directly to creating the requested ADR + +If creating ADR-0001, generate it with: + +- Context: Why the team needs to document decisions +- Decision: Adopt ADRs following [chosen template style] +- Consequences: Better knowledge transfer, slight overhead per decision + +### Step 2: Determine ADR Number + +Calculate next number from existing ADRs: + +1. Extract highest existing number +2. Increment by 1 +3. 
Format as 4-digit zero-padded (e.g., `0001`, `0012`) + +### Step 3: Discovery Questions + +Gather context through conversation (use AskUserQuestion for structured choices): + +**Context & Problem** + +- What forces are at play? (technological, social, project constraints) +- What problem, pattern, or situation prompted this decision? +- What triggered the need to decide now? (bug, confusion, inconsistency, new requirement) +- Are there related PRs, issues, or prior discussions to reference? + +**The Decision** + +- What are we deciding to do (or not do)? +- What alternatives were considered? +- Why was this approach chosen over alternatives? + +**Consequences** + +- What becomes easier or more consistent with this decision? +- What becomes harder, more constrained, or riskier? +- What tradeoffs are we explicitly accepting? + +**Scope** + +- Which parts of the codebase does this apply to? +- Are there exceptions or areas where this doesn't apply? + +### Step 4: Generate ADR File + +Create `{adr_directory}/NNNN-title-slug.md`: + +- Convert title to kebab-case slug (lowercase, hyphens, no special chars) +- Use today's date for the `date` field +- Default status to `accepted` (most ADRs are written after the decision is made) + +**ADR Template:** + +```markdown +--- +status: accepted +date: YYYY-MM-DD +applies_to: + - "**/*.ts" + - "**/*.tsx" +--- + +# N. Title + +## Context + +[Forces at play - technological, social, project constraints. +What problem prompted this? Value-neutral description of the situation.] + +## Decision + +We will [decision statement in active voice]. 
+ +[If the decision involves code patterns, include concrete examples:] + +**Forbidden pattern:** +\`\`\`typescript +// ❌ BAD - [explanation] +[example of what NOT to do] +\`\`\` + +**Required pattern:** +\`\`\`typescript +// ✅ GOOD - [explanation] +[example of what TO do] +\`\`\` + +## Consequences + +**Positive:** +- [What becomes easier] +- [What becomes more consistent] + +**Negative:** +- [What becomes harder] +- [What constraints we accept] + +**Neutral:** +- [Other impacts worth noting] + +## References + +- [Related PRs, issues, or documentation] +``` + +### Step 5: Refine applies_to Scope + +Help user define which files this decision applies to using glob patterns: + +- All TypeScript files: **/*.ts +- All React component files: **/*.tsx +- Only files in components directory: src/components/** +- Exclude test files (prefix with !): !**/*.test.ts +- Exclude type definition files: !**/*.d.ts +- Specific package only: packages/api/** + +If the decision applies broadly, use **/* (all files). + +**Note**: `applies_to` is recommended for the "With scope" template. Linters and AI assistants use these patterns to determine which files to check against this ADR. + +### Step 6: Confirm and Write + +Show the complete ADR content and ask user to confirm before writing. + +After creation, suggest: + +- Review the ADR for completeness +- Commit with `/commit` + +## Tips for Good ADRs + +1. **Focus on the "why"** - The decision itself may be obvious; the reasoning often isn't +2. **Keep it concise** - 1-2 pages maximum; should be readable in 5 minutes +3. **Use active voice** - "We will use X" not "X will be used" +4. **Include concrete examples** - Code examples make abstract decisions tangible +5. **Document tradeoffs honestly** - Every decision has costs; be explicit about them +6. **Link to context** - Reference PRs, issues, or discussions where the decision was made +7. 
**Be specific about scope** - Use `applies_to` patterns to clarify affected code + +## Status Values + +| Status | When to Use | +|--------|-------------| +| `proposed` | Under discussion, not yet agreed | +| `accepted` | Agreed upon and should be followed | +| `deprecated` | No longer relevant (context changed) | +| `superseded` | Replaced by another ADR (link to it) | + +To supersede an existing ADR: + +1. Create new ADR with the updated decision +2. Update old ADR's status to `superseded by ADR-NNNN` + +## Integration with Other Commands + +- After creating: Commit with `/commit` +- If decision needs discussion: Create issue with `/create-issues` diff --git a/.claude/commands/create-issues.md b/.claude/commands/create-issues.md new file mode 100644 index 0000000..76d7cd2 --- /dev/null +++ b/.claude/commands/create-issues.md @@ -0,0 +1,195 @@ +--- +description: Create implementation plan from feature/requirement with PRD-style discovery and TDD acceptance criteria +argument-hint: <feature/requirement description or GitHub issue URL/number> +--- + +# Create Issues: PRD-Informed Task Planning for TDD + +Create structured implementation plan that bridges product thinking (PRD) with test-driven development. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). 
+ +**User arguments:** + +Create-issues: $ARGUMENTS + +**End of user arguments** + +(If no input provided, check conversation context or run `bd ready` to see existing work) + +## Input Processing + +The input can be one of: + +1. **GitHub Issue URL** (e.g., `https://github.com/owner/repo/issues/123`) +2. **GitHub Issue Number** (e.g., `#123` or `123`) +3. **Feature Description** (e.g., "Add user authentication") +4. **Empty** - use conversation context + +### GitHub Issue Integration + +If input looks like a GitHub issue: + +**Step 1: Extract Issue Number** + +- From URL: extract owner/repo/number +- From number: try to infer repo from git remote +- From branch name: check patterns like `issue-123`, `123-feature`, `feature/123` + +**Step 2: Fetch Issue** + +Use the GitHub CLI to fetch issue details: + +```bash +gh issue view [ISSUE_NUMBER] --json title,body,labels,comments,state +``` + +If the `gh` CLI is not installed or authenticated, show: + +``` +GitHub CLI not available or not authenticated! +Run: gh auth login +``` + +**Step 3: Use Issue as Discovery Input** + +- Title → Feature name +- Description → Problem statement and context +- Labels → Type/priority hints +- Comments → Additional requirements and discussion +- Linked issues → Dependencies + +Extract from GitHub issue: + +- Problem statement and context +- Acceptance criteria (if present) +- Technical notes (if present) +- Related issues/dependencies + +## Process + +## Discovery Phase + +Understand the requirement by asking (use AskUserQuestion if needed): + +**Problem Statement** + +- What problem does this solve? +- Who experiences this problem? +- What's the current pain point? + +**Desired Outcome** + +- What should happen after this is built? +- How will users interact with it? +- What does success look like? + +**Scope & Constraints** + +- What's in scope vs. out of scope? +- Any technical constraints? +- Dependencies on other systems/features? 
+ +**Context Check** + +- Search codebase for related features/modules +- Check for existing test files that might be relevant + +### Create Beads Issues + +For each task, create a bd issue with: + +```bash +bd create "Task title" \ + --type [feature|bug|task|chore] \ + --priority [1-3] \ + --description "Context and what needs to be built" \ + --design "Technical approach, architecture notes" \ + --acceptance "Given-When-Then acceptance criteria" +``` + +**Issue Structure Best Practices:** + +**Title**: Action-oriented, specific + +- ✅ "Add JWT token validation middleware" +- ❌ "Authentication stuff" + +**Description**: Provide context + +- Why this task exists +- How it fits into the larger feature +- Links to related issues/docs + +**Design**: Technical approach + +- Key interfaces/types needed +- Algorithm or approach +- Libraries or patterns to use +- Known gotchas or considerations + +**Acceptance Criteria**: Test-ready scenarios + +- Given-When-Then format +- Concrete, verifiable conditions +- Cover main case + edge cases +- Map 1:1 to future tests + +**Dependencies**: Link related issues + +```bash +bd dep add ISSUE-123 ISSUE-456 --type blocks +``` + +### Validation + +After creating issues, verify: + +- ✅ Each issue has clear acceptance criteria +- ✅ Dependencies are mapped (use `bd dep add`) +- ✅ Issues are ordered by implementation sequence +- ✅ First few issues are ready to start (`bd ready` shows them) +- ✅ Each issue is small enough for TDD (if too big, break down more) + +## Key Principles + +**From PRD World:** + +- Start with user problems, not solutions +- Define success criteria upfront +- Understand constraints and scope + +**From TDD World:** + +- Make acceptance criteria test-ready +- Break work into small, testable pieces +- Each task should map to test(s) + +### Beads Integration + +Use Beads MCP to: + +- Track work with `bd ready` to find next task +- Create issues with `bd create "description"` +- Track dependencies with `bd dep add` + 
+See <https://github.com/steveyegge/beads> for more information. + +## Integration with Other Commands + +- **Before /create-issues**: Use `/spike` if you need technical exploration first +- **After /create-issues**: Use `/red` to start TDD on first task +- **During work**: Use `bd update` to add notes/findings back to issues +- **When stuck**: Check `bd show ISSUE-ID` to review acceptance criteria diff --git a/.claude/commands/gap.md b/.claude/commands/gap.md new file mode 100644 index 0000000..e8abaee --- /dev/null +++ b/.claude/commands/gap.md @@ -0,0 +1,45 @@ +--- +description: Analyze conversation context for unaddressed items and gaps +argument-hint: [optional additional info] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +Analyze the current conversation context and identify things that have not yet been addressed. Look for: + +1. **Incomplete implementations** - Code that was started but not finished +2. **Unused variables/results** - Values that were captured but never used +3. **Missing tests** - Functionality without test coverage +4. **Open issues** - Beads issues that are still open or in progress + +5. **User requests** - Things the user asked for that weren't fully completed +6. **TODO comments** - Any TODOs mentioned in conversation +7. **Error handling gaps** - Missing error cases or edge cases +8. 
**Documentation gaps** - Undocumented APIs or features +9. **Consistency check** - Look for inconsistent patterns, naming conventions, or structure across the codebase + +Present findings as a prioritized list with: + +- What the gap is +- Why it matters +- Suggested next action + +If there are no gaps, confirm that everything discussed has been addressed. + +**User arguments:** + +Gap: $ARGUMENTS + +**End of user arguments** diff --git a/.claude/commands/green.md b/.claude/commands/green.md new file mode 100644 index 0000000..bc1b159 --- /dev/null +++ b/.claude/commands/green.md @@ -0,0 +1,110 @@ +--- +description: Execute TDD Green Phase - write minimal implementation to pass the failing test +argument-hint: <implementation description> +--- + +**User arguments:** + +Green: $ARGUMENTS + +**End of user arguments** + +GREEN PHASE! Apply the below to the user input above. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +(If there was no info above, fallback to: + +1. Context of the conversation, if there's an immediate thing +2. `bd ready` to see what to work on next and start from there) + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. 
**Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. + - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +### Post-Green Verification + +Once the test passes, run the coverage tool scoped to **only the files you edited** and check for uncovered lines: + +- Any uncovered lines in files you edited are over-implementation — **delete them** +- Do not scope to the full test suite; focus only on what changed + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. 
**Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. **Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. **Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. 
However, stubs and simple implementation to make imports and test infrastructure work are fine. +- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. +- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- When blocking the agent, provide helpful directions so that they do not get stuck. diff --git a/.claude/commands/issue.md b/.claude/commands/issue.md new file mode 100644 index 0000000..fc39502 --- /dev/null +++ b/.claude/commands/issue.md @@ -0,0 +1,162 @@ +--- +description: Analyze GitHub issue and create TDD implementation plan +argument-hint: [optional-issue-number] +--- + +Analyze GitHub issue and create TDD implementation plan. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +Process: + +1. Get Issue Number + +**User arguments:** + +Issue: $ARGUMENTS + +**End of user arguments** + +- Check if argument is an issue number +- Otherwise try branch name patterns: issue-123, 123-feature, feature/123, fix/123 +- If not found: ask user + +1.
Fetch Issue + +Use the GitHub CLI to fetch issue details: + +```bash +gh issue view [ISSUE_NUMBER] --json title,body,labels,comments,state +``` + +If the `gh` CLI is not installed or authenticated, show: + +``` +GitHub CLI not available or not authenticated! +Run: gh auth login +``` + +1. Analyze and Plan + +Summarize the issue and requirements, then: + +## Discovery Phase + +Understand the requirement by asking (use AskUserQuestion if needed): + +**Problem Statement** + +- What problem does this solve? +- Who experiences this problem? +- What's the current pain point? + +**Desired Outcome** + +- What should happen after this is built? +- How will users interact with it? +- What does success look like? + +**Scope & Constraints** + +- What's in scope vs. out of scope? +- Any technical constraints? +- Dependencies on other systems/features? + +**Context Check** + +- Search codebase for related features/modules +- Check for existing test files that might be relevant + +### Beads Integration + +Use Beads MCP to: + +- Track work with `bd ready` to find next task +- Create issues with `bd create "description"` +- Track dependencies with `bd dep add` + +See <https://github.com/steveyegge/beads> for more information. + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. **Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. 
+ - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. **Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. **Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. 
**Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. However, stubs and simple implementation to make imports and test infrastructure work is fine. +- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. 
+ +- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- When blocking the agent, provide helpful directions so that they do not get stuck. diff --git a/.claude/commands/polish.md b/.claude/commands/polish.md new file mode 100644 index 0000000..b42ef85 --- /dev/null +++ b/.claude/commands/polish.md @@ -0,0 +1,185 @@ +--- +description: Review and address issues in existing code - fix problems or justify skipping +argument-hint: [branch, PR#, file, or area to polish] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +# Polish + +Take another pass at existing work to address issues. Unlike `/code-review` which only identifies problems, `/polish` resolves each finding by either: + +1. **Fixing** - Implement the improvement +2.
**Skipping with justification** - Document why the issue can be deferred or ignored + +## Phase 0: Determine Scope + +Parse the argument to determine what to polish: + +| Input | Action | +|-------|--------| +| No argument | Detect divergence point, review uncommitted + committed changes | +| Branch name | Changes from that branch to HEAD | +| PR number (e.g., `123`) | Fetch PR diff from GitHub | +| PR URL (e.g., `github.com/.../pull/123`) | Extract PR number and fetch diff | +| File/path | Focus on specific file(s) | + +Use the GitHub CLI to fetch PR details: + +```bash +gh pr view [PR_NUMBER] --json title,body,state,mergeable,headRefName,baseRefName +gh pr diff [PR_NUMBER] +``` + +If the `gh` CLI is not installed or authenticated, show: + +``` +GitHub CLI not available or not authenticated! +Run: gh auth login +``` + +**For local branches:** + +1. Get current branch: `git rev-parse --abbrev-ref HEAD` +2. Detect divergence point (same logic as `/code-review`) +3. Collect changed files from diff and uncommitted changes + +## Phase 1: Identify Issues + +Categorize files based on these patterns: + +| Category | File Patterns | +|----------|---------------| +| Frontend/UI | `*.tsx`, `*.jsx`, `components/`, `pages/`, `views/`, `*.vue` | +| Frontend/Styling | `*.css`, `*.scss`, `*.less`, `styles/`, `*.tailwind*`, `*.styled.*` | +| Backend/API | `routes/`, `api/`, `controllers/`, `services/`, `*.controller.*`, `*.service.*`, `*.resolver.*` | +| Backend/Data | `migrations/`, `models/`, `prisma/`, `schema.*`, `*.model.*`, `*.entity.*` | +| Tooling/Config | `scripts/`, `*.config.*`, `package.json`, `tsconfig.*`, `vite.*`, `webpack.*`, `eslint.*` | +| CI/CD | `.github/`, `.gitlab-ci.*`, `Dockerfile`, `docker-compose.*`, `*.yml` in CI paths | +| Tests | `*.test.*`, `*.spec.*`, `__tests__/`, `__mocks__/`, `*.stories.*` | +| Docs | `*.md`, `docs/`, `README*`, `CHANGELOG*` | + +For each category, identify issues at these severity levels: + +- **blocker** - Must fix before merge 
+- **risky** - Should fix or have strong justification +- **nit** - Nice to have, easily skippable + +## Phase 2: Address Each Issue + +For each identified issue, present it and then take action: + +### Format + +``` +### [file:line] [severity] Title + +**Issue:** Description of the problem + +**Action taken:** +- [ ] Fixed: [what was done] +- [ ] Skipped: [justification] +``` + +### Decision Guidelines + +**Fix when:** + +- Security vulnerability +- Correctness bug +- Missing error handling that could crash +- Breaking API changes without migration +- Tests that don't actually test anything + +**Skip with justification when:** + +- Stylistic preference with no functional impact +- Optimization for unlikely hot paths +- Refactoring that would expand scope significantly +- Issue exists in code outside the change scope +- Technical debt documented for future sprint + +### Fixing Issues + +When fixing: + +1. Make the minimal change to address the issue +2. Ensure tests still pass (run them if needed) +3. Don't expand scope beyond the identified issue + +### Watch for Brittle Tests + +When refactoring implementation, watch for **Peeping Tom** tests that: + +- Test private methods or internal state directly +- Assert on implementation details rather than behavior +- Break on any refactoring even when behavior is preserved + +If tests fail after a pure refactoring (no behavior change), consider whether the tests are testing implementation rather than behavior. + +### Skipping Issues + +Valid skip justifications: + +- "Out of scope - exists in unchanged code" +- "Performance optimization unnecessary - called N times per request" +- "Tracked for future work - see issue #X" +- "Intentional design decision - [reason]" +- "Would require significant refactoring - defer to dedicated PR" + +Invalid skip justifications: + +- "Too hard to fix" +- "It works fine" +- "No time" + +## Phase 3: Cross-Cutting Check + +After addressing individual issues: + +1. 
**Consistency check** - Look for inconsistent patterns, naming conventions, or structure across the codebase + +Additional cross-cutting checks: + +- Did fixes introduce new inconsistencies? +- Are skip justifications consistent with each other? +- Any patterns in what was skipped that suggest a bigger issue? + +## Phase 4: Summary + +``` +## Polish Summary + +### Fixed +- [list of fixes applied] + +### Skipped (with justification) +- [issue]: [justification] + +### Tests +- [ ] All tests passing +- [ ] No new warnings introduced + +### Remaining Work +- [any follow-up items identified] +``` + +--- + +**User arguments:** + +Polish: $ARGUMENTS + +**End of user arguments** diff --git a/.claude/commands/red.md b/.claude/commands/red.md new file mode 100644 index 0000000..e612087 --- /dev/null +++ b/.claude/commands/red.md @@ -0,0 +1,111 @@ +--- +description: Execute TDD Red Phase - write ONE failing test +argument-hint: [optional additional info] +--- + +**User arguments:** + +Red: $ARGUMENTS + +**End of user arguments** + +RED PHASE! Apply the below to the user input above. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +(If there was no info above, fallback to: + +1. Context of the conversation, if there's an immediate thing +2. 
`bd ready` to see what to work on next and start from there) + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. **Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. + - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. **Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. 
**Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. **Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. However, stubs and simple implementation to make imports and test infrastructure work is fine. 
+- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. +- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- When blocking the agent, provide helpful directions so that they do not get stuck. + +### Test Structure (AAA Pattern) + +Structure each test with clear phases: + +- **Arrange**: Set up test data and preconditions (keep minimal) +- **Act**: Execute the single action being tested +- **Assert**: Verify the expected outcome with specific assertions diff --git a/.claude/commands/refactor.md b/.claude/commands/refactor.md new file mode 100644 index 0000000..15119e1 --- /dev/null +++ b/.claude/commands/refactor.md @@ -0,0 +1,127 @@ +--- +description: Execute TDD Refactor Phase - improve code structure while keeping tests green +argument-hint: <refactoring description> +--- + +**User arguments:** + +Refactor: $ARGUMENTS + +**End of user arguments** + +Apply this document (specifically the Refactor phase) to the user input above. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +(If there was no info above, fallback to: + +1. Context of the conversation, if there's an immediate thing +2.
`bd ready` to see what to work on next and start from there) + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. **Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. + - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. **Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. 
**Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. **Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. However, stubs and simple implementation to make imports and test infrastructure work is fine. 
+- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. +- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- When blocking the agent, provide helpful directions so that they do not get stuck. + +## Code Complexity Signals + +Look for these refactoring opportunities: + +- [ ] Nesting > 3 levels deep +- [ ] Functions > 20 lines +- [ ] Duplicate code blocks +- [ ] Abstractions with single implementation +- [ ] "Just in case" parameters or config +- [ ] Magic values without names +- [ ] Dead/unused code + +### Watch for Brittle Tests + +When refactoring implementation, watch for **Peeping Tom** tests that: + +- Test private methods or internal state directly +- Assert on implementation details rather than behavior +- Break on any refactoring even when behavior is preserved + +If tests fail after a pure refactoring (no behavior change), consider whether the tests are testing implementation rather than behavior. + +1. **Consistency check** - Look for inconsistent patterns, naming conventions, or structure across the codebase diff --git a/.claude/commands/research.md b/.claude/commands/research.md new file mode 100644 index 0000000..0a41c22 --- /dev/null +++ b/.claude/commands/research.md @@ -0,0 +1,96 @@ +--- +description: Research a problem in parallel via web docs, web search, codebase exploration, and deep ultrathink +argument-hint: <research topic or question> +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking.
Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +**User arguments:** + +Research: $ARGUMENTS + +**End of user arguments** + +Research the following problem or question thoroughly, like a senior developer would. + +## Step 1: Launch Parallel Research Agents + +Use the Task tool to spawn these subagents **in parallel** (all in a single message): + +1. **Web Documentation Agent** (subagent_type: general-purpose) + - Search official documentation for the topic + - Find best practices and recommended patterns + - Locate relevant GitHub issues or discussions + +2. **Web Search Agent** (subagent_type: general-purpose) + - Perform broad web searches for solutions and discussions + - Find Stack Overflow answers, blog posts, and tutorials + - Note common pitfalls and gotchas + +3. **Codebase Explorer Agent** (subagent_type: Explore) + - Search the codebase for related patterns + - Find existing solutions to similar problems + - Identify relevant files, functions, or components + +## Step 2: Library Documentation (Optional) + +If the research involves specific frameworks or libraries: + +- Use Context7 MCP tools (mcp__context7__resolve-library-id, then get-library-docs) +- Get up-to-date API references and code examples +- If Context7 is unavailable, note this in findings so user knows library docs were harder to obtain + +## Step 3: Deep Analysis + +With all gathered context, perform extended reasoning (ultrathink) to: + +- Analyze the problem from first principles +- Consider edge cases and trade-offs +- Synthesize insights across all sources +- Identify conflicts between sources + +## Step 4: Present Findings + +Present a structured summary to the user: + +### Problem Statement + +Describe the problem and why it matters. + +### Key Findings + +Summarize the most relevant solutions and approaches. + +### Codebase Patterns + +Document how the current codebase handles similar cases. 
+ +### Recommended Approach + +Provide your recommendation based on all research. + +### Conflicts + +Highlight where sources disagree and provide assessment of which is more reliable. + +### Sources + +List all source links with brief descriptions. This section is required. + +## Research Guidelines + +- Prioritize official documentation over blog posts +- Prefer solutions that match existing codebase patterns +- Note major.minor versions for libraries/frameworks (patch versions only if critical) +- Flag conflicting information across sources +- Write concise, actionable content +- Use active voice throughout +- **Do not create output files** - present findings directly in conversation unless user explicitly requests a file diff --git a/.claude/commands/simplify.md b/.claude/commands/simplify.md new file mode 100644 index 0000000..55242a9 --- /dev/null +++ b/.claude/commands/simplify.md @@ -0,0 +1,84 @@ +--- +description: Reduce code complexity while keeping tests green +argument-hint: [file, function, or area to simplify] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +**User arguments:** + +Simplify: $ARGUMENTS + +**End of user arguments** + +(If there was no info above, fallback to the context of the conversation) + +Reduce complexity while keeping tests green. + +## Core Principles + +**YAGNI** - Don't build until actually needed. Delete "just in case" code. + +**KISS** - Simplest solution that works. Clever is the enemy of clear. + +**Rule of Three** - Don't abstract until 3rd occurrence. "Prefer duplication over wrong abstraction" (Sandi Metz). 
+ +## When NOT to Simplify + +- Essential domain complexity (regulations, business rules) +- Performance-critical optimized code +- Concurrency/thread-safety requirements +- Security-sensitive explicit checks + +## Prerequisites + +Tests must be green. If failing, use `/green` first. + +## Code Complexity Signals + +Look for these refactoring opportunities: + +- [ ] Nesting > 3 levels deep +- [ ] Functions > 20 lines +- [ ] Duplicate code blocks +- [ ] Abstractions with single implementation +- [ ] "Just in case" parameters or config +- [ ] Magic values without names +- [ ] Dead/unused code + +## Techniques + +| Pattern | Before | After | +|---------|--------|-------| +| Guard clause | Nested `if/else` | Early `return` | +| Named condition | Complex boolean | `const isValid = ...` | +| Extract constant | `if (x > 3)` | `if (x > MAX_RETRIES)` | +| Flatten callback | `.then().then()` | `async/await` | + +**Also apply:** Consolidate duplicates, inline unnecessary abstractions, delete dead code. + +## Validate + +1. Tests still green +2. Code reads more clearly +3. No behavioral changes + +**Simplify** removes complexity locally. **Refactor** improves architecture broadly. Use `/refactor` if changes require structural reorganization. + +### Watch for Brittle Tests + +When refactoring implementation, watch for **Peeping Tom** tests that: + +- Test private methods or internal state directly +- Assert on implementation details rather than behavior +- Break on any refactoring even when behavior is preserved + +If tests fail after a pure refactoring (no behavior change), consider whether the tests are testing implementation rather than behavior. 
diff --git a/.claude/commands/spike.md b/.claude/commands/spike.md new file mode 100644 index 0000000..72a8fdb --- /dev/null +++ b/.claude/commands/spike.md @@ -0,0 +1,103 @@ +--- +description: Execute TDD Spike Phase - exploratory coding to understand problem space before TDD +argument-hint: <exploration description> +--- + +**User arguments:** + +Spike: $ARGUMENTS + +**End of user arguments** + +SPIKE PHASE! Apply the below to the user input above. + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +(If there was no info above, fallback to: + +1. Context of the conversation, if there's an immediate thing +2. `bd ready` to see what to work on next and start from there) + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. **Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. 
+ - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. **Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. **Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. 
**Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. However, stubs and simple implementation to make imports and test infrastructure work is fine. +- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. 
+- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- Provide the agent with helpful directions so that they do not get stuck when blocking them. diff --git a/.claude/commands/summarize.md b/.claude/commands/summarize.md new file mode 100644 index 0000000..d568f34 --- /dev/null +++ b/.claude/commands/summarize.md @@ -0,0 +1,62 @@ +--- +description: Summarize conversation progress and next steps +argument-hint: [optional additional info] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +Create a concise summary of the current conversation suitable for transferring context to a new conversation. 
+ +**User arguments:** + +Summarize: $ARGUMENTS + +**End of user arguments** + +## Summary Structure + +Provide a summary with these sections: + +### What We Did + +- Key accomplishments and changes made +- Important decisions or discoveries +- Files created, modified, or analyzed + +### What We're Doing Next + +- Immediate next steps +- Pending tasks or work in progress +- Goals or objectives to continue + +### Blockers & User Input Needed + +- Any issues requiring user intervention +- Decisions that need to be made +- Missing information or clarifications needed + +## Output Format + +Keep the summary concise and actionable - suitable for pasting into a new conversation to quickly restore context without needing the full conversation history. + +## Beads Integration + +If Beads MCP is available, check for task tracking status and ask if the user wants to: + +1. Review current task status +2. Update task states based on conversation progress +3. Include Beads context in the summary + +Use AskUserQuestion to confirm Beads integration preferences. diff --git a/.claude/commands/tdd-review.md b/.claude/commands/tdd-review.md new file mode 100644 index 0000000..b3612fb --- /dev/null +++ b/.claude/commands/tdd-review.md @@ -0,0 +1,110 @@ +--- +description: Review test suite quality against FIRST principles and TDD anti-patterns +argument-hint: [optional test file or directory path] +--- + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. 
Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +(If there was no info above, fallback to: + +1. Context of the conversation, if there's an immediate thing +2. `bd ready` to see what to work on next and start from there) + +# Test Quality Review + +Analyze test files against FIRST principles and TDD best practices. + +## Phase 1: Scope + +| Input | Action | +|-------|--------| +| No argument | Find all test files in project | +| File path | Analyze specific test file | +| Directory | Analyze tests in directory | + +Detect test files using common patterns: `*.test.*`, `*.spec.*`, `*.stories.*`, `__tests__/**` + +Also check for framework-specific patterns based on the project's languages and tools (e.g., `*_test.go`, `*_test.py`, `Test*.java`, `*.feature` for BDD). + +## Phase 2: Analysis + +For each test file, check against these criteria: + +### Quality Criteria + +#### FIRST Principles + +| Principle | What to Check | +|-----------|---------------| +| **Fast** | Tests complete quickly, no I/O, no network calls, no sleep()/setTimeout delays | +| **Independent** | No shared mutable state, no execution order dependencies between tests | +| **Repeatable** | No Date.now(), no Math.random() without seeding, no external service dependencies | +| **Self-validating** | Meaningful assertions that verify behavior, no manual verification needed | + +#### TDD Anti-patterns + +| Anti-pattern | Detection Signals | +|--------------|-------------------| +| **The Liar** | `expect(true).toBe(true)`, empty test bodies, tests with no assertions | +| **Excessive Setup** | >20 lines of arrange code, >5 mocks, deep nested object construction | +| **The One** | >5 assertions testing unrelated behaviors in a single test | +| **The Peeping Tom** | Testing private methods, asserting on internal state, tests that break on any refactor | +| **The Slow Poke** | Real database/network calls, file I/O, hard-coded timeouts | + +#### Test 
Structure (AAA Pattern) + +- **Arrange**: Clear setup with minimal fixtures +- **Act**: Single action being tested +- **Assert**: Specific, behavior-focused assertions + +## Phase 3: Report + +Output a structured report: + +``` +## Test Quality Report + +### Summary +- Files analyzed: N +- Tests found: N +- Issues found: N (X blockers, Y warnings) + +### By File + +#### path/to/file.test.ts + +| Line | Issue | Severity | Description | +|------|-------|----------|-------------| +| 15 | The Liar | blocker | Test has no assertions | +| 42 | Slow Poke | warning | Uses setTimeout(500) | + +### Recommendations +- [ ] Fix blockers before merge +- [ ] Consider refactoring tests with excessive setup +``` + +### Severity Levels + +- **blocker**: Must fix - test provides false confidence (The Liar, no assertions) +- **warning**: Should fix - test quality issue (Slow Poke, Excessive Setup) +- **info**: Consider - style or structure suggestion (AAA pattern) + +--- + +**User arguments:** + +TDD-review: $ARGUMENTS + +**End of user arguments** diff --git a/.claude/commands/tdd.md b/.claude/commands/tdd.md new file mode 100644 index 0000000..7cb2a55 --- /dev/null +++ b/.claude/commands/tdd.md @@ -0,0 +1,102 @@ +--- +description: Remind agent about TDD approach and continue conversation +argument-hint: [optional-response-to-last-message] +--- + +# TDD Reminder + +## General Guidelines + +### Output Style + +- **Never explicitly mention TDD** in code, comments, commits, PRs, or issues +- Write natural, descriptive code without meta-commentary about the development process +- The code should speak for itself - TDD is the process, not the product + +Beads is available for task tracking. Use `mcp__beads__*` tools to manage issues (the user interacts via `bd` commands). + +## Plan File Restriction + +**NEVER create, read, or update plan.md files.** Claude Code's internal planning files are disabled for this project. 
Use other methods to track implementation progress (e.g., comments, todo lists, or external tools). + +## TDD Fundamentals + +### The TDD Cycle + +The foundation of TDD is the Red-Green-Refactor cycle: + +1. **Red Phase**: Write ONE failing test that describes desired behavior + + - The test must fail for the RIGHT reason (not syntax/import errors) + - Only one test at a time - this is critical for TDD discipline + - Exception: For browser-level tests or expensive setup (e.g., Storybook `*.stories.tsx`), group multiple assertions within a single test block to avoid redundant setup - but only when adding assertions to an existing interaction flow. If new user interactions are required, still create a new test. Split files by category if they exceed ~1000 lines. + - **Adding a single test to a test file is ALWAYS allowed** - no prior test output needed + - Starting TDD for a new feature is always valid, even if test output shows unrelated work + - For DOM-based tests, use `data-testid` attributes to select elements rather than CSS classes, tag names, or text content + - Avoid hard-coded timeouts both in form of sleep() or timeout: 5000 etc; use proper async patterns (`waitFor`, `findBy*`, event-based sync) instead and rely on global test configs for timeout settings + +2. **Green Phase**: Write MINIMAL code to make the test pass + + - Implement only what's needed for the current failing test + - No anticipatory coding or extra features + - Address the specific failure message + +3. **Refactor Phase**: Improve code structure while keeping tests green + - Only allowed when relevant tests are passing + - Requires proof that tests have been run and are green + - Applies to BOTH implementation and test code + - No refactoring with failing tests - fix them first + +### Core Violations + +1. **Multiple Test Addition** + + - Adding more than one new test at once + - Exception: Initial test file setup or extracting shared test utilities + +2. 
**Over-Implementation** + + - Code that exceeds what's needed to pass the current failing test + - Adding untested features, methods, or error handling + - Implementing multiple methods when test only requires one + +3. **Premature Implementation** + - Adding implementation before a test exists and fails properly + - Adding implementation without running the test first + - Refactoring when tests haven't been run or are failing + +### Critical Principle: Incremental Development + +Each step in TDD should address ONE specific issue: + +- Test fails "not defined" → Create empty stub/class only +- Test fails "not a function" → Add method stub only +- Test fails with assertion → Implement minimal logic only + +### Optional Pre-Phase: Spike Phase + +In rare cases where the problem space, interface, or expected behavior is unclear, a **Spike Phase** may be used **before the Red Phase**. +This phase is **not part of the regular TDD workflow** and must only be applied under exceptional circumstances. + +- The goal of a Spike is **exploration and learning**, not implementation. +- The code written during a Spike is **disposable** and **must not** be merged or reused directly. +- Once sufficient understanding is achieved, all spike code is discarded, and normal TDD resumes starting from the **Red Phase**. +- A Spike is justified only when it is impossible to define a meaningful failing test due to technical uncertainty or unknown system behavior. + +### General Information + +- Sometimes the test output shows as no tests have been run when a new test is failing due to a missing import or constructor. In such cases, allow the agent to create simple stubs. Ask them if they forgot to create a stub if they are stuck. +- It is never allowed to introduce new logic without evidence of relevant failing tests. However, stubs and simple implementation to make imports and test infrastructure work is fine. 
+- In the refactor phase, it is perfectly fine to refactor both test and implementation code. That said, completely new functionality is not allowed. Types, clean up, abstractions, and helpers are allowed as long as they do not introduce new behavior. +- Adding types, interfaces, or a constant in order to replace magic values is perfectly fine during refactoring. +- Provide the agent with helpful directions so that they do not get stuck when blocking them. + +## Continue Conversation + +**User arguments:** + +TDD: $ARGUMENTS + +**End of user arguments** + +Please continue with the user input above, applying TDD approach. diff --git a/.claude/helpers/merge-claude-settings.sh b/.claude/helpers/merge-claude-settings.sh new file mode 100644 index 0000000..8343e15 --- /dev/null +++ b/.claude/helpers/merge-claude-settings.sh @@ -0,0 +1,98 @@ +#!/usr/bin/env bash +# +# Based on https://github.com/PaulRBerg/dot-claude/blob/main/helpers/merge_settings.sh +# +# Merges multiple JSONC settings files from the settings/ directory into a single +# settings.json file. This script handles: +# - Parsing JSONC files (JSON with comments) using json5 +# - Collecting and deduplicating permission arrays across all files +# - Merging non-permission top-level keys from all files +# +# Usage: merge_settings.sh + +set -euo pipefail + +# ---------------------------------------------------------------------------- # +# CONFIGURATION # +# ---------------------------------------------------------------------------- # + +# Navigate to the .claude directory relative to the folder this script is in +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)" +cd "$script_dir/../" + +# ---------------------------------------------------------------------------- # +# 1. 
DISCOVER FILES # +# ---------------------------------------------------------------------------- # + +# Find all .json and .jsonc files in settings/ directory (excluding settings.json) +# Files are sorted alphabetically to ensure consistent merge order +settings_files=$(find settings/ -type f \( -name '*.json' -o -name '*.jsonc' \) ! -name 'settings.json' | sort) + +if [ -z "$settings_files" ]; then + echo "No settings files found in settings/ directory" + exit 0 +fi + +# ---------------------------------------------------------------------------- # +# 2. PARSE JSONC FILES # +# ---------------------------------------------------------------------------- # + +# Parse all JSONC files to valid JSON using json5 in a single Node.js process +# The json5 tool allows comments and trailing commas in JSON files +# Using a single Node.js process is much faster than calling npx per file +# If a file fails to parse, fall back to empty object +parsed_json=$(npx -y -p json5 node -e " +const fs = require('fs'); +const JSON5 = require('json5'); +process.argv.slice(1).forEach(file => { + try { + console.log(JSON.stringify(JSON5.parse(fs.readFileSync(file, 'utf8')))); + } catch { + console.log('{}'); + } +}); +" $settings_files) + +# ---------------------------------------------------------------------------- # +# 3. MERGE WITH JQ # +# ---------------------------------------------------------------------------- # + +# Merge all parsed JSON files using jq +# The merge strategy is: +# 1. Collect all permission arrays from all files and deduplicate +# 2. Merge all other top-level keys (later values override earlier ones) +# 3. 
Exclude the $schema field from the final output +merged_json=$(echo "$parsed_json" | jq -s ' + # First, build the permissions object by collecting arrays from all files + { + permissions: { + # Collect additionalDirectories from all files, flatten, and deduplicate + additionalDirectories: ([.[].permissions.additionalDirectories // [] | .[] ] | unique), + + # Collect allow patterns from all files, flatten, and deduplicate + allow: ([.[].permissions.allow // [] | .[] ] | unique), + + # Collect ask patterns from all files, flatten, and deduplicate + ask: ([.[].permissions.ask // [] | .[] ] | unique), + + # Collect deny patterns from all files, flatten, and deduplicate + deny: ([.[].permissions.deny // [] | .[] ] | unique) + } + } * + # Then merge all non-permissions top-level keys from all files + # Later files override earlier files for conflicting keys + (reduce .[] as $item ({}; . * ($item | del(.permissions)))) + # Remove the $schema field from the final output + | del(."$schema") + # Sort all object keys alphabetically + | walk(if type == "object" then to_entries | sort_by(.key) | from_entries else . end) +') + +# ---------------------------------------------------------------------------- # +# 4. 
WRITE OUTPUT # +# ---------------------------------------------------------------------------- # + +# Write the merged JSON to settings.json +echo "$merged_json" > settings.json + +echo "✓ Merged settings.json from JSONC files" diff --git a/.claude/package-lock.json b/.claude/package-lock.json new file mode 100644 index 0000000..860fabf --- /dev/null +++ b/.claude/package-lock.json @@ -0,0 +1,24 @@ +{ + "name": ".claude", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "json5": "2.2.3" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + } + } +} diff --git a/.claude/package.json b/.claude/package.json new file mode 100644 index 0000000..f79a49b --- /dev/null +++ b/.claude/package.json @@ -0,0 +1,6 @@ +{ + "private": true, + "dependencies": { + "json5": "2.2.3" + } +} diff --git a/.claude/settings/basics.jsonc b/.claude/settings/basics.jsonc new file mode 100644 index 0000000..a7b12fa --- /dev/null +++ b/.claude/settings/basics.jsonc @@ -0,0 +1,5 @@ +{ + // Basic configuration for Claude Code + // General settings, environment variables, and UI customization + "$schema": "https://json.schemastore.org/claude-code-settings.json" +} diff --git a/.claude/settings/permissions/bash.jsonc b/.claude/settings/permissions/bash.jsonc new file mode 100644 index 0000000..bdf2771 --- /dev/null +++ b/.claude/settings/permissions/bash.jsonc @@ -0,0 +1,139 @@ +{ + // Bash command permissions - allowed and denied shell operations + // This should only ever be running in a devcontainer, so pretty lenient permissions are allowed + "$schema": "https://json.schemastore.org/claude-code-settings.json", + "permissions": { + "allow": [ + // AI interactions and tooling + 
"Bash(bd *)", + // Cloud Infrastructure + "Bash(aws sts get*)", + // Code Quality & Formatting + "Bash(pre-commit run *)", + // Core Shell Commands + "Bash(COMMAND *)", // used in slash commands + "Bash(kill *)", + "Bash(ps *)", + "Bash(pwd *)", + "Bash(time *)", + "Bash(timeout *)", + "Bash(which *)", + "Bash(xargs *)", + // Custom Scripts + "Bash(~/.agents/helpers/*)", + "Bash(~/.claude/helpers/*)", + "Bash(~/.claude/hooks/*)", + "Bash(~/.agents/skills/*)", + "Bash(~/.claude/skills/*)", + // File System + "Bash(bat *)", + "Bash(chmod *)", + "Bash(cp *)", + "Bash(eza *)", + "Bash(fd *)", + "Bash(find *)", + "Bash(fzf *)", + "Bash(ls *)", + "Bash(mkdir *)", + "Bash(stat *)", + "Bash(tar *)", + "Bash(touch *)", + "Bash(tree *)", + // Git & Version Control + "Bash(git diff *)", + "Bash(git status *)", + "Bash(git log *)", + "Bash(git rev-parse *)", + "Bash(git branch *)", + // Misc + "Bash(amp *)", + "Bash(atuin *)", + "Bash(bc *)", + "Bash(chezmoi *)", + "Bash(diff *)", + "Bash(jq *)", + "Bash(just *)", + "Bash(lsof *)", + "Bash(test *)", + "Bash(zk *)", + // Node.js + "Bash(pnpm test-unit *)", + "Bash(pnpm test-e2e *)", + // Python + "Bash(uv run pytest *)", + // Text Processing + "Bash(awk *)", + "Bash(cat *)", + "Bash(cut *)", + "Bash(echo *)", + "Bash(grep *)", + "Bash(head *)", + "Bash(printf *)", + "Bash(sed *)", + "Bash(sort *)", + "Bash(tail *)", + // Search + "Bash(rg *)", + // Research + "Bash(gh issue list *)", + "Bash(gh pr view *)", + "Bash(gh pr diff *)" + ], + "ask": [ + // let's hold off before we let it use the github CLI in any free running allow mode...I don't want it somehow approving PRs with the user's credentials + "Bash(gh repo *)", + "Bash(gh release *)", + "Bash(gh secret *)", + "Bash(gh ruleset *)", + "Bash(aws *)", // let's hold off before we let it use AWS CLI in any free running allow mode. 
We need to be very sure we don't have any access to staging or production credentials in our dev environment (...which we shouldn't...but we need to double check that or consider any other safeguards first) + "Bash(curl *)", + "Bash(ln *)", + "WebFetch", + ], + "deny": [ + // Exceptions to generally allowed AI tooling + "Bash(bd init*)", // we need to control the init process, don't let AI do that in the background + // Github + // Claude should not ever interfere with the PR process, that is how we gate AI's work + "Bash(gh pr create *)", + "Bash(gh pr edit *)", + "Bash(gh pr ready *)", + "Bash(gh pr review *)", + "Bash(gh pr merge *)", + "Bash(gh pr close *)", + "Bash(gh pr comment *)", + "Bash(gh pr update-branch *)", + + // Destructive File Operations + "Bash(chmod -R *)", + "Bash(chown -R *)", + "Bash(rm -rf / *)", + "Bash(rm -rf ~ *)", + // Dangerous Disk Operations + "Bash(> /dev/sda*)", + "Bash(> /etc/*)", + "Bash(dd *)", + "Bash(mkfs *)", + // Process Management + "Bash(kill -9 *)", + "Bash(killall *)", + // Git & Version Control + "Bash(git reset --hard *)", + "Bash(git push -f *)", + "Bash(git push --force*)", + // Node.js + "Bash(npm publish *)", + // System Administration + "Bash(doas *)", + "Bash(passwd *)", + "Bash(su *)", + "Bash(sudo *)", + "Bash(systemctl *)", + "Bash(userdel *)", + "Bash(usermod *)", + // System Control + "Bash(reboot *)", + "Bash(shutdown *)", + ], + }, +} diff --git a/.claude/settings/permissions/read.jsonc b/.claude/settings/permissions/read.jsonc new file mode 100644 index 0000000..7916e1e --- /dev/null +++ b/.claude/settings/permissions/read.jsonc @@ -0,0 +1,9 @@ +{ + // Read permissions for specific files + // NOTE: to add reading directories, use the additional-dirs.jsonc file + "$schema": "https://json.schemastore.org/claude-code-settings.json", + "permissions": { + "allow": ["Read(~/.zshrc)"], + "deny": ["Read(~/Drive/Secrets/**)", "Read(~/.ssh/**)", "Read(**/.env*)", "Read(~/.aws/**)"], + }, +} diff --git 
a/.claude/settings/permissions/write.jsonc b/.claude/settings/permissions/write.jsonc new file mode 100644 index 0000000..f7a1144 --- /dev/null +++ b/.claude/settings/permissions/write.jsonc @@ -0,0 +1,8 @@ +{ + // Write permissions for specific files + // This should only ever be running in a devcontainer, so pretty lenient permissions are allowed + "$schema": "https://json.schemastore.org/claude-code-settings.json", + "permissions": { + "allow": ["Write(/tmp/**)"] + } +} diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 9904260..372316b 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -6,7 +6,9 @@ reviews: - path: "**/vendor_files/**" instructions: "These files came from a vendor and we're not allowed to change them. Refer to it if you need to understand how the main code interacts with it, but do not make comments about it." - path: "**/*.py" - instructions: "Do not express concerns about assert statements being removed by using the -O python flag; we never use that flag. Do not express concerns about ruff rules; a pre-commit hook already runs a ruff check. Do not warn about unnecessary super().init() calls; pyright prefers those to be present." + instructions: "Check the `ruff.toml` and `ruff-test.toml` for linting rules we've explicitly disabled and don't suggest changes to please conventions we've disabled. Do not express concerns about ruff rules; a pre-commit hook already runs a ruff check. Do not warn about unnecessary super().__init__() calls; pyright prefers those to be present. Do not warn about missing type hints; a pre-commit hook already checks for that." + - path: "**/.copier-answers.yml" + instructions: "Do not comment about the `_commit` value needing to be a clean release tag. A CI job will fail if that is not the case." 
tools: eslint: # when the code contains typescript, eslint will be run by pre-commit, and coderabbit often generates false positives enabled: false diff --git a/.copier-answers.yml b/.copier-answers.yml index 16a8e0c..6af7586 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,5 +1,5 @@ # Changes here will be overwritten by Copier -_commit: v0.0.59 +_commit: v0.0.62-2-g93c5fcb _src_path: gh:LabAutomationAndScreening/copier-python-package-template configure_python_asyncio: false configure_vcrpy: false diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 70842f4..b36cd64 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,12 +1,13 @@ # base image tags available at https://mcr.microsoft.com/v2/devcontainers/universal/tags/list # added the platform flag to override any local settings since this image is only compatible with linux/amd64. since this image is only x64 compatible, suppressing the hadolint rule # hadolint ignore=DL3029 -FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/universal:5.1.1-noble +FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/universal:5.1.5-noble SHELL ["/bin/bash", "-o", "pipefail", "-c"] # temporary hack until yarn updates its GPG key -RUN rm /etc/apt/sources.list.d/yarn.list || true +RUN rm /etc/apt/sources.list.d/yarn.list || true && \ + curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | gpg --dearmor | tee /etc/apt/keyrings/yarn-archive-keyring.gpg > /dev/null RUN apt-get update -y && apt-get install -y \ "bash-completion=$(apt-cache madison bash-completion | awk '{print $3}' | grep '^1:2.11' | head -n 1)" --no-install-recommends \ diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index fede563..e4bbd51 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,20 +1,31 @@ { + "hostRequirements": { + "cpus": 2, + "memory": "4gb" + }, "dockerComposeFile": "docker-compose.yml", "service": "devcontainer", 
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", "features": { + "ghcr.io/devcontainers/features/git-lfs:1.2.5": { + // we are not using git-lfs right now, but its a transitive dep elsewhere. We want to configure auto pull to off since the feature will by default also try to install the git hook + // and it breaks creation of the devcontainer since pre-commit hooks are already in place. Probably a better way to fix this but until we use git-lfs we can leave it like this. + // https://github.com/devcontainers/features/blob/main/src/git-lfs/README.md + "autoPull": false + }, "ghcr.io/devcontainers/features/aws-cli:1.1.2": { // https://github.com/devcontainers/features/blob/main/src/aws-cli/devcontainer-feature.json // view latest version https://raw.githubusercontent.com/aws/aws-cli/v2/CHANGELOG.rst - "version": "2.32.6", + "version": "2.32.6" }, - "ghcr.io/devcontainers/features/python:1.7.1": { + "ghcr.io/devcontainers/features/python:1.8.0": { // https://github.com/devcontainers/features/blob/main/src/python/devcontainer-feature.json "version": "3.12.7", "enableShared": true, "installTools": false, "optimize": true }, + // https://github.com/anthropics/devcontainer-features/blob/main/src/claude-code/devcontainer-feature.json "ghcr.io/anthropics/devcontainer-features/claude-code:1.0.5": {} }, "customizations": { @@ -24,25 +35,25 @@ "-AmazonWebServices.aws-toolkit-vscode", // the AWS CLI feature installs this automatically, but it's causing problems in VS Code // basic tooling // "eamodio.gitlens@15.5.1", - "coderabbit.coderabbit-vscode@0.16.6", + "coderabbit.coderabbit-vscode@0.18.3", "ms-vscode.live-server@0.5.2025051301", "MS-vsliveshare.vsliveshare@1.0.5905", "github.copilot@1.388.0", - "github.copilot-chat@0.36.2026010502", - "anthropic.claude-code@2.0.75", + "github.copilot-chat@0.42.2026032602", + "anthropic.claude-code@2.1.84", // Python - "ms-python.python@2025.21.2026010501", - "ms-python.vscode-pylance@2025.10.100", + 
"ms-python.python@2026.5.2026032701", + "ms-python.vscode-pylance@2026.1.102", "ms-vscode-remote.remote-containers@0.414.0", - "charliermarsh.ruff@2025.32.0", + "charliermarsh.ruff@2026.38.0", // Misc file formats "bierner.markdown-mermaid@1.29.0", "samuelcolvin.jinjahtml@0.20.0", "tamasfe.even-better-toml@0.19.2", "emilast.LogFileHighlighter@3.3.3", - "esbenp.prettier-vscode@11.0.2" + "esbenp.prettier-vscode@12.4.0" ], "settings": { "editor.accessibilitySupport": "off", // turn off sounds @@ -67,5 +78,5 @@ "initializeCommand": "sh .devcontainer/initialize-command.sh", "onCreateCommand": "sh .devcontainer/on-create-command.sh", "postStartCommand": "sh .devcontainer/post-start-command.sh" - // Devcontainer context hash (do not manually edit this, it's managed by a pre-commit hook): 2824eafe # spellchecker:disable-line + // Devcontainer context hash (do not manually edit this, it's managed by a pre-commit hook): bb54c7d2 # spellchecker:disable-line } diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 882f04b..40e8a2a 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -18,6 +18,21 @@ services: - AWS_PROFILE=localstack - AWS_DEFAULT_REGION=us-east-1 - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + - BEADS_DIR=/workspaces/cloud-courier/.claude/.beads + - BEADS_DOLT_SERVER_HOST=beads-dolt # this is on the docker compose network. 
We don't want to publish the server port because then the local machine would have a bunch of conflicts with multiple devcontainers running + - BEADS_DOLT_SERVER_PORT=3306 + - BEADS_DOLT_SERVER_DATABASE=beads_work + - BEADS_DOLT_SERVER=beads-dolt # for some weird reason, unless we specify both of these envvars it doesn't seem to reliably work + - BEADS_DOLT_PORT=3306 + + + beads-dolt: + image: dolthub/dolt-sql-server:1.83.0 # no explicit reason for this version, just pinning for best practice + volumes: + - beads_dolt_data:/var/lib/dolt + environment: + - DOLT_ROOT_HOST=% + - DOLT_DATABASE=beads_work localstack: image: localstack/localstack:4.1.1 # no known reason for not using newer versions, just pinning for best practice @@ -30,4 +45,5 @@ services: - SERVICES=s3,sts,cloudwatch,ssm,events,iam volumes: + beads_dolt_data: {} python_venv: {} diff --git a/.devcontainer/install-ci-tooling.py b/.devcontainer/install-ci-tooling.py index 5d6a223..4f78f6a 100644 --- a/.devcontainer/install-ci-tooling.py +++ b/.devcontainer/install-ci-tooling.py @@ -7,9 +7,9 @@ import tempfile from pathlib import Path -UV_VERSION = "0.9.26" -PNPM_VERSION = "10.28.1" -COPIER_VERSION = "==9.11.2" +UV_VERSION = "0.10.12" +PNPM_VERSION = "10.33.0" +COPIER_VERSION = "==9.14.0" COPIER_TEMPLATE_EXTENSIONS_VERSION = "==0.3.3" PRE_COMMIT_VERSION = "4.5.1" GITHUB_WINDOWS_RUNNER_BIN_PATH = r"C:\Users\runneradmin\.local\bin" diff --git a/.devcontainer/manual-setup-deps.py b/.devcontainer/manual-setup-deps.py index 6f6fe0d..53e59e1 100644 --- a/.devcontainer/manual-setup-deps.py +++ b/.devcontainer/manual-setup-deps.py @@ -44,6 +44,12 @@ default=False, help="Allow uv to install new versions of Python on the fly. 
This is typically only needed when instantiating the copier template.", ) +_ = parser.add_argument( + "--skip-installing-pulumi-cli", + action="store_true", + default=False, + help="Do not install the Pulumi CLI even if the lock file references it", +) class PackageManager(str, enum.Enum): @@ -127,6 +133,17 @@ def main(): check=True, env=uv_env, ) + if ( + not generate_lock_file_only + and not args.skip_installing_pulumi_cli + and platform.system() == "Linux" + and env.lock_file.exists() + and '"pulumi"' in env.lock_file.read_text() + ): + _ = subprocess.run( + ["sh", str(REPO_ROOT_DIR / ".devcontainer" / "install-pulumi-cli.sh"), str(env.lock_file)], + check=True, + ) elif env.package_manager == PackageManager.PNPM: pnpm_command = ["pnpm", "install", "--dir", str(env.path)] if env_check_lock: diff --git a/.devcontainer/on-create-command.sh b/.devcontainer/on-create-command.sh index 2a387de..6417509 100644 --- a/.devcontainer/on-create-command.sh +++ b/.devcontainer/on-create-command.sh @@ -3,9 +3,19 @@ set -ex # For some reason the directory is not setup correctly and causes build of devcontainer to fail since # it doesn't have access to the workspace directory. This can normally be done in post-start-command -git config --global --add safe.directory /workspaces/cloud-courier +script_dir="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)" +repo_root="$(CDPATH= cd -- "$script_dir/.." && pwd)" +git config --global --add safe.directory "$repo_root" sh .devcontainer/on-create-command-boilerplate.sh +# install json5 for merging claude settings. 
TODO: consider if we can install json5 globally...or somehow eliminate this dependency +mkdir -p "$repo_root/.claude" +chmod -R ug+rwX "$repo_root/.claude" +chgrp -R 0 "$repo_root/.claude" || true +npm --prefix "$repo_root/.claude" ci + +# Install beads for use in Claude planning +npm install -g @beads/bd@0.57.0 # no specific reason for this version, just pinning for best practice pre-commit install --install-hooks diff --git a/.devcontainer/post-start-command.sh b/.devcontainer/post-start-command.sh index 2fea9b2..b415152 100644 --- a/.devcontainer/post-start-command.sh +++ b/.devcontainer/post-start-command.sh @@ -3,4 +3,14 @@ set -ex # For some reason the directory is not setup correctly and causes build of devcontainer to fail since # it doesn't have access to the workspace directory. This can normally be done in post-start-command -git config --global --add safe.directory /workspaces/cloud-courier +script_dir="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)" +repo_root="$(CDPATH= cd -- "$script_dir/.." && pwd)" +git config --global --add safe.directory "$repo_root" +pre-commit run merge-claude-settings -a +if ! bd ready; then + echo "It's likely the Dolt server has not yet been initialized to support beads, running that now" # TODO: figure out a better way to match this specific scenario than just a non-zero exit code...but beads still seems like in high flux right now so not sure what to tie it to + # the 'stealth' flag is just the only way I could figure out how to stop it from modifying AGENTS.md...if there's another way to avoid that, then fine. 
Even without the stealth flag though, files inside the .claude/beads directory get modified, so restoring them at the end to what was set in git...these shouldn't really need to change regularly + # trying to set 'prefix' to nothing doesn't seem to work (it just acts like the prefix flag wasn't there), so just setting to 'work' as an arbitrary name + # for some reason, the envvar for the server host isn't being picked up normally, so just passing it explicitly here + rm -rf .claude/.beads && bd init --server-host="$BEADS_DOLT_SERVER_HOST" --database="$BEADS_DOLT_SERVER_DATABASE" --skip-hooks --stealth --prefix=work </dev/null && git -c core.hooksPath=/dev/null restore --source=HEAD --staged --worktree .claude/.beads +fi diff --git a/.github/actions/ecr-auth/action.yml b/.github/actions/ecr-auth/action.yml new file mode 100644 index 0000000..8524f88 --- /dev/null +++ b/.github/actions/ecr-auth/action.yml @@ -0,0 +1,23 @@ +name: AWS ECR Auth +description: 'Use OIDC to Authenticate and login to an AWS ECR.' 
+ +inputs: + aws-region: + description: AWS region to use for ECR authentication + required: false + default: 'us-east-1' + role-arn: + description: AWS IAM Role ARN to assume for ECR authentication + required: false + default: 'arn:aws:iam::000000000000:role/CoreInfraBaseAccess' + +runs: + using: composite + steps: + - name: OIDC Auth for ECR + uses: aws-actions/configure-aws-credentials@v6.0.0 + with: + role-to-assume: ${{ inputs.role-arn }} + aws-region: ${{ inputs.aws-region }} + - name: Login to Amazon ECR + uses: aws-actions/amazon-ecr-login@v2.0.2 diff --git a/.github/actions/install_deps/action.yml b/.github/actions/install_deps/action.yml index b212d09..7e22d84 100644 --- a/.github/actions/install_deps/action.yml +++ b/.github/actions/install_deps/action.yml @@ -44,6 +44,11 @@ inputs: description: Whether to skip updating the hash when running manual-setup-deps.py default: true required: false + skip-installing-pulumi-cli: + type: boolean + description: Whether to skip installing the Pulumi CLI even if the lock file references it + default: false + required: false runs: @@ -64,7 +69,7 @@ runs: - name: Setup node if: ${{ inputs.node-version != 'notUsing' }} - uses: actions/setup-node@v6.2.0 + uses: actions/setup-node@v6.3.0 with: node-version: ${{ inputs.node-version }} @@ -75,7 +80,7 @@ runs: - name: OIDC Auth for CodeArtifact if: ${{ inputs.code-artifact-auth-role-name != 'no-code-artifact' }} - uses: aws-actions/configure-aws-credentials@v5.1.1 + uses: aws-actions/configure-aws-credentials@v6.0.0 with: role-to-assume: arn:aws:iam::${{ inputs.code-artifact-auth-role-account-id }}:role/${{ inputs.code-artifact-auth-role-name }} aws-region: ${{ inputs.code-artifact-auth-region }} @@ -83,5 +88,5 @@ runs: - name: Install dependencies # the funky syntax is github action ternary if: ${{ inputs.install-deps }} - run: python .devcontainer/manual-setup-deps.py ${{ inputs.python-version == 'notUsing' && '--no-python' || '' }} ${{ inputs.node-version == 'notUsing' 
&& '--no-node' || '' }} ${{ inputs.skip-updating-devcontainer-hash && '--skip-updating-devcontainer-hash' || '' }} + run: python .devcontainer/manual-setup-deps.py ${{ inputs.python-version == 'notUsing' && '--no-python' || '' }} ${{ inputs.node-version == 'notUsing' && '--no-node' || '' }} ${{ inputs.skip-updating-devcontainer-hash && '--skip-updating-devcontainer-hash' || '' }} ${{ inputs.skip-installing-pulumi-cli && '--skip-installing-pulumi-cli' || '' }} shell: pwsh diff --git a/.github/actions/update-devcontainer-hash/action.yml b/.github/actions/update-devcontainer-hash/action.yml index 148062b..e4cee89 100644 --- a/.github/actions/update-devcontainer-hash/action.yml +++ b/.github/actions/update-devcontainer-hash/action.yml @@ -24,7 +24,7 @@ runs: shell: bash - name: Checkout code - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 with: persist-credentials: true fetch-depth: 1 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 0d27ace..2081e6a 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,21 +1,21 @@ - ## Link to Issue or Message thread +## Link to Issue or Message thread - ## Why is this change necessary? +## Why is this change necessary? - ## How does this change address the issue? +## How does this change address the issue? - ## What side effects does this change have? +## What side effects does this change have? - ## How is this change tested? +## How is this change tested? - ## Other +## Other diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4a3d599..1f02a37 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -5,6 +5,7 @@ on: branches-ignore: - 'gh-readonly-queue/**' # don't run (again) when on these special branches created during merge groups; the `on: merge_group` already triggers it. 
merge_group: + pull_request: env: PYTHONUNBUFFERED: True @@ -18,11 +19,32 @@ jobs: get-values: uses: ./.github/workflows/get-values.yaml permissions: - contents: write # needed updating dependabot branches + contents: write # needed for updating dependabot branches + + check-skip-duplicate: + runs-on: ubuntu-24.04 + permissions: + contents: read + pull-requests: read # needed to check if PR exists for current branch + outputs: + should-run: ${{ steps.check.outputs.should-run }} + steps: + - name: Checkout code + uses: actions/checkout@v6.0.2 + with: + persist-credentials: false + - id: check + uses: ./.github/actions/check-skip-duplicates + + confirm-on-tagged-copier-template: + if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }} + uses: ./.github/workflows/confirm-on-tagged-copier-template.yaml lint: needs: - get-values + - check-skip-duplicate + if: needs.check-skip-duplicate.outputs.should-run == 'true' name: Pre-commit uses: ./.github/workflows/pre-commit.yaml permissions: @@ -34,6 +56,8 @@ jobs: test: needs: - lint + - check-skip-duplicate + if: needs.check-skip-duplicate.outputs.should-run == 'true' strategy: matrix: os: @@ -63,17 +87,20 @@ jobs: SERVICES: s3,sts,cloudwatch,ssm,events,iam steps: - name: Checkout code - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 - name: Install python tooling uses: ./.github/actions/install_deps with: + skip-installing-ssm-plugin-manager: true + skip-installing-pulumi-cli: true python-version: ${{ matrix.python-version }} - name: Create AWS profile run: | sh .devcontainer/create-aws-profile.sh cat ~/.aws/config + - name: Unit test run: uv run pytest tests/unit --cov-report=xml --durations=5 @@ -93,6 +120,8 @@ jobs: executable: needs: - test + - check-skip-duplicate + if: needs.check-skip-duplicate.outputs.should-run == 'true' strategy: matrix: os: @@ -109,16 +138,18 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 - name: 
Install python tooling uses: ./.github/actions/install_deps with: + skip-installing-ssm-plugin-manager: true + skip-installing-pulumi-cli: true python-version: ${{ matrix.python-version }} - name: Build executable run: uv run pyinstaller pyinstaller.spec --log-level=DEBUG - name: Upload executable artifact - uses: actions/upload-artifact@v6.0.0 + uses: actions/upload-artifact@v7.0.0 with: name: exe-${{ matrix.os }}-${{ matrix.python-version }} path: dist/ @@ -129,6 +160,8 @@ jobs: build-docs: needs: - lint + - check-skip-duplicate + if: needs.check-skip-duplicate.outputs.should-run == 'true' strategy: matrix: python-version: @@ -143,11 +176,13 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 - name: Install python tooling uses: ./.github/actions/install_deps with: + skip-installing-ssm-plugin-manager: true + skip-installing-pulumi-cli: true python-version: ${{ matrix.python-version }} - name: Build docs @@ -155,33 +190,49 @@ jobs: run: uv run make html SPHINXOPTS="-W" - required-check: + workflow-summary: runs-on: ubuntu-24.04 - permissions: - statuses: write # needed for updating status on Dependabot PRs + timeout-minutes: 2 needs: - get-values + - check-skip-duplicate - lint - test - build-docs - executable + - confirm-on-tagged-copier-template + permissions: + statuses: write # needed for updating status on Dependabot PRs if: always() - timeout-minutes: 2 steps: - name: fail if prior job failure run: | - failure_pattern="^(failure|cancelled)$" - - if [[ "${{ needs.get-values.result }}" =~ $failure_pattern ]] || - [[ "${{ needs.lint.result }}" =~ $failure_pattern ]] || - [[ "${{ needs.build-docs.result }}" =~ $failure_pattern ]] || - [[ "${{ needs.executable.result }}" =~ $failure_pattern ]] || - [[ "${{ needs.test.result }}" =~ $failure_pattern ]]; then - echo "❌ One or more jobs failed or were cancelled" + success_pattern="^(skipped|success)$" # these are the possibilities: 
https://docs.github.com/en/actions/reference/workflows-and-actions/contexts#needs-context + + if [[ ! "${{ needs.get-values.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.check-skip-duplicate.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.lint.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.build-docs.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.executable.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.test.result }}" =~ $success_pattern ]] || + [[ ! "${{ needs.confirm-on-tagged-copier-template.result }}" =~ $success_pattern ]]; then + echo "❌ One or more jobs did not finish with skipped or success" exit 1 fi - echo "✅ All jobs completed successfully or were skipped" - - name: Mark updated Dependabot commit of devcontainer hash as succeeded + echo "✅ All jobs finished with skipped or success" + + - name: Mark the required-check as succeeded so the PR can be merged + if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }} + env: + GH_TOKEN: ${{ github.token }} + run: | + gh api \ + -X POST -H "Accept: application/vnd.github.v3+json" \ + "${{ github.event.pull_request.statuses_url }}" \ + -f state=success -f context="required-check" -f description="✅ All required checks passed in the job triggered by pull_request" \ + -f target_url="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" + + - name: Mark updated dependabot hash commit as succeeded if: needs.get-values.outputs.dependabot-commit-created == 'true' env: GH_TOKEN: ${{ github.token }} diff --git a/.github/workflows/confirm-on-tagged-copier-template.yaml b/.github/workflows/confirm-on-tagged-copier-template.yaml new file mode 100644 index 0000000..f042bae --- /dev/null +++ b/.github/workflows/confirm-on-tagged-copier-template.yaml @@ -0,0 +1,34 @@ +name: Confirm using tagged copier template version + +on: + workflow_call: + inputs: + answers_file: + description: 'Path to the copier answers file' + type: string 
+ default: '.copier-answers.yml' + +jobs: + confirm-on-tagged-copier-template: + runs-on: ubuntu-24.04 + timeout-minutes: 2 + name: Fail if template under development + steps: + - name: Checkout code + uses: actions/checkout@v6.0.2 + with: + persist-credentials: false + + - name: Check _commit is a clean release tag + run: | + ANSWERS_FILE="${{ inputs.answers_file }}" + if [ ! -f "$ANSWERS_FILE" ]; then + echo "Error: $ANSWERS_FILE not found" + exit 1 + fi + COMMIT_LINE=$(grep "^_commit:" "$ANSWERS_FILE") + if echo "$COMMIT_LINE" | grep -q "-"; then + echo "Error: $COMMIT_LINE" + echo "_commit must be a clean release tag (e.g. v0.0.111), not a dev commit (e.g. v0.0.106-14-g7847d7b)" + exit 1 + fi diff --git a/.github/workflows/get-values.yaml b/.github/workflows/get-values.yaml index 0be45c4..4d6086b 100644 --- a/.github/workflows/get-values.yaml +++ b/.github/workflows/get-values.yaml @@ -35,7 +35,7 @@ jobs: JSON - name: Checkout code - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 with: persist-credentials: false diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 5029575..fbdeb86 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -33,14 +33,14 @@ jobs: steps: - name: Checkout code during push if: ${{ github.event_name == 'push' }} - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 with: ref: ${{ github.ref_name }} # explicitly get the head of the branch, which will include any new commits pushed if this is a dependabot branch persist-credentials: false - name: Checkout code not during push if: ${{ github.event_name != 'push' }} - uses: actions/checkout@v6.0.1 + uses: actions/checkout@v6.0.2 with: persist-credentials: false @@ -50,6 +50,7 @@ jobs: python-version: ${{ inputs.python-version }} node-version: ${{ inputs.node-version }} skip-installing-ssm-plugin-manager: true + skip-installing-pulumi-cli: true - name: Set up mutex # Github concurrency management 
is horrible, things get arbitrarily cancelled if queued up. So using mutex until github fixes itself. When multiple jobs are modifying cache at once, weird things can happen. possible issue is https://github.com/actions/toolkit/issues/658 if: ${{ runner.os != 'Windows' }} # we're just gonna have to YOLO on Windows, because this action doesn't support it yet https://github.com/ben-z/gh-action-mutex/issues/14 @@ -59,7 +60,7 @@ jobs: timeout-minutes: 8 # this is the amount of time this action will wait to attempt to acquire the mutex lock before failing, e.g. if other jobs are queued up in front of it - name: Cache Pre-commit hooks - uses: actions/cache@v5.0.2 + uses: actions/cache@v5.0.3 env: cache-name: cache-pre-commit-hooks with: diff --git a/.gitignore b/.gitignore index 7200438..cab6c19 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,16 @@ .ruff_cache/ .pipx_cache/ + +# Claude +.claude/settings.local.json +## we manage the settings JSON by merging the JSONC files in the settings/ directory +.claude/settings.json +# Dolt database files (used by beads for Claude) +.dolt/ +*.db + + # Vendor build/make dkms.conf @@ -73,8 +83,8 @@ dist # Logs *.log +*.log.* **/logs/log*.txt -**/logs/*.log.* # macOS dev cleanliness **/.DS_Store diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e0dbbb7..6ea6a13 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -minimum_pre_commit_version: 4.2.0 +minimum_pre_commit_version: 4.3.0 # run `pre-commit autoupdate --freeze` to update all hooks default_install_hook_types: [pre-commit, post-checkout] repos: @@ -42,7 +42,7 @@ repos: # Reformatting (should generally come before any file format or other checks, because reformatting can change things) - repo: https://github.com/crate-ci/typos - rev: b31d3aa6e8e43e6a9cf7a1d137baf189dec0922b # frozen: v1 + rev: e1f6f6eaedd8587fa3c76ec20e7cbaa8f7132b2d # frozen: v1 hooks: - id: typos exclude: | @@ -108,7 +108,7 @@ repos: )$ - repo: 
https://github.com/rbubley/mirrors-prettier - rev: 14abee445aea04b39069c19b4bd54efff6775819 # frozen: v3.7.4 + rev: c2bc67fe8f8f549cc489e00ba8b45aa18ee713b1 # frozen: v3.8.1 hooks: - id: prettier # TODO: get template YAML and MD files more in line with prettier expectations so we can start using prettier on those too @@ -195,7 +195,7 @@ repos: - id: check-case-conflict - repo: https://github.com/python-jsonschema/check-jsonschema - rev: b035497fb64e3f9faa91e833331688cc185891e6 # frozen: 0.36.0 + rev: ec368acd16deee9c560c105ab6d27db4ee19a5ec # frozen: 0.36.2 hooks: - id: check-github-workflows @@ -249,7 +249,7 @@ repos: description: Runs hadolint to lint Dockerfiles - repo: https://github.com/astral-sh/ruff-pre-commit - rev: 5ba58aca0bd5bc7c0e1c0fc45af2e88d6a2bde83 # frozen: v0.14.10 + rev: 0839f92796ae388643a08a21640a029b322be5c2 # frozen: v0.15.2 hooks: - id: ruff name: ruff-src @@ -314,10 +314,19 @@ repos: # print the number of files as a sanity-check verbose: true - # Devcontainer context --- this makes Github's "prebuild codespaces" feature work more intelligently for the "Configuration Change" trigger + # Updating repo config/tooling files - repo: local hooks: + - id: merge-claude-settings + # Keep Claude's settings.json synced with the JSON5 files. It's only for local development, so don't run it in CI + name: merge Claude settings + entry: bash -c '[[ "${GITHUB_ACTIONS:-}" == "true" || "${CI:-}" == "true" ]] && exit 0; bash .claude/helpers/merge-claude-settings.sh' + files: ^\.claude/settings/.*\.(json|jsonc)$ + pass_filenames: false + language: system + - id: compute-devcontainer-context-hash + # Devcontainer context --- this makes Github's "prebuild codespaces" feature work more intelligently for the "Configuration Change" trigger name: compute devcontainer context hash entry: bash -c "python3 .github/workflows/hash_git_files.py . 
--for-devcontainer-config-update" files: (.*.lock)|(.*pnpm-lock.yaml)|(.*hash_git_files.py)|(.devcontainer/.*)|(\.pre-commit-config.yaml) diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..d6ed3c3 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,149 @@ +# Project Structure + +This project is a Python library. + +# Code Guidelines + +## Code Style + +- Comments should be used very rarely. Code should generally express its intent. +- Never write a one-line docstring — either the name is sufficient or the behavior warrants a full explanation. +- Don't sort or remove imports manually — pre-commit handles it. +- Always include type hints for pyright in Python +- Respect the pyright rule reportUnusedCallResult; assign unneeded return values to `_` +- Prefer keyword-only parameters (unless a very clear single-argument function): use `*` in Python signatures and destructured options objects in TypeScript. +- When disabling a linting rule with an inline directive, provide a comment at the end of the line (or on the line above for tools that don't allow extra text after an inline directive) describing the reasoning for disabling the rule. +- Avoid telling the type checker what a type is rather than letting it prove it. This includes type assertions (`as SomeType` in TypeScript, `cast()` in Python) and variable annotations that override inference. Prefer approaches that let the type checker verify the type itself: `isinstance`/`instanceof` narrowing, restructuring code so the correct type flows naturally, or using discriminated unions. When there is genuinely no alternative, add a comment explaining why the workaround is necessary and why it is safe. + +## Testing + +- Always run tests with an explicit path (e.g. uv run pytest tests/unit) — test runners discover all types (unit, integration, E2E...) by default. 
+- When iterating on a single test, run that test in isolation first and confirm it is in the expected state (red or green) before widening to the full suite. Use the most targeted invocation available: a specific test function for Python (e.g. `uv run pytest path/to/test.py::test_name --no-cov`) or a file path and name filter for TypeScript (e.g. `pnpm test-unit -- path/to/test.spec.ts -t "test name" --no-coverage`). Only run the full suite once the target test behaves as expected. +- Test coverage requirements are usually at 100%, so when running a subset of tests, always disable test coverage to avoid the test run failing for insufficient coverage. +- Avoid magic values in comparisons in tests in all languages (like ruff rule PLR2004 specifies) +- Prefer using random values in tests rather than arbitrary ones (e.g. the faker library, uuids, random.randint) when possible. For enums, pick randomly rather than hardcoding one value. +- Avoid loops in tests — assert each item explicitly so failures pinpoint the exact element. When verifying a condition across all items in a collection, collect the violations into a list and assert it's empty (e.g., assert [x for x in items if bad_condition(x)] == []). +- When asserting a mock or spy was called with specific arguments, always constrain as tightly as possible. In order of preference: (1) assert called exactly once with those args (`assert_called_once_with` in Python, `toHaveBeenCalledExactlyOnceWith` in Vitest/Jest); (2) if multiple calls are expected, assert the total call count and use a positional or last-call assertion (`nthCalledWith`, `lastCalledWith` / `assert_has_calls` with `call_args_list[n]`); (3) plain "called with at any point" (`toHaveBeenCalledWith`, `assert_called_with`) is a last resort only when neither the call count nor the call order can reasonably be constrained. 
+ +### Python Testing + +- When using `mocker.spy` on a class-level method (including inherited ones), the spy records the unbound call, so assertions need `ANY` as the first argument to match self: `spy.assert_called_once_with(ANY, expected_arg)` +- Before writing new mock/spy helpers, check the `tests/unit/` folder for pre-built helpers in files like `fixtures.py` or `*mocks.py` +- When a test needs a fixture only for its side effects (not its return value), use `@pytest.mark.usefixtures(fixture_name.__name__)` instead of adding an unused parameter with a noqa comment +- Use `__name__` instead of string literals when referencing functions/methods (e.g., `mocker.patch.object(MyClass, MyClass.method.__name__)`, `pytest.mark.usefixtures(my_fixture.__name__)`). This enables IDE refactoring tools to catch renames. +- When using the faker library, prefer the pytest fixture (provided by the faker library) over instantiating instances of Faker. +- **Choosing between cassettes and mocks:** At the layer that directly wraps an external API or service, strongly prefer VCR cassette-recorded interactions (via pytest-recording/vcrpy) — they capture real HTTP traffic and verify the wire format, catching integration issues that mocks would miss. At layers above that (e.g. business logic, route handlers), mock the wrapper layer instead (e.g. `mocker.patch.object(ThresholdsRepository, ...)`) — there is no value in re-testing the HTTP interaction from higher up. +- **Never hand-write VCR cassette YAML files.** Cassettes must be recorded from real HTTP interactions by running the test once with `--record-mode=once` against a live external service: `uv run pytest --record-mode=once <test path> --no-cov`. The default mode is `none` — a missing cassette will cause an error, which is expected until recorded. +- **Never hand-edit syrupy snapshot files.** Snapshots are auto-generated — to create or update them, run `uv run pytest --snapshot-update <test path> --no-cov`. 
A missing snapshot causes the test to fail, which is expected until you run with `--snapshot-update`. When a snapshot mismatch occurs, fix the code if the change was unintentional; run `--snapshot-update` if it was intentional. +- **Never hand-write or hand-edit pytest-reserial `.jsonl` recording files.** Recordings must be captured from real serial port traffic by running the test with `--record` while the device is connected: `uv run pytest --record <test path> --no-cov`. The default mode replays recordings — a missing recording causes an error, which is expected until recorded against a live device. + +### Frontend Testing + +- Key `data-testid` selectors off unique IDs (e.g. UUIDs), not human-readable names which may collide or change. +- In DOM-based tests, scope queries to the tightest relevant container. Only query `document` or `document.body` directly to find the top-level portal/popup element (e.g. a Reka UI dialog via `[role="dialog"][data-state="open"]`); all further queries should run on that element, not on `document.body` again. + +# Agent Implementations & Configurations + +## Memory and Rules + +- Before saving any memory or adding any rule, explicitly ask the user whether the concept should be: (1) added to AGENTS.md as a general rule applicable across all projects, (2) added to AGENTS.md as a rule specific to this project, or (3) stored as a temporary local memory only relevant to the current active work. The devcontainer environment is ephemeral, so local memory files are rarely the right choice. + +## Tooling + +- ❌ Never use `python3` or `python` directly. ✅ Always use `uv run python` for Python commands. +- ❌ Never use `python3`/`python` for one-off data tasks. ✅ Use `jq` for JSON parsing, standard shell builtins for string manipulation. Only reach for `uv run python` when no dedicated tool covers the need. +- Check .devcontainer/devcontainer.json for tooling versions (Python, Node, etc.) 
when reasoning about version-specific stdlib or tooling behavior. +- For frontend tests, run commands via `pnpm` scripts from `frontend/package.json` — never invoke tools directly (not pnpm exec <tool>, npx <tool>, etc.). ✅ pnpm test-unit ❌ pnpm vitest ... or npx vitest ... +- For linting and type-checking, prefer `pre-commit run <hook-id>` over invoking tools directly — this matches the permission allow-list and mirrors what CI runs. Key hook IDs: `typescript-check`, `eslint`, `pyright`, `ruff`, `ruff-format`. +- Never rely on IDE diagnostics for ruff warnings — the IDE may not respect the project's ruff.toml config. Run `pre-commit run ruff -a` to get accurate results. +- When running terminal commands, execute exactly one command per tool call. Do not chain commands with &&, ||, ;, or & — this prohibition has no exceptions, even for `cd && ...` patterns. Use `cd` to change to the directory you want before running the command, avoiding the need to chain. Pipes (|) are allowed for output transformation (e.g., head, tail, grep). If two sequential commands are needed, run them in separate tool calls. Chained commands break the permission allow-list matcher and cause unnecessary permission prompts +- Never use `pnpm --prefix <path>` or `uv --directory <path>` to target a different directory — these flags break the permission allow-list matcher the same way chained `cd &&` commands do. Instead, rely on the working directory already being correct (the cwd persists between Bash tool calls), or issue a plain `cd <path>` as a separate prior tool call to reposition before running the command. +- Never use backslash line continuations in shell commands — always write the full command on a single line. Backslashes break the permission allow-list matcher. +- **Never manually edit files in any `generated/` folder.** These files are produced by codegen tooling (typically Kiota) and any manual changes will be overwritten. 
If a generated file needs to change, update the source (e.g. the OpenAPI schema) and re-run the generator. + +<!-- BEGIN BEADS INTEGRATION --> +## Issue Tracking with bd (beads) + +**IMPORTANT**: This project uses **bd (beads)** for ALL issue tracking. Do NOT use markdown TODOs, task lists, or other tracking methods. + +### Why bd? + +- Dependency-aware: Track blockers and relationships between issues +- Git-friendly: Auto-syncs to JSONL for version control +- Agent-optimized: JSON output, ready work detection, discovered-from links +- Prevents duplicate tracking systems and confusion + +### Quick Start + +**Check for ready work:** + +```bash +bd ready --json +``` + +**Create new issues:** + +```bash +bd create "Issue title" --description="Detailed context" -t bug|feature|task -p 0-4 --json +bd create "Issue title" --description="What this issue is about" -p 1 --deps discovered-from:bd-123 --json +``` + +**Claim and update:** + +```bash +bd update bd-42 --status in_progress --json +bd update bd-42 --priority 1 --json +``` + +**Complete work:** + +```bash +bd close bd-42 --reason "Completed" --json +``` + +**Creating a human-readable file:** +After every CRUD command on an issue, export it: + +```bash +bd export -o .claude/.beads/issues-dump.jsonl +``` + +### Issue Types + +- `bug` - Something broken +- `feature` - New functionality +- `task` - Work item (tests, docs, refactoring) +- `epic` - Large feature with subtasks +- `chore` - Maintenance (dependencies, tooling) + +### Priorities + +- `0` - Critical (security, data loss, broken builds) +- `1` - High (major features, important bugs) +- `2` - Medium (default, nice-to-have) +- `3` - Low (polish, optimization) +- `4` - Backlog (future ideas) + +### Workflow for AI Agents + +1. **Check ready work**: `bd ready` shows unblocked issues +2. **Claim your task**: `bd update <id> --status in_progress` +3. **Work on it**: Implement, test, document +4. 
**Discover new work?** Create linked issue: + - `bd create "Found bug" --description="Details about what was found" -p 1 --deps discovered-from:<parent-id>` +5. **Complete**: `bd close <id> --reason "Done"` + + +### Important Rules + +- ✅ Use bd for ALL task tracking +- ✅ Always use `--json` flag for programmatic use +- ✅ Link discovered work with `discovered-from` dependencies +- ✅ Check `bd ready` before asking "what should I work on?" +- ❌ Do NOT create markdown TODO lists +- ❌ Do NOT use external issue trackers +- ❌ Do NOT duplicate tracking systems + +For more details, see README.md and docs/QUICKSTART.md. + +<!-- END BEADS INTEGRATION --> diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..43c994c --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +@AGENTS.md diff --git a/pyproject.toml b/pyproject.toml index 39332c0..bdac315 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ dependencies = [ "boto3>=1.40.6", "structlog>=25.4.0", "watchdog>=6.0.0", - "pydantic>=2.11.2", + "pydantic>=2.12.5", "boto3-stubs[ssm]" ] @@ -36,10 +36,10 @@ dev = [ # Managed by upstream template "pyright>=1.1.408", "pytest>=9.0.2", - "pytest-cov>=7.0.0", + "pytest-cov>=7.1.0", "pytest-randomly>=4.0.1", "sphinx==9.0.4", - "pyinstaller>=6.17.0", + "pyinstaller>=6.19.0", ] [tool.setuptools] diff --git a/ruff.toml b/ruff.toml index e7a44f4..7a0478b 100644 --- a/ruff.toml +++ b/ruff.toml @@ -32,7 +32,7 @@ exclude = [ line-length = 120 indent-width = 4 -target-version = "py312" # TODO: dynamically update this with a copier Task +target-version = "py312" [lint] select = ["ALL"] diff --git a/uv.lock b/uv.lock index 059aedd..8e653ac 100644 --- a/uv.lock +++ b/uv.lock @@ -586,7 +586,7 @@ dev = [ requires-dist = [ { name = "boto3", specifier = ">=1.40.6" }, { name = "boto3-stubs", extras = ["ssm"] }, - { name = "pydantic", specifier = ">=2.11.2" }, + { name = "pydantic", specifier = ">=2.12.5" }, { name = "structlog", specifier = ">=25.4.0" }, { name = "watchdog", 
specifier = ">=6.0.0" }, ] @@ -594,10 +594,10 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "boto3-stubs", extras = ["all"], specifier = ">=1.40.30" }, - { name = "pyinstaller", specifier = ">=6.17.0" }, + { name = "pyinstaller", specifier = ">=6.19.0" }, { name = "pyright", specifier = ">=1.1.408" }, { name = "pytest", specifier = ">=9.0.2" }, - { name = "pytest-cov", specifier = ">=7.0.0" }, + { name = "pytest-cov", specifier = ">=7.1.0" }, { name = "pytest-mock", specifier = ">=3.15.0" }, { name = "pytest-randomly", specifier = ">=4.0.1" }, { name = "pytest-timeout", specifier = ">=2.4.0" }, @@ -4541,7 +4541,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.4" +version = "2.12.5" source = { registry = "https://pypi.org/simple/" } dependencies = [ { name = "annotated-types" }, @@ -4549,51 +4549,80 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540, upload-time = "2025-04-29T20:38:55.02Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900, upload-time = "2025-04-29T20:38:52.724Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = 
"sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, ] [[package]] name = "pydantic-core" -version = "2.33.2" +version = "2.41.5" source = { registry = "https://pypi.org/simple/" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url 
= "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, 
upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, 
upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { 
url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url 
= "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, 
upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, ] [[package]] @@ -4607,7 +4636,7 @@ wheels = [ [[package]] name = "pyinstaller" -version = "6.18.0" +version = "6.19.0" source = { registry = "https://pypi.org/simple/" } dependencies = [ { name = "altgraph" }, @@ -4618,19 +4647,19 @@ dependencies = [ { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/b8/0fe3359920b0a4e7008e0e93ff383003763e3eee3eb31a07c52868722960/pyinstaller-6.18.0.tar.gz", hash = "sha256:cdc507542783511cad4856fce582fdc37e9f29665ca596889c663c83ec8c6ec9", size = 4034976, 
upload-time = "2026-01-13T03:13:23.886Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/63/fd62472b6371d89dc138d40c36d87a50dc2de18a035803bbdc376b4ffac4/pyinstaller-6.19.0.tar.gz", hash = "sha256:ec73aeb8bd9b7f2f1240d328a4542e90b3c6e6fbc106014778431c616592a865", size = 4036072, upload-time = "2026-02-14T18:06:28.718Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/e6/51b0146a1a3eec619e58f5d69fb4e3d0f65a31cbddbeef557c9bb83eeed9/pyinstaller-6.18.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:cb7aa5a71bfa7c0af17a4a4e21855663c89e4bd7c40f1d337c8370636d8847c3", size = 1040056, upload-time = "2026-01-13T03:12:15.397Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9c/a3634c0ec8e1ed31b373b548848b5c0b39b56edc191cf737e697d484ec23/pyinstaller-6.18.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:07785459b3bf8a48889eac0b4d0667ade84aef8930ce030bc7cbb32f41283b33", size = 734971, upload-time = "2026-01-13T03:12:20.912Z" }, - { url = "https://files.pythonhosted.org/packages/2c/04/6756442078ccfcd552ccce636be1574035e62f827ffa1f5d8a0382682546/pyinstaller-6.18.0-py3-none-manylinux2014_i686.whl", hash = "sha256:f998675b7ccb2dabbb1dc2d6f18af61d55428ad6d38e6c4d700417411b697d37", size = 746637, upload-time = "2026-01-13T03:12:29.302Z" }, - { url = "https://files.pythonhosted.org/packages/54/39/fbc56519000cdbf450f472692a7b9b55d42077ce8529f1be631db7b75a36/pyinstaller-6.18.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:779817a0cf69604cddcdb5be1fd4959dc2ce048d6355c73e5da97884df2f3387", size = 744343, upload-time = "2026-01-13T03:12:33.369Z" }, - { url = "https://files.pythonhosted.org/packages/36/f2/50887badf282fee776e83d1e4feab74c026f50a1ea16e109ed939e32aa28/pyinstaller-6.18.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:31b5d109f8405be0b7cddcede43e7b074792bc9a5bbd54ec000a3e779183c2af", size = 741084, upload-time = "2026-01-13T03:12:37.528Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/08/3a1419183e4713ef77d912ecbdd6ef858689ed9deb34d547133f724ca745/pyinstaller-6.18.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4328c9837f1aef4fe1a127d4ff1b09a12ce53c827ce87c94117628b0e1fd098b", size = 740943, upload-time = "2026-01-13T03:12:41.589Z" }, - { url = "https://files.pythonhosted.org/packages/c2/47/309305e36d116f1434b42d91c420ff951fa79b2c398bbd59930c830450be/pyinstaller-6.18.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:3638fc81eb948e5e5eab1d4ad8f216e3fec6d4a350648304f0adb227b746ee5e", size = 740107, upload-time = "2026-01-13T03:12:45.694Z" }, - { url = "https://files.pythonhosted.org/packages/83/0f/a59a95cd1df59ddbc9e74d5a663387551333bcf19a5dd3086f5c81a2e83c/pyinstaller-6.18.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe59da34269e637f97fd3c43024f764586fc319141d245ff1a2e9af1036aa3", size = 739843, upload-time = "2026-01-13T03:12:49.728Z" }, - { url = "https://files.pythonhosted.org/packages/9a/09/e7a870e7205cdbd2f8785010a5d3fe48a9df2591156ee34a8b29b774fa14/pyinstaller-6.18.0-py3-none-win32.whl", hash = "sha256:496205e4fa92ec944f9696eb597962a83aef4d4c3479abfab83d730e1edf016b", size = 1323811, upload-time = "2026-01-13T03:12:55.717Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d5/48eef2002b6d3937ceac2717fe17e9ca3a43a4c9826bafee367dfc75ba85/pyinstaller-6.18.0-py3-none-win_amd64.whl", hash = "sha256:976fabd90ecfbda47571c87055ad73413ec615ff7dea35e12a4304174de78de9", size = 1384389, upload-time = "2026-01-13T03:13:01.993Z" }, - { url = "https://files.pythonhosted.org/packages/1b/8d/1a88e6e94107de3ea1c842fd59c3aa132d344ad8e52ea458ffa9a748726e/pyinstaller-6.18.0-py3-none-win_arm64.whl", hash = "sha256:dba4b70e3c9ba09aab51152c72a08e58a751851548f77ad35944d32a300c8381", size = 1324869, upload-time = "2026-01-13T03:13:08.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/eb/23374721fecfa72677e79800921cb6aceefa6ba48574dc404f3f6c6c3be7/pyinstaller-6.19.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:4190e76b74f0c4b5c5f11ac360928cd2e36ec8e3194d437bf6b8648c7bc0c134", size = 1040563, upload-time = "2026-02-14T18:05:22.436Z" }, + { url = "https://files.pythonhosted.org/packages/cd/7e/dfd724b0b533f5aaec0ee5df406fe2319987ed6964480a706f85478b12ea/pyinstaller-6.19.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8bd68abd812d8a6ba33b9f1810e91fee0f325969733721b78151f0065319ca11", size = 735477, upload-time = "2026-02-14T18:05:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/88/c9/ee3a4101c31f26344e66896c73c1fd6ed8282bf871473365b7f8674af406/pyinstaller-6.19.0-py3-none-manylinux2014_i686.whl", hash = "sha256:1ec54ef967996ca61dacba676227e2b23219878ccce5ee9d6f3aada7b8ed8abf", size = 747143, upload-time = "2026-02-14T18:05:31.488Z" }, + { url = "https://files.pythonhosted.org/packages/da/0a/fc77e9f861be8cf300ac37155f59cc92aff99b29f2ddd78546f563a5b5a6/pyinstaller-6.19.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:4ab2bb52e58448e14ddf9450601bdedd66800465043501c1d8f1cab87b60b122", size = 744849, upload-time = "2026-02-14T18:05:35.492Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e3/6872e020ee758afe0b821663858492c10745608b07150e5e2c824a5b3e1c/pyinstaller-6.19.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:da6d5c6391ccefe73554b9fa29b86001c8e378e0f20c2a4004f836ba537eff63", size = 741590, upload-time = "2026-02-14T18:05:39.59Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/b8db5f1a4b0fb228175f2ea0aa33f949adcc097fbe981cc524f9faf85777/pyinstaller-6.19.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a0fc5f6b3c55aa54353f0c74ffa59b1115433c1850c6f655d62b461a2ed6cbbe", size = 741448, upload-time = "2026-02-14T18:05:45.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/4d/63b0600f2694e9141b83129fbc1c488ec84d5a0770b1448ec154dcd0fee9/pyinstaller-6.19.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:e649ba6bd1b0b89b210ad92adb5fbdc8a42dd2c5ca4f72ef3a0bfec83a424b83", size = 740613, upload-time = "2026-02-14T18:05:49.726Z" }, + { url = "https://files.pythonhosted.org/packages/01/d4/e812ad36178093a0e9fd4b8127577748dd85b0cb71de912229dca21fd741/pyinstaller-6.19.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:481a909c8e60c8692fc60fcb1344d984b44b943f8bc9682f2fcdae305ad297e6", size = 740350, upload-time = "2026-02-14T18:05:54.093Z" }, + { url = "https://files.pythonhosted.org/packages/52/03/b2c2ee41fb8e10fd2a45d21f5ec2ef25852cfb978dbf762972eed59e3d63/pyinstaller-6.19.0-py3-none-win32.whl", hash = "sha256:3c5c251054fe4cfaa04c34a363dcfbf811545438cb7198304cd444756bc2edd2", size = 1324317, upload-time = "2026-02-14T18:06:00.085Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d3/6d5e62b8270e2b53a6065e281b3a7785079b00e9019c8019952828dd1669/pyinstaller-6.19.0-py3-none-win_amd64.whl", hash = "sha256:b5bb6536c6560330d364d91522250f254b107cf69129d9cbcd0e6727c570be33", size = 1384894, upload-time = "2026-02-14T18:06:06.425Z" }, + { url = "https://files.pythonhosted.org/packages/81/65/458cd523308a101a22fd2742893405030cc24994cc74b1b767cecf137160/pyinstaller-6.19.0-py3-none-win_arm64.whl", hash = "sha256:c2d5a539b0bfe6159d5522c8c70e1c0e487f22c2badae0f97d45246223b798ea", size = 1325374, upload-time = "2026-02-14T18:06:12.804Z" }, ] [[package]] @@ -4677,16 +4706,16 @@ wheels = [ [[package]] name = "pytest-cov" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple/" } dependencies = [ { name = "coverage" }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = 
"sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, ] [[package]] @@ -4998,23 +5027,23 @@ wheels = [ [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.15.0" source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] name = "typing-inspection" -version = "0.4.0" +version = "0.4.2" source = { registry = "https://pypi.org/simple/" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222, upload-time = "2025-02-25T17:27:59.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125, upload-time = "2025-02-25T17:27:57.754Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] From 1c75ac5c0edc63abe486b4b0ab2af9d8ade0762d Mon Sep 17 00:00:00 2001 From: Eli Fine <eli88fine@gmail.com> Date: Tue, 31 Mar 2026 00:30:16 +0000 Subject: [PATCH 
2/4] localstack --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1f02a37..8f02edd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,7 +78,7 @@ jobs: UV_PYTHON: ${{ matrix.python-version }} services: localstack: - image: localstack/localstack:4.1.1 + image: localstack/localstack:4.14.0 ports: - 4566:4566 env: From 69a10c53a87d10db10bdce0322921a3058f68b22 Mon Sep 17 00:00:00 2001 From: Eli Fine <eli88fine@gmail.com> Date: Tue, 31 Mar 2026 00:34:33 +0000 Subject: [PATCH 3/4] py313 --- .copier-answers.yml | 4 +- .devcontainer/devcontainer.json | 4 +- .github/workflows/ci.yaml | 10 ++--- .python-version | 2 +- .readthedocs.yaml | 2 +- pyproject.toml | 4 +- ruff.toml | 2 +- src/cloud_courier/load_config.py | 2 +- tests/conftest.py | 2 +- tests/unit/test_main.py | 2 +- uv.lock | 68 +------------------------------- 11 files changed, 18 insertions(+), 84 deletions(-) diff --git a/.copier-answers.yml b/.copier-answers.yml index 6af7586..3e122ab 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -14,9 +14,9 @@ package_name: cloud-courier primary_author: Eli Fine pull_from_ecr: false python_ci_versions: -- 3.12.7 +- 3.13.9 python_package_registry: PyPI -python_version: 3.12.7 +python_version: 3.13.9 repo_name: cloud-courier repo_org_name: LabAutomationAndScreening ssh_port_number: 61335 diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index e4bbd51..363e39c 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -20,7 +20,7 @@ }, "ghcr.io/devcontainers/features/python:1.8.0": { // https://github.com/devcontainers/features/blob/main/src/python/devcontainer-feature.json - "version": "3.12.7", + "version": "3.13.9", "enableShared": true, "installTools": false, "optimize": true @@ -78,5 +78,5 @@ "initializeCommand": "sh .devcontainer/initialize-command.sh", "onCreateCommand": 
"sh .devcontainer/on-create-command.sh", "postStartCommand": "sh .devcontainer/post-start-command.sh" - // Devcontainer context hash (do not manually edit this, it's managed by a pre-commit hook): bb54c7d2 # spellchecker:disable-line + // Devcontainer context hash (do not manually edit this, it's managed by a pre-commit hook): 48063975 # spellchecker:disable-line } diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8f02edd..08d3800 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -51,7 +51,7 @@ jobs: contents: write # needed for mutex id-token: write # needed to assume OIDC roles (e.g. for downloading from CodeArtifact) with: - python-version: 3.12.7 + python-version: 3.13.9 test: needs: @@ -65,11 +65,11 @@ jobs: python-version: - - 3.12.7 + - 3.13.9 include: - os: "ubuntu-24.04" - python-version: "3.12.7" + python-version: "3.13.9" JOB_MATCHING_DEV_ENV: true runs-on: ${{ matrix.os }} @@ -129,7 +129,7 @@ jobs: - windows-2025 python-version: - - 3.12.7 + - 3.13.9 runs-on: ${{ matrix.os }} timeout-minutes: 8 @@ -166,7 +166,7 @@ jobs: matrix: python-version: - - 3.12.7 + - 3.13.9 runs-on: ubuntu-24.04 diff --git a/.python-version b/.python-version index 56bb660..655ff07 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.12.7 +3.13.9 diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 3299cea..3c801fc 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -8,7 +8,7 @@ version: 2 build: os: ubuntu-24.04 tools: - python: "3.12" + python: "3.13" # Build documentation in the "docs/" directory with Sphinx sphinx: diff --git a/pyproject.toml b/pyproject.toml index bdac315..fe703f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,11 +7,11 @@ authors = [ ] readme = "README.md" -requires-python = ">=3.12.7" +requires-python = ">=3.13.9" classifiers = [ "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ diff 
--git a/ruff.toml b/ruff.toml index 7a0478b..ed8dd8d 100644 --- a/ruff.toml +++ b/ruff.toml @@ -32,7 +32,7 @@ exclude = [ line-length = 120 indent-width = 4 -target-version = "py312" +target-version = "py313" [lint] select = ["ALL"] diff --git a/src/cloud_courier/load_config.py b/src/cloud_courier/load_config.py index 5883c21..c6da694 100644 --- a/src/cloud_courier/load_config.py +++ b/src/cloud_courier/load_config.py @@ -32,7 +32,7 @@ def _get_ssm_param_values(ssm_client: SSMClient, prefix: str) -> dict[str, str]: response = ssm_client.describe_parameters( ParameterFilters=[{"Key": "Name", "Option": "BeginsWith", "Values": [prefix]}], MaxResults=50, # AWS allows up to 50 results per call - NextToken=next_token if next_token else "", + NextToken=next_token or "", ) # Add parameters from this page diff --git a/tests/conftest.py b/tests/conftest.py index c455abd..155331b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,7 +25,7 @@ def localstack_profile(mocker: MockerFixture) -> None: @pytest.fixture -def flag_file_dir() -> Generator[str, None, None]: +def flag_file_dir() -> Generator[str]: with tempfile.TemporaryDirectory() as temp_dir: yield temp_dir diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py index b9df38b..597f0ec 100644 --- a/tests/unit/test_main.py +++ b/tests/unit/test_main.py @@ -63,7 +63,7 @@ class MainMixin: def _setup( self, mock_path_to_aws_credentials: None, # noqa: ARG002 # pytest.usefixture cannot be used on a fixturet - flag_file_dir: Generator[str, None, None], + flag_file_dir: Generator[str], ): self.flag_file_dir = str(flag_file_dir) diff --git a/uv.lock b/uv.lock index 8e653ac..a878606 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.12.7" +requires-python = ">=3.13.9" [[package]] name = "alabaster" @@ -526,19 +526,6 @@ version = "3.4.1" source = { registry = "https://pypi.org/simple/" } sdist = { url = 
"https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" }, 
- { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload-time = "2024-12-24T18:11:03.142Z" }, { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload-time = "2024-12-24T18:11:05.834Z" }, { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload-time = "2024-12-24T18:11:07.064Z" }, { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload-time = "2024-12-24T18:11:08.374Z" }, @@ -621,17 +608,6 @@ version = "7.10.6" source = { registry = "https://pypi.org/simple/" } sdist = { url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90", size = 823736, upload-time = 
"2025-08-29T15:35:16.668Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea", size = 217324, upload-time = "2025-08-29T15:33:29.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634", size = 217560, upload-time = "2025-08-29T15:33:30.748Z" }, - { url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6", size = 249053, upload-time = "2025-08-29T15:33:32.041Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9", size = 251802, upload-time = "2025-08-29T15:33:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c", size = 252935, upload-time = "2025-08-29T15:33:34.909Z" }, - { url = "https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a", size = 250855, 
upload-time = "2025-08-29T15:33:36.922Z" }, - { url = "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5", size = 248974, upload-time = "2025-08-29T15:33:38.175Z" }, - { url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972", size = 250409, upload-time = "2025-08-29T15:33:39.447Z" }, - { url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d", size = 219724, upload-time = "2025-08-29T15:33:41.172Z" }, - { url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629", size = 220536, upload-time = "2025-08-29T15:33:42.524Z" }, - { url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80", size = 219171, upload-time = "2025-08-29T15:33:43.974Z" }, { url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6", size = 217351, upload-time = "2025-08-29T15:33:45.438Z" }, { url = 
"https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80", size = 217600, upload-time = "2025-08-29T15:33:47.269Z" }, { url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003", size = 248600, upload-time = "2025-08-29T15:33:48.953Z" }, @@ -754,16 +730,6 @@ version = "3.0.2" source = { registry = "https://pypi.org/simple/" } sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" 
}, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, @@ -4563,20 +4529,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = 
"2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, @@ -4619,10 +4571,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, { url = 
"https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, ] [[package]] @@ -4950,17 +4898,6 @@ version = "3.2.0" source = { registry = "https://pypi.org/simple/" } sdist = { url = "https://files.pythonhosted.org/packages/02/fc/37b02f6094dbb1f851145330460532176ed2f1dc70511a35828166c41e52/time_machine-3.2.0.tar.gz", hash = 
"sha256:a4ddd1cea17b8950e462d1805a42b20c81eb9aafc8f66b392dd5ce997e037d79", size = 14804, upload-time = "2025-12-17T23:33:02.599Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/8b/080c8eedcd67921a52ba5bd0e075362062509ab63c86fc1a0442fad241a6/time_machine-3.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cc4bee5b0214d7dc4ebc91f4a4c600f1a598e9b5606ac751f42cb6f6740b1dbb", size = 19255, upload-time = "2025-12-17T23:31:58.057Z" }, - { url = "https://files.pythonhosted.org/packages/66/17/0e5291e9eb705bf8a5a1305f826e979af307bbeb79def4ddbf4b3f9a81e0/time_machine-3.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3ca036304b4460ae2fdc1b52dd8b1fa7cf1464daa427fc49567413c09aa839c1", size = 15360, upload-time = "2025-12-17T23:31:59.048Z" }, - { url = "https://files.pythonhosted.org/packages/8b/e8/9ab87b71d2e2b62463b9b058b7ae7ac09fb57f8fcd88729dec169d304340/time_machine-3.2.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5442735b41d7a2abc2f04579b4ca6047ed4698a8338a4fec92c7c9423e7938cb", size = 33029, upload-time = "2025-12-17T23:32:00.413Z" }, - { url = "https://files.pythonhosted.org/packages/4b/26/b5ca19da6f25ea905b3e10a0ea95d697c1aeba0404803a43c68f1af253e6/time_machine-3.2.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:97da3e971e505cb637079fb07ab0bcd36e33279f8ecac888ff131f45ef1e4d8d", size = 34579, upload-time = "2025-12-17T23:32:01.431Z" }, - { url = "https://files.pythonhosted.org/packages/79/ca/6ac7ad5f10ea18cc1d9de49716ba38c32132c7b64532430d92ef240c116b/time_machine-3.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3cdda6dee4966e38aeb487309bb414c6cb23a81fc500291c77a8fcd3098832e7", size = 35961, upload-time = "2025-12-17T23:32:02.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/67/390dd958bed395ab32d79a9fe61fe111825c0dd4ded54dbba7e867f171e6/time_machine-3.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:33d9efd302a6998bcc8baa4d84f259f8a4081105bd3d7f7af7f1d0abd3b1c8aa", size = 34668, upload-time = "2025-12-17T23:32:03.585Z" }, - { url = "https://files.pythonhosted.org/packages/da/57/c88fff034a4e9538b3ae7c68c9cfb283670b14d17522c5a8bc17d29f9a4b/time_machine-3.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3a0b0a33971f14145853c9bd95a6ab0353cf7e0019fa2a7aa1ae9fddfe8eab50", size = 32891, upload-time = "2025-12-17T23:32:04.656Z" }, - { url = "https://files.pythonhosted.org/packages/2d/70/ebbb76022dba0fec8f9156540fc647e4beae1680c787c01b1b6200e56d70/time_machine-3.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2d0be9e5f22c38082d247a2cdcd8a936504e9db60b7b3606855fb39f299e9548", size = 34080, upload-time = "2025-12-17T23:32:06.146Z" }, - { url = "https://files.pythonhosted.org/packages/db/9a/2ca9e7af3df540dc1c79e3de588adeddb7dcc2107829248e6969c4f14167/time_machine-3.2.0-cp312-cp312-win32.whl", hash = "sha256:3f74623648b936fdce5f911caf386c0a0b579456410975de8c0dfeaaffece1d8", size = 17371, upload-time = "2025-12-17T23:32:07.164Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ce/21d23efc9c2151939af1b7ee4e60d86d661b74ef32b8eaa148f6fe8c899c/time_machine-3.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:34e26a41d994b5e4b205136a90e9578470386749cc9a2ecf51ca18f83ce25e23", size = 18132, upload-time = "2025-12-17T23:32:08.447Z" }, - { url = "https://files.pythonhosted.org/packages/2f/34/c2b70be483accf6db9e5d6c3139bce3c38fe51f898ccf64e8d3fe14fbf4d/time_machine-3.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:0615d3d82c418d6293f271c348945c5091a71f37e37173653d5c26d0e74b13a8", size = 16930, upload-time = "2025-12-17T23:32:09.477Z" }, { url = 
"https://files.pythonhosted.org/packages/ee/cd/43ad5efc88298af3c59b66769cea7f055567a85071579ed40536188530c1/time_machine-3.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c421a8eb85a4418a7675a41bf8660224318c46cc62e4751c8f1ceca752059090", size = 19318, upload-time = "2025-12-17T23:32:10.518Z" }, { url = "https://files.pythonhosted.org/packages/b0/f6/084010ef7f4a3f38b5a4900923d7c85b29e797655c4f6ee4ce54d903cca8/time_machine-3.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f4e758f7727d0058c4950c66b58200c187072122d6f7a98b610530a4233ea7b", size = 15390, upload-time = "2025-12-17T23:32:11.625Z" }, { url = "https://files.pythonhosted.org/packages/25/aa/1cabb74134f492270dc6860cb7865859bf40ecf828be65972827646e91ad/time_machine-3.2.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:154bd3f75c81f70218b2585cc12b60762fb2665c507eec5ec5037d8756d9b4e0", size = 33115, upload-time = "2025-12-17T23:32:13.219Z" }, @@ -5061,9 +4998,6 @@ version = "6.0.0" source = { registry = "https://pypi.org/simple/" } sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, - { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, From 3c6a65d4fa490bb58bc1f826f9efabb47d3e1315 Mon Sep 17 00:00:00 2001 From: Eli Fine <eli88fine@gmail.com> Date: Tue, 31 Mar 2026 12:48:36 +0000 Subject: [PATCH 4/4] tag --- .copier-answers.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.copier-answers.yml b/.copier-answers.yml index 3e122ab..6770cb1 100644 --- a/.copier-answers.yml +++ b/.copier-answers.yml @@ -1,5 +1,5 @@ # Changes here will be overwritten by Copier -_commit: v0.0.62-2-g93c5fcb +_commit: v0.0.63 _src_path: gh:LabAutomationAndScreening/copier-python-package-template configure_python_asyncio: false configure_vcrpy: false