diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..9c52e9a --- /dev/null +++ b/Makefile @@ -0,0 +1,87 @@ +# Makefile for deploying Coding Agents to Databricks Apps +# +# Usage: +# make deploy PROFILE=daveok PAT=dapi... +# make deploy PROFILE=daveok # prompts for PAT interactively +# make redeploy PROFILE=daveok # skip secret setup, just sync + deploy +# make status PROFILE=daveok # check app status +# make logs PROFILE=daveok # tail app logs + +# Configuration +PROFILE ?= DEFAULT +APP_NAME ?= coding-agents +SECRET_SCOPE ?= $(APP_NAME)-secrets +SECRET_KEY ?= databricks-token + +# Resolve user email and workspace path from the profile +USER_EMAIL = $(shell databricks current-user me --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('userName',''))") +WORKSPACE_PATH = /Workspace/Users/$(USER_EMAIL)/apps/$(APP_NAME) + +.PHONY: help deploy redeploy create-app setup-secret sync deploy-app status logs clean-secret + +help: ## Show this help + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-18s\033[0m %s\n", $$1, $$2}' + +deploy: create-app setup-secret sync deploy-app ## Full deploy: create app, set secret, sync, deploy + @echo "" + @echo "Deployment complete! App URL:" + @databricks apps get $(APP_NAME) --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('url','(pending)'))" + +redeploy: sync deploy-app ## Redeploy: sync + deploy (skip secret setup) + @echo "" + @echo "Redeployment complete!" + +create-app: ## Create the Databricks App (idempotent) + @echo "==> Checking if app '$(APP_NAME)' exists..." + @if databricks apps get $(APP_NAME) --profile $(PROFILE) >/dev/null 2>&1; then \ + echo " App '$(APP_NAME)' already exists, skipping create."; \ + else \ + echo " Creating app '$(APP_NAME)'..."; \ + databricks apps create $(APP_NAME) --profile $(PROFILE); \ + fi + +setup-secret: ## Create secret scope and store PAT + @echo "==> Setting up DATABRICKS_TOKEN secret..." + @# Create scope if it doesn't exist + @if databricks secrets list-scopes --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; scopes=[s['name'] for s in json.load(sys.stdin).get('scopes',[])]; exit(0 if '$(SECRET_SCOPE)' in scopes else 1)" 2>/dev/null; then \ + echo " Secret scope '$(SECRET_SCOPE)' already exists."; \ + else \ + echo " Creating secret scope '$(SECRET_SCOPE)'..."; \ + databricks secrets create-scope $(SECRET_SCOPE) --profile $(PROFILE); \ + fi + @# Store the PAT - prompt if not provided + @if [ -z "$(PAT)" ]; then \ + echo " Enter your Databricks PAT (will not echo):"; \ + read -s pat_value && \ + echo "$$pat_value" | databricks secrets put-secret $(SECRET_SCOPE) $(SECRET_KEY) --profile $(PROFILE); \ + else \ + echo "$(PAT)" | databricks secrets put-secret $(SECRET_SCOPE) $(SECRET_KEY) --profile $(PROFILE); \ + fi + @echo " Secret stored in $(SECRET_SCOPE)/$(SECRET_KEY)" + @# Link secret to app resource + @echo " Linking secret to app resource 'DATABRICKS_TOKEN'..." 
+ @curl -s -X PATCH \ + "$$(databricks auth env --profile $(PROFILE) 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin)['env']['DATABRICKS_HOST'])")/api/2.0/apps/$(APP_NAME)" \ + -H "Authorization: Bearer $$(databricks auth token --profile $(PROFILE) 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])")" \ + -H "Content-Type: application/json" \ + -d '{"resources":[{"name":"DATABRICKS_TOKEN","description":"PAT for model serving access","secret":{"scope":"$(SECRET_SCOPE)","key":"$(SECRET_KEY)","permission":"READ"}}]}' \ + >/dev/null + @echo " App resource linked." + +sync: ## Sync local files to Databricks workspace + @echo "==> Syncing to $(WORKSPACE_PATH)..." + databricks sync . $(WORKSPACE_PATH) --watch=false --profile $(PROFILE) + +deploy-app: ## Deploy the app from workspace + @echo "==> Deploying app '$(APP_NAME)'..." + databricks apps deploy $(APP_NAME) --source-code-path $(WORKSPACE_PATH) --profile $(PROFILE) --no-wait + +status: ## Check app status + @databricks apps get $(APP_NAME) --profile $(PROFILE) + +logs: ## Tail app logs + databricks apps logs $(APP_NAME) --profile $(PROFILE) + +clean-secret: ## Remove secret scope (destructive) + @echo "==> Removing secret scope '$(SECRET_SCOPE)'..." + databricks secrets delete-scope $(SECRET_SCOPE) --profile $(PROFILE) diff --git a/agents/build-feature.md b/agents/build-feature.md new file mode 100644 index 0000000..9a35777 --- /dev/null +++ b/agents/build-feature.md @@ -0,0 +1,66 @@ +--- +name: build-feature +description: End-to-end feature builder. Chains prd-writer → test-generator → implementer → web-devloop-tester in TDD flow. Use when asked to "build", "create", or "implement" a feature from scratch. Orchestrates the full cycle including bug fix loops and visual UI testing. +tools: Read, Write, Edit, Glob, Grep, Bash, Agent, AskUserQuestion, WebSearch, WebFetch +--- + +# Role +You are a tech lead orchestrating a TDD feature build. You coordinate four phases and handle failures. + +# Phase 1: PRD +1. Invoke yourself as a prd-writer: interview the user, write `docs/prd/.md` +2. Do NOT proceed until the user approves the PRD +3. PRD must have status `READY_FOR_IMPLEMENTATION` before moving on + +# Phase 2: Tests (TDD) +1. Read the approved PRD +2. Extract all Acceptance Criteria (AC-*) +3. Scan the codebase for test framework and conventions +4. Write failing tests that define the contract — one or more tests per AC +5. Run the tests to confirm they fail for the right reasons (missing implementation, not broken tests) +6. Update PRD status to `TESTS_WRITTEN` + +# Phase 3: Implementation +1. Read the PRD and all test files +2. Run the test suite to see current failures +3. Create an implementation plan, present it to the user for approval +4. Implement code to make tests pass, working through one group at a time +5. After each group, run tests to verify progress + +# Bug Fix Loop +If tests fail after implementation: + +1. Read the failure output carefully +2. Identify whether the bug is in the **test** or the **implementation** +3. If test is wrong (doesn't match PRD): fix the test +4. If implementation is wrong: fix the code +5. Re-run tests +6. **Max 3 fix loops** — if still failing after 3 rounds, stop and report to the user with: + - Which tests are failing + - The error messages + - Your hypothesis on the root cause + - Ask the user how to proceed + +# Phase 4: Visual Testing (Web Apps Only) +If the feature has a UI component (React, Vue, Streamlit, Dash, etc.): + +1. 
Spawn a `web-devloop-tester` agent (subagent_type: `fe-specialized-agents:web-devloop-tester`) +2. Tell it to: start the dev server, navigate to the relevant page, take screenshots, check console for errors, and test key interactions from the AC-* list +3. Review the tester's report: + - **All clear** → proceed to Completion + - **Issues found** → create fix tasks for the implementer, then re-test +4. **Max 3 visual fix loops** — if issues persist after 3 rounds, stop and report to the user with screenshots and logs + +Skip this phase for: +- CLI tools, libraries, backend-only APIs +- Projects with no dev server or browser UI + +# Completion +When all tests pass and visual testing is complete (or skipped): +1. Run the full test suite one final time +2. Update PRD status to `COMPLETE` +3. Summarize what was built: + - Files created/modified + - Test coverage (AC-* mapping) + - Visual test results (screenshots, if applicable) + - Any open items or manual testing needed diff --git a/agents/implementer.md b/agents/implementer.md new file mode 100644 index 0000000..2f6d088 --- /dev/null +++ b/agents/implementer.md @@ -0,0 +1,59 @@ +--- +name: implementer +description: Reads a PRD and makes all tests pass. Implements code to satisfy the test suite written by test-generator. Use after test-generator has written failing tests. Runs tests iteratively until green. +tools: Read, Write, Edit, Glob, Grep, Bash, Agent +--- + +# Role +You are a senior software engineer who makes failing tests pass. You implement exactly what's needed to satisfy the test suite and PRD requirements — nothing more. + +# Startup +1. Read the PRD file specified (or scan `docs/prd/` for files with status `TESTS_WRITTEN`) +2. Read ALL test files listed in the PRD status section +3. Run the test suite to see the current failures +4. Read any files referenced in the PRD's Technical Notes or Dependencies sections +5. Scan the codebase with Glob/Grep to understand existing patterns and architecture + +# Planning Phase +Before writing any code, create a numbered implementation plan: + +1. List every failing test and what it expects +2. Group tests by module/component +3. Identify files to create or modify +4. Note the order of operations (what depends on what) +5. Flag any Open Questions from the PRD that block implementation + +Present the plan and wait for approval before proceeding. + +# Implementation Phase — Red-Green Loop +For each group of related tests: + +1. **Read the tests** — understand exactly what they expect +2. **Write minimal code** to make those tests pass +3. **Run tests** — check if they pass +4. **If tests fail** — read the error, fix the code, run again +5. **Repeat** until that group is green +6. **Commit** — use `git commit -m "message"` directly +7. Move to the next group + +Rules: +- **Read before writing** — always read existing files before modifying +- **Follow existing patterns** — match the codebase's style and conventions +- **Keep it simple** — don't over-engineer; make the tests pass +- **Max 3 fix attempts per test** — if a test won't pass after 3 tries, flag it and move on + +# Final Validation +After all implementation: + +1. Run the FULL test suite +2. If any tests still fail, attempt fixes (max 2 more rounds) +3. 
If tests still fail after retries, document the failures + +# Handoff +When complete, update the PRD status: + +> **Status: IMPLEMENTED** +> Commits: +> Test results: +> If all green: **Status: COMPLETE** +> If failures remain: **Status: NEEDS_REVIEW** with failure details diff --git a/agents/prd-writer.md b/agents/prd-writer.md new file mode 100644 index 0000000..baf4aa0 --- /dev/null +++ b/agents/prd-writer.md @@ -0,0 +1,81 @@ +--- +name: prd-writer +description: Use when creating a new feature, epic, or project requirement. Interviews the user with clarifying questions, then generates a structured PRD markdown file ready for implementation. Use proactively when asked about new features or "what should we build". +tools: Read, Write, Glob, Grep, AskUserQuestion, WebSearch, WebFetch +--- + +# Role +You are a senior product manager who turns raw ideas into implementation-ready PRDs through Socratic questioning. + +# Discovery Phase +Before writing anything, interview the user with numbered clarifying questions (max 6 per round) covering: + +1. **Problem** — What problem are we solving and who does it affect? +2. **Success metrics** — How will we know this worked? What are the acceptance criteria? +3. **Scope boundaries** — What is explicitly OUT of scope? +4. **Technical constraints** — Any dependencies, existing systems, or limitations? +5. **Priority & timeline** — How urgent is this? What's the desired delivery window? +6. **Edge cases** — What happens when things go wrong? Error states? + +Use AskUserQuestion to present these as structured questions. WAIT for answers before proceeding. Ask follow-up rounds if answers are vague or incomplete. + +# Research Phase +If the feature involves external APIs, libraries, or patterns: +- Use WebSearch to find current best practices +- Use Glob/Grep to scan the existing codebase for related patterns, data models, and conventions +- Reference any existing PRDs in `docs/prd/` to follow established format and naming + +# Output Format +Write the PRD to `docs/prd/.md` using this structure: + +```markdown +# PRD: +**Author:** | **Date:** | **Status:** DRAFT + +## Problem Statement + + +## User Personas & Stories +- As a [user type], I want [action] so that [outcome] +- ... + +## Functional Requirements +1. FR-1: +2. FR-2: ... + +## Non-Functional Requirements +1. NFR-1: +2. NFR-2: ... + +## Acceptance Criteria +1. AC-1: Given [context], when [action], then [result] +2. AC-2: ... + +## Out of Scope +- + +## Dependencies +- + +## Open Questions +- + +## Technical Notes +- +- +``` + +# Iteration +After writing the first draft: +1. Present a summary to the user +2. Ask if any sections need refinement +3. Update the PRD based on feedback +4. Repeat until the user approves + +# Handoff +Once approved, update the status line and append: + +> **Status: READY_FOR_IMPLEMENTATION** +> Next steps (TDD flow): +> 1. test-generator writes failing tests from the Acceptance Criteria +> 2. implementer makes all tests pass diff --git a/agents/test-generator.md b/agents/test-generator.md new file mode 100644 index 0000000..f2f2d21 --- /dev/null +++ b/agents/test-generator.md @@ -0,0 +1,56 @@ +--- +name: test-generator +description: Reads a PRD's acceptance criteria and generates comprehensive tests BEFORE implementation (TDD). Maps each AC-* criterion to one or more test cases. Tests should initially fail — that's expected. Use after prd-writer and BEFORE the implementer. 
+tools: Read, Write, Edit, Glob, Grep, Bash +--- + +# Role +You are a senior QA engineer who writes tests FIRST (TDD style). You translate acceptance criteria into failing tests that define the contract the implementer must satisfy. + +# Startup +1. Read the PRD file specified by the user (or scan `docs/prd/` for files with status `READY_FOR_IMPLEMENTATION`) +2. Extract all Acceptance Criteria (AC-*) +3. Scan the codebase to understand the test framework, conventions, and existing test patterns +4. If code already exists, read it to understand the interfaces; if not, define the expected interfaces from the PRD + +# Test Strategy +Before writing tests, produce a test matrix: + +| AC | Test Name | Type | Description | +|----|-----------|------|-------------| +| AC-1 | test_... | unit | ... | +| AC-1 | test_... | integration | ... | +| AC-2 | test_... | unit | ... | + +Every AC must have at least one test. Include: +- **Happy path** — the AC scenario works as described +- **Edge cases** — boundary values, empty inputs, max limits +- **Error cases** — what happens when preconditions aren't met + +# Implementation Rules +1. **Match existing test patterns** — use the same framework, fixtures, helpers, and directory structure already in the project +2. **Name tests after ACs** — include the AC number in the test name or docstring (e.g., `test_ac1_user_can_login`) +3. **Keep tests independent** — no test should depend on another test's state +4. **Test behavior, not implementation** — tests should survive refactoring +5. **Define interfaces** — if the code doesn't exist yet, write tests against the interfaces/function signatures described in the PRD. Import from expected module paths. + +# Test Frameworks +Detect and use whatever the project already has: +- **Python**: pytest (use `uv run pytest`) +- **JS/TS**: jest, vitest, or mocha (use `npx`) +- **Other**: follow existing patterns + +# TDD Validation +After writing all tests: +1. Run the test suite — **tests SHOULD fail** (no implementation yet) +2. Confirm tests fail for the RIGHT reasons (import errors or missing functions, not syntax errors in tests) +3. List the expected failure count + +# Handoff +When complete, update the PRD status: + +> **Status: TESTS_WRITTEN** +> Test files: +> Failing tests: (expected — no implementation yet) +> AC coverage: +> Next: Ask the implementer to read `docs/prd/.md` and make all tests pass diff --git a/app.py b/app.py index 5526a6b..291a81a 100644 --- a/app.py +++ b/app.py @@ -21,6 +21,13 @@ from utils import ensure_https +# Sanitize DATABRICKS_TOKEN early — the platform sometimes injects trailing +# newlines / whitespace which causes auth failures. Cleaning it here prevents +# the agent from "fixing" it in the terminal and leaking the raw token. 
+_raw_token = os.environ.get("DATABRICKS_TOKEN", "") +if _raw_token != _raw_token.strip(): + os.environ["DATABRICKS_TOKEN"] = _raw_token.strip() + # App version (single source of truth: pyproject.toml) _pyproject_file = os.path.join(os.path.dirname(__file__), 'pyproject.toml') try: @@ -83,6 +90,8 @@ def handle_sigterm(signum, frame): "steps": [ {"id": "git", "label": "Configuring git identity", "status": "pending", "started_at": None, "completed_at": None, "error": None}, {"id": "micro", "label": "Installing micro editor", "status": "pending", "started_at": None, "completed_at": None, "error": None}, + {"id": "gh", "label": "Installing GitHub CLI", "status": "pending", "started_at": None, "completed_at": None, "error": None}, + {"id": "dbcli", "label": "Upgrading Databricks CLI", "status": "pending", "started_at": None, "completed_at": None, "error": None}, {"id": "proxy", "label": "Starting content-filter proxy", "status": "pending", "started_at": None, "completed_at": None, "error": None}, {"id": "claude", "label": "Configuring Claude CLI", "status": "pending", "started_at": None, "completed_at": None, "error": None}, {"id": "codex", "label": "Configuring Codex CLI", "status": "pending", "started_at": None, "completed_at": None, "error": None}, @@ -117,6 +126,11 @@ def _run_step(step_id, command): env = os.environ.copy() if not env.get("HOME") or env["HOME"] == "/": env["HOME"] = "/app/python/source_code" + home = env.get("HOME", "/app/python/source_code") + # Ensure uv and other tools in ~/.local/bin are on PATH + local_bin = os.path.join(home, ".local", "bin") + if local_bin not in env.get("PATH", ""): + env["PATH"] = f"{local_bin}:{env.get('PATH', '')}" env.pop("DATABRICKS_CLIENT_ID", None) env.pop("DATABRICKS_CLIENT_SECRET", None) @@ -198,14 +212,14 @@ def _setup_git_config(): f.write('\n') f.write('echo "[post-commit] $(date +%H:%M:%S) syncing $REPO_ROOT" >> "$SYNC_LOG"\n') f.write('\n') - f.write('# Use venv python directly (avoids fragile source activate)\n') - f.write('VENV_PYTHON="/app/python/source_code/.venv/bin/python"\n') - f.write('SYNC_SCRIPT="/app/python/source_code/sync_to_workspace.py"\n') + f.write('# Use uv run so sync script gets the correct Python + deps\n') + f.write('APP_DIR="/app/python/source_code"\n') + f.write('SYNC_SCRIPT="$APP_DIR/sync_to_workspace.py"\n') f.write('\n') - f.write('if [ -x "$VENV_PYTHON" ] && [ -f "$SYNC_SCRIPT" ]; then\n') - f.write(' nohup "$VENV_PYTHON" "$SYNC_SCRIPT" "$REPO_ROOT" >> "$SYNC_LOG" 2>&1 & disown\n') + f.write('if [ -f "$SYNC_SCRIPT" ]; then\n') + f.write(' nohup uv run --project "$APP_DIR" python "$SYNC_SCRIPT" "$REPO_ROOT" >> "$SYNC_LOG" 2>&1 & disown\n') f.write('else\n') - f.write(' echo "[post-commit] $(date +%H:%M:%S) SKIP: venv=$VENV_PYTHON script=$SYNC_SCRIPT" >> "$SYNC_LOG"\n') + f.write(' echo "[post-commit] $(date +%H:%M:%S) SKIP: sync script not found" >> "$SYNC_LOG"\n') f.write('fi\n') os.chmod(post_commit, 0o755) logger.info(f"Post-commit hook written to {post_commit}") @@ -252,19 +266,64 @@ def run_setup(): _run_step("micro", ["bash", "-c", "mkdir -p ~/.local/bin && bash install_micro.sh && mv micro ~/.local/bin/ 2>/dev/null || true"]) + _run_step( + "gh", + [ + "bash", + "-c", + 'GH_VERSION="2.74.1" && ' + "mkdir -p ~/.local/bin && " + 'curl -fsSL "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_amd64.tar.gz" -o /tmp/gh.tar.gz && ' + "tar -xzf /tmp/gh.tar.gz -C /tmp && " + "mv /tmp/gh_${GH_VERSION}_linux_amd64/bin/gh ~/.local/bin/gh && " + "rm -rf /tmp/gh.tar.gz 
/tmp/gh_${GH_VERSION}_linux_amd64 && " + "chmod +x ~/.local/bin/gh && " + "gh config set git_protocol https 2>/dev/null || true && " + # Wrap gh auth login to skip interactive prompts (arrow-key menus break in xterm.js PTY) + "printf '#!/bin/bash\\n" + 'if [ "$1" = "auth" ] && [ "$2" = "login" ]; then\\n' + " shift 2\\n" + ' printf "Y\\\\n" | ~/.local/bin/gh.real auth login -h github.com -p https -w --skip-ssh-key "$@"\\n' + "fi\\n" + 'exec ~/.local/bin/gh.real "$@"\\n\' > ~/.local/bin/gh.wrapper && ' + "mv ~/.local/bin/gh ~/.local/bin/gh.real && " + "mv ~/.local/bin/gh.wrapper ~/.local/bin/gh && " + "chmod +x ~/.local/bin/gh", + ], + ) + + # --- Upgrade Databricks CLI (runtime image ships an older version) --- + _run_step( + "dbcli", + [ + "bash", + "-c", + "mkdir -p ~/.local/bin && " + # Fetch latest release tag from GitHub API + 'DB_CLI_VERSION=$(curl -fsSL "https://api.github.com/repos/databricks/cli/releases/latest" | python3 -c "import sys,json; print(json.load(sys.stdin)[\'tag_name\'].lstrip(\'v\'))") && ' + 'echo "Installing Databricks CLI v${DB_CLI_VERSION}" && ' + 'curl -fsSL "https://github.com/databricks/cli/releases/download/v${DB_CLI_VERSION}/databricks_cli_${DB_CLI_VERSION}_linux_amd64.zip" -o /tmp/dbcli.zip && ' + "unzip -o /tmp/dbcli.zip -d /tmp/dbcli && " + "mv /tmp/dbcli/databricks ~/.local/bin/databricks && " + "rm -rf /tmp/dbcli.zip /tmp/dbcli && " + "chmod +x ~/.local/bin/databricks && " + "databricks --version", + ], + ) + # --- Content-filter proxy (must be running before OpenCode starts) --- # Sanitizes requests/responses between OpenCode and Databricks # (see OpenCode #5028, docs/plans/2026-03-11-litellm-empty-content-blocks-design.md) - _run_step("proxy", ["python", "setup_proxy.py"]) + _run_step("proxy", ["uv", "run", "python", "setup_proxy.py"]) # --- Parallel agent setup (all independent of each other) --- parallel_steps = [ - ("claude", ["python", "setup_claude.py"]), - ("codex", ["python", "setup_codex.py"]), - ("opencode", ["python", "setup_opencode.py"]), - ("gemini", ["python", "setup_gemini.py"]), - ("databricks", ["python", "setup_databricks.py"]), - ("mlflow", ["python", "setup_mlflow.py"]), + ("claude", ["uv", "run", "python", "setup_claude.py"]), + ("codex", ["uv", "run", "python", "setup_codex.py"]), + ("opencode", ["uv", "run", "python", "setup_opencode.py"]), + ("gemini", ["uv", "run", "python", "setup_gemini.py"]), + ("databricks", ["uv", "run", "python", "setup_databricks.py"]), + ("mlflow", ["uv", "run", "python", "setup_mlflow.py"]), ] with ThreadPoolExecutor(max_workers=len(parallel_steps)) as executor: @@ -487,7 +546,7 @@ def read_pty_output(session_id, fd): if session_id not in sessions: break try: - readable, _, errors = select.select([fd], [], [fd], 0.5) + readable, _, errors = select.select([fd], [], [fd], 0.05) if readable or errors: output = os.read(fd, 4096) if not output: @@ -569,7 +628,10 @@ def cleanup_stale_sessions(): warning_threshold = SESSION_TIMEOUT_SECONDS * 0.8 with sessions_lock: - for session_id, session in sessions.items(): + session_snapshot = list(sessions.items()) + + for session_id, session in session_snapshot: + with session["lock"]: idle = now - session["last_poll_time"] if idle > SESSION_TIMEOUT_SECONDS: stale_sessions.append((session_id, session["pid"], session["master_fd"])) @@ -801,22 +863,31 @@ def get_output_batch(): outputs = {} now = time.time() + # Step 1: Resolve session refs under global lock (fast dict lookups only) + resolved = {} with sessions_lock: for sid in session_ids: - if sid not in sessions: - 
continue - session = sessions[sid] + if sid in sessions: + resolved[sid] = sessions[sid] + + # Step 2: Swap buffers under per-session locks (same pattern as get_output) + swapped = {} + for sid, session in resolved.items(): + with session["lock"]: session["last_poll_time"] = now - buffer = session["output_buffer"] - output = "".join(buffer) - buffer.clear() + old_buffer = session["output_buffer"] + session["output_buffer"] = deque(maxlen=1000) exited = session.get("exited", False) timeout_warning = session.pop("timeout_warning", False) - outputs[sid] = { - "output": output, - "exited": exited, - "timeout_warning": timeout_warning - } + swapped[sid] = (old_buffer, exited, timeout_warning) + + # Step 3: Join strings outside all locks + for sid, (old_buffer, exited, timeout_warning) in swapped.items(): + outputs[sid] = { + "output": "".join(old_buffer), + "exited": exited, + "timeout_warning": timeout_warning, + } return jsonify({"outputs": outputs, "shutting_down": shutting_down}) diff --git a/app.yaml b/app.yaml index 4c0dc5c..9d6e516 100644 --- a/app.yaml +++ b/app.yaml @@ -1,4 +1,6 @@ command: + - uv + - run - gunicorn - app:app env: @@ -12,8 +14,5 @@ env: value: databricks-gemini-3-1-pro - name: CODEX_MODEL value: databricks-gpt-5-2 - #OPTIONAL: Move to the new Databricks Gateway if you have access (recommended), otherwise it will default to the older endpoint - - name: DATABRICKS_GATEWAY_HOST - valueFrom: DATABRICKS_GATEWAY_HOST - name: CLAUDE_CODE_DISABLE_AUTO_MEMORY value: 0 diff --git a/app.yaml.template b/app.yaml.template index c29f3a6..277c216 100644 --- a/app.yaml.template +++ b/app.yaml.template @@ -10,11 +10,5 @@ env: value: databricks-claude-opus-4-6 - name: GEMINI_MODEL value: databricks-gemini-3-1-pro - #OPTIONAL: Use the new Databricks AI Gateway if you have access (recommended), otherwise it will default to the older endpoint - - name: DATABRICKS_GATEWAY_HOST - value: https://.ai-gateway..cloud.databricks.com - # NOTE: CLAUDE_CODE_DISABLE_AUTO_MEMORY=0 enables auto memory, allowing Claude Code to - # persist context and history across sessions. This flag is temporary — once Anthropic - # completes the rollout and auto memory is on by default, this can be removed entirely. - name: CLAUDE_CODE_DISABLE_AUTO_MEMORY value: 0 diff --git a/pyproject.toml b/pyproject.toml index 9407e4a..ee91857 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,9 +2,12 @@ name = "coda" version = "0.16.4" description = "CoDA - Coding Agents on Databricks Apps" -requires-python = ">=3.10" +requires-python = ">=3.12" dependencies = [ "flask>=2.0", + "flask-socketio>=5.3", + "simple-websocket>=1.0", + "gunicorn>=21.0", "claude-agent-sdk", "databricks-sdk>=0.20.0", "mlflow[genai]>=3.4", diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index a9c32c9..0000000 --- a/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -flask>=2.0 -flask-socketio>=5.3 -simple-websocket>=1.0 -claude-agent-sdk -databricks-sdk>=0.20.0 -mlflow[genai]>=3.4 -opentelemetry-exporter-otlp-proto-grpc diff --git a/setup_claude.py b/setup_claude.py index 128ef37..b138f1c 100644 --- a/setup_claude.py +++ b/setup_claude.py @@ -1,5 +1,6 @@ import os import json +import shutil import subprocess from pathlib import Path @@ -47,18 +48,29 @@ settings_path.write_text(json.dumps(settings, indent=2)) # 2. 
Write ~/.claude.json with onboarding skip AND MCP servers +mcp_servers = { + "deepwiki": { + "type": "http", + "url": "https://mcp.deepwiki.com/mcp" + }, + "exa": { + "type": "http", + "url": "https://mcp.exa.ai/mcp" + } +} + +# Auto-configure team-memory MCP if URL is provided +team_memory_url = os.environ.get("TEAM_MEMORY_MCP_URL", "").strip().rstrip("/") +if team_memory_url: + mcp_servers["team-memory"] = { + "type": "http", + "url": f"{team_memory_url}/mcp" + } + print(f"Team memory MCP configured: {team_memory_url}/mcp") + claude_json = { "hasCompletedOnboarding": True, - "mcpServers": { - "deepwiki": { - "type": "http", - "url": "https://mcp.deepwiki.com/mcp" - }, - "exa": { - "type": "http", - "url": "https://mcp.exa.ai/mcp" - } - } + "mcpServers": mcp_servers } claude_json_path = home / ".claude.json" @@ -71,22 +83,35 @@ local_bin = home / ".local" / "bin" claude_bin = local_bin / "claude" -if not claude_bin.exists(): - print("Installing Claude Code CLI...") - result = subprocess.run( - ["bash", "-c", "curl -fsSL https://claude.ai/install.sh | bash"], - env={**os.environ, "HOME": str(home)}, - capture_output=True, - text=True - ) - if result.returncode == 0: - print("Claude Code CLI installed successfully") - else: - print(f"CLI install warning: {result.stderr}") +print("Installing/upgrading Claude Code CLI...") +result = subprocess.run( + ["bash", "-c", "curl -fsSL https://claude.ai/install.sh | bash"], + env={**os.environ, "HOME": str(home)}, + capture_output=True, + text=True +) +if result.returncode == 0: + print("Claude Code CLI installed successfully") +else: + print(f"CLI install warning: {result.stderr}") + +# 4. Copy subagent definitions to ~/.claude/agents/ +# These enable TDD workflow: prd-writer → test-generator → implementer → build-feature +agents_src = Path(__file__).parent / "agents" +agents_dst = claude_dir / "agents" +agents_dst.mkdir(exist_ok=True) + +if agents_src.exists(): + copied = [] + for agent_file in agents_src.glob("*.md"): + shutil.copy2(str(agent_file), str(agents_dst / agent_file.name)) + copied.append(agent_file.name) + if copied: + print(f"Subagents installed: {', '.join(copied)}") else: - print(f"Claude Code CLI already installed at {claude_bin}") + print("No agents directory found, skipping subagent setup") -# 4. Create projects directory +# 5. Create projects directory projects_dir = home / "projects" projects_dir.mkdir(exist_ok=True) print(f"Projects directory: {projects_dir}") diff --git a/spawner/Makefile b/spawner/Makefile new file mode 100644 index 0000000..9e519a7 --- /dev/null +++ b/spawner/Makefile @@ -0,0 +1,128 @@ +# Makefile for deploying Coding Agents Spawner to Databricks Apps +# +# Usage: +# make deploy PROFILE=daveok ADMIN_PAT=dapi... 
# full deploy (first time) +# make redeploy PROFILE=daveok # sync + deploy only +# make status PROFILE=daveok +# make logs PROFILE=daveok + +PROFILE ?= DEFAULT +APP_NAME := coding-agents-spawner +SECRET_SCOPE := $(APP_NAME)-secrets +SECRET_KEY := admin-token +TEMPLATE_SRC := /Workspace/Shared/apps/coding-agents + +USER_EMAIL = $(shell databricks current-user me --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('userName',''))") +WORKSPACE_PATH = /Workspace/Users/$(USER_EMAIL)/apps/$(APP_NAME) +HOST = $(shell databricks auth env --profile $(PROFILE) 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin)['env']['DATABRICKS_HOST'])") +TOKEN = $(shell databricks auth token --profile $(PROFILE) 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin)['access_token'])") + +.PHONY: help run deploy redeploy create-app setup-secret sync-template sync deploy-app status logs clean + +run: ## Wait for app to be running and print URL + @echo "==> Waiting for '$(APP_NAME)' to be running..." + @for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15; do \ + STATE=$$(databricks apps get $(APP_NAME) --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('app_status',{}).get('state',''))"); \ + if [ "$$STATE" = "RUNNING" ]; then \ + echo ""; \ + echo "App is RUNNING!"; \ + databricks apps get $(APP_NAME) --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print('URL:', json.load(sys.stdin).get('url','(unknown)'))"; \ + exit 0; \ + fi; \ + echo " State: $$STATE (waiting...)"; \ + sleep 10; \ + done; \ + echo " Timed out waiting for app to reach RUNNING state." + +help: ## Show this help + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-18s\033[0m %s\n", $$1, $$2}' + +deploy: create-app setup-secret sync-template sync deploy-app run ## Full deploy: create app, set admin secret, sync template + spawner, deploy + run + +redeploy: sync deploy-app run ## Redeploy: sync spawner + deploy (skip secret setup) + +create-app: ## Create the spawner app (idempotent) + @echo "==> Checking if app '$(APP_NAME)' exists..." + @if databricks apps get $(APP_NAME) --profile $(PROFILE) >/dev/null 2>&1; then \ + echo " App '$(APP_NAME)' already exists, skipping create."; \ + else \ + echo " Creating app '$(APP_NAME)'..."; \ + databricks apps create $(APP_NAME) --profile $(PROFILE); \ + fi + +setup-secret: ## Create secret scope, store admin PAT, link to app + @echo "==> Setting up ADMIN_TOKEN secret..." 
+ @# Create scope if it doesn't exist + @if databricks secrets list-scopes --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; scopes=json.load(sys.stdin); names=[s['name'] for s in (scopes if isinstance(scopes,list) else scopes.get('scopes',[]))]; exit(0 if '$(SECRET_SCOPE)' in names else 1)" 2>/dev/null; then \ + echo " Secret scope '$(SECRET_SCOPE)' already exists."; \ + else \ + echo " Creating secret scope '$(SECRET_SCOPE)'..."; \ + databricks secrets create-scope $(SECRET_SCOPE) --profile $(PROFILE); \ + fi + @# Store the admin PAT + @if [ -z "$(ADMIN_PAT)" ]; then \ + echo " Enter the admin PAT (will not echo):"; \ + read -s pat_value && \ + echo "$$pat_value" | databricks secrets put-secret $(SECRET_SCOPE) $(SECRET_KEY) --profile $(PROFILE); \ + else \ + echo "$(ADMIN_PAT)" | databricks secrets put-secret $(SECRET_SCOPE) $(SECRET_KEY) --profile $(PROFILE); \ + fi + @echo " Secret stored in $(SECRET_SCOPE)/$(SECRET_KEY)" + @# Grant spawner SP READ on the scope + @echo " Granting spawner SP access..." + @SP_CLIENT_ID=$$(databricks apps get $(APP_NAME) --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('service_principal_client_id',''))") && \ + if [ -n "$$SP_CLIENT_ID" ]; then \ + databricks secrets put-acl $(SECRET_SCOPE) $$SP_CLIENT_ID READ --profile $(PROFILE); \ + echo " Granted READ to $$SP_CLIENT_ID"; \ + else \ + echo " WARNING: Could not find SP client ID, skipping ACL grant."; \ + fi + @# Link secret to app resource + @echo " Linking secret to app resource 'ADMIN_TOKEN'..." + @curl -s -X PATCH \ + "$(HOST)/api/2.0/apps/$(APP_NAME)" \ + -H "Authorization: Bearer $(TOKEN)" \ + -H "Content-Type: application/json" \ + -d '{"resources":[{"name":"ADMIN_TOKEN","description":"Admin PAT for provisioning operations","secret":{"scope":"$(SECRET_SCOPE)","key":"$(SECRET_KEY)","permission":"READ"}}]}' \ + >/dev/null + @echo " App resource linked." + +sync-template: ## Sync coding-agents source to shared template path + @echo "==> Syncing coding-agents template to $(TEMPLATE_SRC)..." + @databricks workspace mkdirs /Workspace/Shared/apps --profile $(PROFILE) 2>/dev/null || true + @cd .. && databricks sync . $(TEMPLATE_SRC) --watch=false --profile $(PROFILE) + @# Override app.yaml with spawner-friendly defaults (no gateway valueFrom) + @echo " Uploading template app.yaml..." 
+ @# Resolve team-memory-mcp app URL (if deployed) + $(eval TEAM_MEMORY_URL := $(shell databricks apps get team-memory-mcp --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('url',''))" 2>/dev/null)) + @printf 'command:\n - uv\n - run\n - gunicorn\n - app:app\nenv:\n - name: HOME\n value: /app/python/source_code\n - name: DATABRICKS_TOKEN\n valueFrom: DATABRICKS_TOKEN\n - name: ANTHROPIC_MODEL\n value: databricks-claude-opus-4-6\n - name: GEMINI_MODEL\n value: databricks-gemini-3-1-pro\n - name: CODEX_MODEL\n value: databricks-gpt-5-2\n - name: DATABRICKS_GATEWAY_HOST\n valueFrom: DATABRICKS_GATEWAY_HOST\n - name: CLAUDE_CODE_DISABLE_AUTO_MEMORY\n value: 0\n' > /tmp/_coda_template_app.yaml + @if [ -n "$(TEAM_MEMORY_URL)" ]; then \ + printf ' - name: TEAM_MEMORY_MCP_URL\n value: %s\n' "$(TEAM_MEMORY_URL)" >> /tmp/_coda_template_app.yaml; \ + echo " Team memory MCP URL: $(TEAM_MEMORY_URL)"; \ + else \ + echo " Team memory MCP: not deployed (skipping)"; \ + fi + @databricks workspace import $(TEMPLATE_SRC)/app.yaml --file /tmp/_coda_template_app.yaml --format AUTO --overwrite --profile $(PROFILE) + @rm -f /tmp/_coda_template_app.yaml + @echo " Template synced." + +sync: ## Sync spawner source to workspace + @echo "==> Syncing spawner to $(WORKSPACE_PATH)..." + @databricks sync . $(WORKSPACE_PATH) --watch=false --profile $(PROFILE) + +deploy-app: ## Deploy the spawner app + @echo "==> Deploying '$(APP_NAME)'..." + @databricks apps deploy $(APP_NAME) --source-code-path $(WORKSPACE_PATH) --profile $(PROFILE) --no-wait + @echo "" + @echo "App URL:" + @databricks apps get $(APP_NAME) --profile $(PROFILE) --output json 2>/dev/null | python3 -c "import sys,json; print(json.load(sys.stdin).get('url','(pending)'))" + +status: ## Check spawner app status + @databricks apps get $(APP_NAME) --profile $(PROFILE) + +logs: ## Tail spawner app logs + @databricks apps logs $(APP_NAME) --profile $(PROFILE) + +clean: ## Remove secret scope (destructive) + @echo "==> Removing secret scope '$(SECRET_SCOPE)'..." + @databricks secrets delete-scope $(SECRET_SCOPE) --profile $(PROFILE) diff --git a/spawner/README.md b/spawner/README.md new file mode 100644 index 0000000..c9566e9 --- /dev/null +++ b/spawner/README.md @@ -0,0 +1,111 @@ +# Coding Agents Spawner + +One-click provisioning of individual [coding-agents](../) Databricks Apps for any developer in your workspace. + +## How It Works + +A developer visits the spawner UI, pastes their Databricks PAT, and clicks **Deploy**. The spawner: + +1. **Resolves identity** — calls SCIM `/Me` with the user's PAT to get their email +2. **Stores the PAT** — creates a secret scope `coding-agents-{user}-secrets` and stores the PAT with a unique UUID key (uses admin token for privileged scope operations) +3. **Creates the app** — `POST /api/2.0/apps` with the user's PAT so they own it; the secret resource (`DATABRICKS_TOKEN`) is included in the creation call +4. **Grants SP access** — gives the app's service principal READ on the secret scope +5. **Deploys** — deploys from the shared template at `/Workspace/Shared/apps/coding-agents` + +The spawned app is named `coding-agents-{username}` (derived from email), e.g., `coding-agents-david-okeeffe`. 
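For scripted onboarding, the same flow the UI drives can be exercised against the spawner's HTTP API (see the API Endpoints table below and the Flask routes in `spawner/app.py`). The sketch below is illustrative only: `SPAWNER_URL` is a placeholder, the PAT value is hypothetical, and reaching a Databricks App programmatically requires an authenticated session (Apps sit behind workspace SSO), which is not shown here.

```python
"""Minimal provisioning client sketch for the spawner API (illustrative)."""
import time

import requests

SPAWNER_URL = "https://coding-agents-spawner-XXXX.databricksapps.com"  # placeholder


def provision(pat: str, poll_interval: int = 10) -> str:
    """Start provisioning and return the spawned app's URL once it is running."""
    resp = requests.post(f"{SPAWNER_URL}/api/provision", json={"pat": pat})
    body = resp.json()
    if not body.get("success"):
        raise RuntimeError(body.get("error", "provisioning failed"))
    if body.get("already_running"):
        return body["app_url"]

    # /api/provision returns immediately; progress is tracked per app name.
    app_name = body["app_name"]
    while True:
        status = requests.get(f"{SPAWNER_URL}/api/provision-status/{app_name}").json()
        if status.get("found") and status.get("status") == "complete":
            return status.get("app_url", "")
        if status.get("found") and status.get("status") == "error":
            raise RuntimeError(status["steps"][-1]["message"])
        time.sleep(poll_interval)
```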
+ +## Architecture + +``` +┌─────────────────────┐ ┌──────────────────────────┐ +│ Spawner App │ │ Shared Template │ +│ (this app) │ │ /Workspace/Shared/apps/ │ +│ │ deploy │ coding-agents/ │ +│ - Admin PAT (env) ├────────►│ - app.py │ +│ - Provisioning API │ │ - app.yaml │ +│ - Spawned apps list│ │ - requirements.txt │ +└─────────────────────┘ └──────────────────────────┘ + │ + │ creates per-user + ▼ +┌─────────────────────────────┐ +│ coding-agents-{user} │ +│ - Owned by user │ +│ - DATABRICKS_TOKEN = PAT │ +│ - Deployed from template │ +└─────────────────────────────┘ +``` + +### Token Model + +| Token | Stored in | Used for | +|-------|-----------|----------| +| **Admin PAT** | `coding-agents-spawner-secrets/admin-token` | Secret scope creation, ACLs, deployment | +| **User PAT** | `coding-agents-{user}-secrets/{uuid}` | App creation (ownership), runtime `DATABRICKS_TOKEN` | + +The admin PAT requires **workspace admin** privileges (for secret scope creation and ACL management). + +The user PAT should have **all access** scopes since Claude Code uses it for model serving, workspace operations, Unity Catalog, clusters, etc. + +## Prerequisites + +- Databricks CLI configured with a profile (`databricks configure --profile `) +- Workspace admin access (for the admin PAT) +- Shared template synced to `/Workspace/Shared/apps/coding-agents` + +## Deploy + +### First time + +```bash +cd spawner +make deploy PROFILE=daveok ADMIN_PAT=dapi... +``` + +This will: +- Create the `coding-agents-spawner` app +- Create secret scope and store the admin PAT +- Sync the coding-agents template to the shared workspace path +- Sync the spawner source and deploy +- Wait for the app to be RUNNING and print the URL + +If you omit `ADMIN_PAT`, it will prompt interactively. + +### Subsequent deploys + +```bash +make redeploy PROFILE=daveok +``` + +Syncs source and redeploys (skips secret setup and template sync). 
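When the shared template itself changes, each spawned app needs a fresh deployment to pick it up. `spawner/app.py` (below) exposes `/api/redeploy-all` (POST) and `/api/redeploy-all/status` (GET) for exactly this, though they are not listed in the endpoint table. A hedged sketch, using the same placeholder URL and assuming an authenticated session:

```python
"""Trigger and watch a redeploy of every spawned coding-agents-* app (illustrative)."""
import time

import requests

SPAWNER_URL = "https://coding-agents-spawner-XXXX.databricksapps.com"  # placeholder

# Kick off the background redeploy job on the spawner.
requests.post(f"{SPAWNER_URL}/api/redeploy-all").raise_for_status()

# Poll until the job reports completion or an error.
while True:
    status = requests.get(f"{SPAWNER_URL}/api/redeploy-all/status").json()
    if not status.get("active") or status.get("status") in ("complete", "error"):
        break
    print(f"redeployed {status.get('completed', 0)}/{status.get('total', 0)} apps")
    time.sleep(10)
```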
+ +### Other targets + +```bash +make status PROFILE=daveok # Check app status +make logs PROFILE=daveok # Tail app logs +make sync-template PROFILE=daveok # Re-sync shared template +make clean PROFILE=daveok # Remove secret scope (destructive) +make help # Show all targets +``` + +## API Endpoints + +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/` | GET | Spawner UI | +| `/health` | GET | Health check | +| `/api/status` | GET | Check if current user has a deployed app | +| `/api/apps` | GET | List all spawned coding-agents apps | +| `/api/provision` | POST | Provision a new app (body: `{"pat": "dapi..."}`) | + +## Files + +``` +spawner/ +├── app.py # Flask app with provisioning logic +├── app.yaml # Databricks App config (exposes ADMIN_TOKEN env) +├── requirements.txt # flask, gunicorn, requests +├── Makefile # Deploy/manage targets +└── README.md # This file +``` diff --git a/spawner/__init__.py b/spawner/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/spawner/app.py b/spawner/app.py new file mode 100644 index 0000000..eef46d1 --- /dev/null +++ b/spawner/app.py @@ -0,0 +1,546 @@ +"""Coding Agents Spawner App -- one-click provisioning of coding-agents for any developer.""" + +import hashlib +import os +import threading +import time + +import requests +from flask import Flask, jsonify, request + +app = Flask(__name__, static_folder="static") + +_raw_host = os.environ.get("DATABRICKS_HOST", "") +DATABRICKS_HOST = ( + _raw_host if _raw_host.startswith("https://") else f"https://{_raw_host}" +).rstrip("/") + +# Admin token for provisioning operations (secret scope, app creation, etc.) +ADMIN_TOKEN = os.environ.get("ADMIN_TOKEN", "").strip() + +# In-memory provision progress, keyed by app_name +# Each entry: {"steps": [...], "status": "in_progress"|"complete"|"error", "app_url": "", "app_name": ""} +_provision_jobs: dict[str, dict] = {} +_provision_lock = threading.Lock() + + +MAX_APP_NAME_LENGTH = 63 + +def app_name_from_email(email: str) -> str: + """Derive app name from user email: david.okeeffe@company.com -> coding-agents-david-okeeffe. + + Databricks app names are limited to 63 characters. If the derived name exceeds + this limit, the slug is truncated and a short hash suffix is appended to + preserve uniqueness. 
+ """ + prefix = "coding-agents-" + username = email.split("@")[0] + slug = username.replace(".", "-").replace("_", "-").lower() + full_name = f"{prefix}{slug}" + + if len(full_name) <= MAX_APP_NAME_LENGTH: + return full_name + + # Truncate slug and append 6-char hash for uniqueness + hash_suffix = hashlib.sha256(slug.encode()).hexdigest()[:6] + max_slug_len = MAX_APP_NAME_LENGTH - len(prefix) - len(hash_suffix) - 1 # -1 for separator + truncated_slug = slug[:max_slug_len].rstrip("-") + return f"{prefix}{truncated_slug}-{hash_suffix}" + + +def resolve_pat_owner(host: str, pat: str) -> str: + """Call /api/2.0/preview/scim/v2/Me to get the email of the PAT owner.""" + resp = requests.get( + f"{host}/api/2.0/preview/scim/v2/Me", + headers={"Authorization": f"Bearer {pat}"}, + ) + resp.raise_for_status() + data = resp.json() + return data.get("userName", "") + + +def store_pat_in_secret_scope( + host: str, oauth_token: str, app_name: str, pat_value: str, secret_key: str +) -> dict: + """Create secret scope (handle 409) and store PAT with unique key.""" + scope_name = f"{app_name}-secrets" + headers = {"Authorization": f"Bearer {oauth_token}"} + + # Create scope -- 409 means it already exists, which is fine + scope_resp = requests.post( + f"{host}/api/2.0/secrets/scopes/create", + headers=headers, + json={"scope": scope_name}, + ) + if scope_resp.status_code not in (200, 409) and "ALREADY_EXISTS" not in scope_resp.text: + raise RuntimeError(f"Failed to create secret scope: {scope_resp.status_code} {scope_resp.text}") + + # Store the PAT with unique key + put_resp = requests.post( + f"{host}/api/2.0/secrets/put", + headers=headers, + json={ + "scope": scope_name, + "key": secret_key, + "string_value": pat_value, + }, + ) + if put_resp.status_code != 200: + raise RuntimeError(f"Failed to store secret: {put_resp.status_code} {put_resp.text}") + + return {"success": True, "scope": scope_name, "key": secret_key} + + +def create_app(host: str, oauth_token: str, app_name: str, scope_name: str, secret_key: str) -> dict: + """Create the Databricks App with secret resource via POST /api/2.0/apps.""" + resp = requests.post( + f"{host}/api/2.0/apps", + headers={"Authorization": f"Bearer {oauth_token}"}, + json={ + "name": app_name, + "resources": [ + { + "name": "DATABRICKS_TOKEN", + "description": "PAT for model serving access", + "secret": { + "scope": scope_name, + "key": secret_key, + "permission": "READ", + }, + } + ], + }, + ) + # 409 means app already exists -- that's fine for re-provisioning + if resp.status_code == 409: + return check_existing_app(host, oauth_token, app_name) + resp.raise_for_status() + return resp.json() + + + +def wait_for_compute_active( + host: str, oauth_token: str, app_name: str, timeout: int = 180, interval: int = 10, +) -> None: + """Poll until compute_status reaches ACTIVE (required before first deploy).""" + headers = {"Authorization": f"Bearer {oauth_token}"} + elapsed = 0 + while elapsed < timeout: + resp = requests.get(f"{host}/api/2.0/apps/{app_name}", headers=headers) + if resp.ok: + compute = resp.json().get("compute_status", {}).get("state", "") + if compute == "ACTIVE": + return + time.sleep(interval) + elapsed += interval + raise RuntimeError(f"Timed out waiting for compute to become ACTIVE after {timeout}s") + + +def deploy_app( + host: str, oauth_token: str, app_name: str, source_code_path: str, +) -> dict: + """Deploy the app via POST /api/2.0/apps/{name}/deployments.""" + resp = requests.post( + f"{host}/api/2.0/apps/{app_name}/deployments", + 
headers={"Authorization": f"Bearer {oauth_token}"}, + json={"source_code_path": source_code_path}, + ) + if not resp.ok: + raise RuntimeError(f"{resp.status_code} from deploy API: {resp.text}") + return resp.json() + + +def grant_sp_secret_access( + host: str, auth_token: str, scope_name: str, sp_id: str +) -> None: + """Grant the app's service principal READ access on the secret scope.""" + resp = requests.post( + f"{host}/api/2.0/secrets/acls/put", + headers={"Authorization": f"Bearer {auth_token}"}, + json={ + "scope": scope_name, + "principal": sp_id, + "permission": "READ", + }, + ) + resp.raise_for_status() + + +def list_spawned_apps(host: str, oauth_token: str) -> list: + """List all coding-agents apps (excluding the spawner itself).""" + resp = requests.get( + f"{host}/api/2.0/apps", + headers={"Authorization": f"Bearer {oauth_token}"}, + ) + resp.raise_for_status() + all_apps = resp.json().get("apps", []) + + # Merge live API state with any in-flight provision jobs + result = [] + seen_names = set() + for a in all_apps: + name = a["name"] + if not name.startswith("coding-agents-") or name == "coding-agents-spawner": + continue + seen_names.add(name) + # If there's an in-flight job, overlay its status + job = _provision_jobs.get(name) + if job and job["status"] == "in_progress": + last_step = job["steps"][-1] if job["steps"] else {} + state = f"PROVISIONING: {last_step.get('message', '...')}" + else: + # List endpoint lacks app_status — derive from compute + deployment + compute = a.get("compute_status", {}).get("state", "") + deploy = a.get("active_deployment", {}).get("status", {}).get("state", "") + if compute == "ACTIVE" and deploy == "SUCCEEDED": + state = "RUNNING" + elif deploy == "IN_PROGRESS": + state = "DEPLOYING" + elif compute == "ACTIVE": + state = "DEPLOYED" + elif not a.get("active_deployment"): + state = "NOT DEPLOYED" + else: + state = compute or "UNKNOWN" + result.append({ + "name": name, + "url": a.get("url", ""), + "creator": a.get("creator", ""), + "state": state, + "compute": a.get("compute_status", {}).get("state", "UNKNOWN"), + "created": a.get("create_time", ""), + }) + + # Include in-flight jobs that haven't appeared in the API yet (app not created yet) + for name, job in _provision_jobs.items(): + if name not in seen_names and job["status"] == "in_progress": + last_step = job["steps"][-1] if job["steps"] else {} + result.append({ + "name": name, + "url": "", + "creator": job.get("email", ""), + "state": f"PROVISIONING: {last_step.get('message', '...')}", + "compute": "PENDING", + "created": "", + }) + + return result + + +def check_existing_app(host: str, oauth_token: str, app_name: str) -> dict: + """Check if an app already exists.""" + resp = requests.get( + f"{host}/api/2.0/apps/{app_name}", + headers={"Authorization": f"Bearer {oauth_token}"}, + ) + if resp.status_code == 200: + data = resp.json() + return { + "deployed": True, + "app_name": app_name, + "app_url": data.get("url", ""), + "state": data.get("app_status", {}).get("state", "UNKNOWN"), + "service_principal_id": data.get("service_principal_id"), + "service_principal_client_id": data.get("service_principal_client_id"), + "service_principal_name": data.get("service_principal_name"), + } + return {"deployed": False} + + +def _update_job(app_name: str, **kwargs): + """Thread-safe update of a provision job's state.""" + with _provision_lock: + if app_name in _provision_jobs: + _provision_jobs[app_name].update(kwargs) + + +def _add_step(app_name: str, step: int, status: str, message: str): + 
"""Thread-safe append of a step to a provision job.""" + entry = {"step": step, "status": status, "message": message} + with _provision_lock: + if app_name in _provision_jobs: + _provision_jobs[app_name]["steps"].append(entry) + + +def provision_app_async(host: str, admin_token: str, pat_value: str, app_name: str): + """Run provisioning in a background thread, updating _provision_jobs as it goes.""" + source_code_path = "/Workspace/Shared/apps/coding-agents" + + try: + scope_name = f"{app_name}-secrets" + secret_key = "databricks-token" + + # Step 1: Store secret + _add_step(app_name, 1, "storing_secret", "Storing token in secret scope...") + store_pat_in_secret_scope(host, admin_token, app_name, pat_value, secret_key) + + # Step 2: Create app + _add_step(app_name, 2, "creating_app", f"Creating app '{app_name}'...") + app_result = create_app(host, pat_value, app_name, scope_name, secret_key) + sp_client_id = app_result.get("service_principal_client_id", "") + + # Step 3: Grant SP access + if sp_client_id: + _add_step(app_name, 3, "granting_access", "Granting service principal access...") + grant_sp_secret_access(host, admin_token, scope_name, sp_client_id) + + # Step 4: Wait for compute + _add_step(app_name, 4, "waiting_for_compute", "Waiting for compute to be ready (60-90s)...") + wait_for_compute_active(host, admin_token, app_name) + + # Step 5: Deploy + _add_step(app_name, 5, "deploying", "Deploying app...") + deploy_app(host, admin_token, app_name, source_code_path) + + # Step 6: Wait for app to be running + _add_step(app_name, 6, "starting", "Waiting for app to start...") + _wait_for_app_running(host, admin_token, app_name) + + app_url = app_result.get("url", app_result.get("app_url", "")) + _add_step(app_name, 7, "complete", "App is running!") + _update_job(app_name, status="complete", app_url=app_url) + + except Exception as exc: + _add_step(app_name, -1, "error", str(exc)) + _update_job(app_name, status="error", error=str(exc)) + + +def _wait_for_app_running(host: str, token: str, app_name: str, timeout: int = 300, interval: int = 10): + """Poll until app_status reaches RUNNING.""" + headers = {"Authorization": f"Bearer {token}"} + elapsed = 0 + while elapsed < timeout: + resp = requests.get(f"{host}/api/2.0/apps/{app_name}", headers=headers) + if resp.ok: + state = resp.json().get("app_status", {}).get("state", "") + if state == "RUNNING": + return + time.sleep(interval) + elapsed += interval + raise RuntimeError(f"Timed out waiting for app to reach RUNNING after {timeout}s") + + +# --- Flask Routes --- + + +@app.route("/") +def index(): + """Serve the spawner UI with user context injected via data attributes.""" + import html as html_mod + + email = request.headers.get("X-Forwarded-Email", "unknown") + app_name = app_name_from_email(email) if email != "unknown" else "coding-agents-you" + + index_path = os.path.join(os.path.dirname(__file__), "static", "index.html") + with open(index_path) as f: + page = f.read() + + # Inject user context as data attributes on + page = page.replace( + "", + f'', + ) + return page + + +@app.route("/health") +def health(): + """Health check endpoint.""" + return jsonify({"status": "ok"}) + + +@app.route("/api/status") +def api_status(): + """Check if user already has a deployed instance.""" + email = request.headers.get("X-Forwarded-Email", "") + host = DATABRICKS_HOST + + app_name = app_name_from_email(email) + result = check_existing_app(host, ADMIN_TOKEN, app_name) + return jsonify(result) + + +@app.route("/api/apps") +def api_list_apps(): + 
"""List all spawned coding-agents apps (with in-flight provision status merged).""" + host = DATABRICKS_HOST + if not ADMIN_TOKEN: + return jsonify({"error": "Admin token not configured"}), 500 + apps = list_spawned_apps(host, ADMIN_TOKEN) + return jsonify({"apps": apps}) + + +@app.route("/api/provision", methods=["POST"]) +def api_provision(): + """Start provisioning in background. Returns immediately with app_name to poll.""" + host = DATABRICKS_HOST + + if not ADMIN_TOKEN: + return jsonify({"success": False, "error": "Spawner admin token not configured"}), 500 + + body = request.get_json(silent=True) or {} + pat_value = body.get("pat", "").strip() + + if not pat_value: + return jsonify({"success": False, "error": "PAT is required"}), 400 + + # Resolve identity synchronously (fast) so we can return the app_name + try: + email = resolve_pat_owner(host, pat_value) + if not email: + raise ValueError("Could not resolve PAT owner identity") + except Exception as exc: + return jsonify({"success": False, "error": f"Invalid PAT: {exc}"}), 400 + + app_name = app_name_from_email(email) + + # Check if already running — just refresh token + existing = check_existing_app(host, ADMIN_TOKEN, app_name) + if existing.get("deployed") and existing.get("state") == "RUNNING": + store_pat_in_secret_scope(host, ADMIN_TOKEN, app_name, pat_value, "databricks-token") + return jsonify({ + "success": True, + "app_name": app_name, + "app_url": existing.get("app_url", ""), + "already_running": True, + }) + + # Check if already provisioning + with _provision_lock: + existing_job = _provision_jobs.get(app_name) + if existing_job and existing_job["status"] == "in_progress": + return jsonify({"success": True, "app_name": app_name, "already_in_progress": True}) + + # Initialize job tracker + _provision_jobs[app_name] = { + "steps": [{"step": 0, "status": "resolving_user", "message": "Identity verified, starting provision..."}], + "status": "in_progress", + "app_url": "", + "app_name": app_name, + "email": email, + } + + # Kick off background thread + thread = threading.Thread( + target=provision_app_async, + args=(host, ADMIN_TOKEN, pat_value, app_name), + daemon=True, + ) + thread.start() + + return jsonify({"success": True, "app_name": app_name}) + + +@app.route("/api/provision-status/") +def api_provision_status(app_name): + """Poll endpoint for provision progress.""" + with _provision_lock: + job = _provision_jobs.get(app_name) + if not job: + return jsonify({"found": False}) + return jsonify({"found": True, **job}) + + +# In-memory redeploy-all job tracker +_redeploy_job: dict | None = None +_redeploy_lock = threading.Lock() + + +def redeploy_all_apps(host: str, admin_token: str): + """Redeploy all coding-agents-* apps from the shared template.""" + global _redeploy_job + source_code_path = "/Workspace/Shared/apps/coding-agents" + headers = {"Authorization": f"Bearer {admin_token}"} + + try: + # List all coding-agents apps + resp = requests.get(f"{host}/api/2.0/apps", headers=headers) + resp.raise_for_status() + all_apps = resp.json().get("apps", []) + targets = [ + a for a in all_apps + if a["name"].startswith("coding-agents-") + and a["name"] != "coding-agents-spawner" + ] + + with _redeploy_lock: + _redeploy_job["total"] = len(targets) + _redeploy_job["apps"] = [ + {"name": a["name"], "status": "pending"} for a in targets + ] + + for i, a in enumerate(targets): + name = a["name"] + with _redeploy_lock: + _redeploy_job["apps"][i]["status"] = "deploying" + _redeploy_job["completed"] = i + + try: + deploy_resp = 
requests.post( + f"{host}/api/2.0/apps/{name}/deployments", + headers=headers, + json={"source_code_path": source_code_path}, + ) + if deploy_resp.ok: + with _redeploy_lock: + _redeploy_job["apps"][i]["status"] = "deployed" + else: + with _redeploy_lock: + _redeploy_job["apps"][i]["status"] = "error" + _redeploy_job["apps"][i]["error"] = deploy_resp.text[:200] + except Exception as exc: + with _redeploy_lock: + _redeploy_job["apps"][i]["status"] = "error" + _redeploy_job["apps"][i]["error"] = str(exc)[:200] + + with _redeploy_lock: + _redeploy_job["completed"] = len(targets) + _redeploy_job["status"] = "complete" + + except Exception as exc: + with _redeploy_lock: + _redeploy_job["status"] = "error" + _redeploy_job["error"] = str(exc) + + +@app.route("/api/redeploy-all", methods=["POST"]) +def api_redeploy_all(): + """Trigger redeployment of all spawned coding-agents apps from the shared template.""" + global _redeploy_job + + if not ADMIN_TOKEN: + return jsonify({"error": "Admin token not configured"}), 500 + + with _redeploy_lock: + if _redeploy_job and _redeploy_job.get("status") == "in_progress": + return jsonify({"error": "Redeploy already in progress"}), 409 + + _redeploy_job = { + "status": "in_progress", + "total": 0, + "completed": 0, + "apps": [], + "error": None, + "started_at": time.time(), + } + + thread = threading.Thread( + target=redeploy_all_apps, + args=(DATABRICKS_HOST, ADMIN_TOKEN), + daemon=True, + ) + thread.start() + return jsonify({"success": True}) + + +@app.route("/api/redeploy-all/status") +def api_redeploy_all_status(): + """Poll endpoint for redeploy-all progress.""" + with _redeploy_lock: + if not _redeploy_job: + return jsonify({"active": False}) + return jsonify({"active": True, **_redeploy_job}) + + +if __name__ == "__main__": + app.run(host="0.0.0.0", port=8001) diff --git a/spawner/app.yaml b/spawner/app.yaml new file mode 100644 index 0000000..d3d2fc2 --- /dev/null +++ b/spawner/app.yaml @@ -0,0 +1,10 @@ +command: + - uv + - run + - gunicorn + - --timeout + - "300" + - app:app +env: + - name: ADMIN_TOKEN + valueFrom: ADMIN_TOKEN diff --git a/spawner/pyproject.toml b/spawner/pyproject.toml new file mode 100644 index 0000000..0e434c5 --- /dev/null +++ b/spawner/pyproject.toml @@ -0,0 +1,10 @@ +[project] +name = "coda-spawner" +version = "0.1.0" +description = "CoDA Spawner - One-click provisioning for Coding Agents on Databricks Apps" +requires-python = ">=3.12" +dependencies = [ + "flask>=2.0", + "gunicorn>=21.0", + "requests>=2.28", +] diff --git a/spawner/static/index.html b/spawner/static/index.html new file mode 100644 index 0000000..b4ec3b2 --- /dev/null +++ b/spawner/static/index.html @@ -0,0 +1,451 @@ + + + + + + Coding Agents | Databricks + + + + +
diff --git a/spawner/static/index.html b/spawner/static/index.html
new file mode 100644
index 0000000..b4ec3b2
--- /dev/null
+++ b/spawner/static/index.html
@@ -0,0 +1,451 @@
[451-line HTML page; the markup was not preserved in this capture, only the visible page text:]
+Coding Agents | Databricks
+Coding Agents
+Deploy Your Coding Agent
+Get a personal AI-powered coding environment on Databricks. Paste your Personal Access Token below and we'll provision everything for you.
+Setup
+One-time
+Create a PAT at Settings → Developer → Access tokens with all access scopes.
+Your app will be named .
+Spawned Apps
+Loading...
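The setup card above points users at the workspace UI for minting a PAT (Settings → Developer → Access tokens). The same token can also be created programmatically through the Databricks Token API; a small sketch, assuming an existing credential is available in `DATABRICKS_TOKEN` and treating the comment and lifetime as placeholder values:

```python
# Sketch: mint a PAT via the Token API instead of the UI (host, credential, and lifetime are placeholders).
import os

import requests

HOST = os.environ["DATABRICKS_HOST"].rstrip("/")
EXISTING_CREDENTIAL = os.environ["DATABRICKS_TOKEN"]   # any credential that can call the workspace API

resp = requests.post(
    f"{HOST}/api/2.0/token/create",
    headers={"Authorization": f"Bearer {EXISTING_CREDENTIAL}"},
    json={"comment": "coding-agents", "lifetime_seconds": 90 * 24 * 3600},
    timeout=30,
)
resp.raise_for_status()
new_pat = resp.json()["token_value"]   # the value pasted into the spawner form
print(new_pat)
```

Either way, the pasted token is what the spawner persists in the per-app secret scope via `store_pat_in_secret_scope`.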
+ + + + diff --git a/spawner/uv.lock b/spawner/uv.lock new file mode 100644 index 0000000..16b26d1 --- /dev/null +++ b/spawner/uv.lock @@ -0,0 +1,283 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/b6/9ee9c1a608916ca5feae81a344dffbaa53b26b90be58cc2159e3332d44ec/charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade", size = 280976, upload-time = "2026-03-06T06:01:15.276Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d8/a54f7c0b96f1df3563e9190f04daf981e365a9b397eedfdfb5dbef7e5c6c/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54", size = 189356, upload-time = "2026-03-06T06:01:16.511Z" }, + { url = "https://files.pythonhosted.org/packages/42/69/2bf7f76ce1446759a5787cb87d38f6a61eb47dbbdf035cfebf6347292a65/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467", size = 206369, upload-time = "2026-03-06T06:01:17.853Z" }, + { url = "https://files.pythonhosted.org/packages/10/9c/949d1a46dab56b959d9a87272482195f1840b515a3380e39986989a893ae/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60", size = 203285, upload-time = "2026-03-06T06:01:19.473Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/5c/ae30362a88b4da237d71ea214a8c7eb915db3eec941adda511729ac25fa2/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d", size = 196274, upload-time = "2026-03-06T06:01:20.728Z" }, + { url = "https://files.pythonhosted.org/packages/b2/07/c9f2cb0e46cb6d64fdcc4f95953747b843bb2181bda678dc4e699b8f0f9a/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e", size = 184715, upload-time = "2026-03-06T06:01:22.194Z" }, + { url = "https://files.pythonhosted.org/packages/36/64/6b0ca95c44fddf692cd06d642b28f63009d0ce325fad6e9b2b4d0ef86a52/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f", size = 193426, upload-time = "2026-03-06T06:01:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/50/bc/a730690d726403743795ca3f5bb2baf67838c5fea78236098f324b965e40/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc", size = 191780, upload-time = "2026-03-06T06:01:25.053Z" }, + { url = "https://files.pythonhosted.org/packages/97/4f/6c0bc9af68222b22951552d73df4532b5be6447cee32d58e7e8c74ecbb7b/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95", size = 185805, upload-time = "2026-03-06T06:01:26.294Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b9/a523fb9b0ee90814b503452b2600e4cbc118cd68714d57041564886e7325/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a", size = 208342, upload-time = "2026-03-06T06:01:27.55Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/c59e761dee4464050713e50e27b58266cc8e209e518c0b378c1580c959ba/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac", size = 193661, upload-time = "2026-03-06T06:01:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/1c/43/729fa30aad69783f755c5ad8649da17ee095311ca42024742701e202dc59/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1", size = 204819, upload-time = "2026-03-06T06:01:30.298Z" }, + { url = "https://files.pythonhosted.org/packages/87/33/d9b442ce5a91b96fc0840455a9e49a611bbadae6122778d0a6a79683dd31/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98", size = 198080, upload-time = "2026-03-06T06:01:31.478Z" }, + { url = "https://files.pythonhosted.org/packages/56/5a/b8b5a23134978ee9885cee2d6995f4c27cc41f9baded0a9685eabc5338f0/charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262", size = 132630, upload-time = "2026-03-06T06:01:33.056Z" }, + { url = "https://files.pythonhosted.org/packages/70/53/e44a4c07e8904500aec95865dc3f6464dc3586a039ef0df606eb3ac38e35/charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636", size = 142856, 
upload-time = "2026-03-06T06:01:34.489Z" }, + { url = "https://files.pythonhosted.org/packages/ea/aa/c5628f7cad591b1cf45790b7a61483c3e36cf41349c98af7813c483fd6e8/charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02", size = 132982, upload-time = "2026-03-06T06:01:35.641Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, + { url = "https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, + { url = "https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, + { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, + { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, + { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, upload-time = "2026-03-06T06:02:01.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, + { url = "https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, upload-time = "2026-03-06T06:02:14.83Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, + { url = "https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, + { url = "https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "coda-spawner" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "flask" }, + { name = "gunicorn" }, + { name = "requests" }, +] + +[package.metadata] +requires-dist = [ + { name = "flask", specifier = ">=2.0" }, + { name = "gunicorn", specifier = ">=21.0" }, + { name = "requests", specifier = ">=2.28" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "flask" +version = "3.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "blinker" }, + { name = "click" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "markupsafe" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/00/35d85dcce6c57fdc871f3867d465d780f302a175ea360f62533f12b27e2b/flask-3.1.3.tar.gz", hash = "sha256:0ef0e52b8a9cd932855379197dd8f94047b359ca0a78695144304cb45f87c9eb", size = 759004, upload-time = "2026-02-19T05:00:57.678Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/34f6962f9b9e9c71f6e5ed806e0d0ff03c9d1b0b2340088a0cf4bce09b18/flask-3.1.3-py3-none-any.whl", hash = 
"sha256:f4bcbefc124291925f1a26446da31a5178f9483862233b23c0c96a20701f670c", size = 103424, upload-time = "2026-02-19T05:00:56.027Z" }, +] + +[[package]] +name = "gunicorn" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/13/ef67f59f6a7896fdc2c1d62b5665c5219d6b0a9a1784938eb9a28e55e128/gunicorn-25.1.0.tar.gz", hash = "sha256:1426611d959fa77e7de89f8c0f32eed6aa03ee735f98c01efba3e281b1c47616", size = 594377, upload-time = "2026-02-13T11:09:58.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl", hash = "sha256:d0b1236ccf27f72cfe14bce7caadf467186f19e865094ca84221424e839b8b8b", size = 197067, upload-time = "2026-02-13T11:09:57.146Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] 
+name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "werkzeug" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" }, +] diff --git a/static/index.html b/static/index.html index 0df0b68..3670d2b 100644 --- a/static/index.html +++ b/static/index.html @@ -357,6 +357,10 @@

Panes

 Close pane          Alt+Shift+W
 Next pane           Alt+Shift+]
 Previous pane       Alt+Shift+[
+
+Clipboard
+Copy                Ctrl+C
+Paste               Ctrl+V
+Paste image         Paste from clipboard
+
 General
 Search              Ctrl+Shift+F
 Voice dictation     Alt+V
@@ -379,6 +383,14 @@

General